diff --git a/ptvsd/__init__.py b/ptvsd/__init__.py index cb46d954..9907ffec 100644 --- a/ptvsd/__init__.py +++ b/ptvsd/__init__.py @@ -4,3 +4,33 @@ __author__ = "Microsoft Corporation " __version__ = "4.0.0a1" + +import sys +import os.path + +# ptvsd must always be imported before pydevd +if 'pydevd' in sys.modules: + raise ImportError('ptvsd must be imported before pydevd') + +# Add our vendored pydevd directory to path, so that it gets found first. +sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'pydevd')) + +# Load our wrapper module, which will detour various functionality inside pydevd. +# This must be done before the imports below, otherwise some modules will end up +# with local copies of pre-detour functions. +import ptvsd.wrapper + +# Now make sure all the top-level modules and packages in pydevd are loaded. +import _pydev_bundle +import _pydev_imps +import _pydev_runfiles +import _pydevd_bundle +import _pydevd_frame_eval +import pydev_ipython +import pydevd_concurrency_analyser +import pydevd_plugins +import pydevd + +# Remove sys.path entry added above - any pydevd modules that aren't loaded at +# this point, will be loaded using their parent package's __path__. +del sys.path[0] diff --git a/ptvsd/pydevd/.gitignore b/ptvsd/pydevd/.gitignore new file mode 100644 index 00000000..02478b5f --- /dev/null +++ b/ptvsd/pydevd/.gitignore @@ -0,0 +1,36 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*.class +_pydevd_bundle/*.so +# Distribution / packaging +.Python +env/ +bin/ +build/temp.* +develop-eggs/ +dist/ +eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.cache +nosetests.xml +coverage.xml + +snippet.py +build/* \ No newline at end of file diff --git a/ptvsd/pydevd/.project b/ptvsd/pydevd/.project new file mode 100644 index 00000000..a6cc6d69 --- /dev/null +++ b/ptvsd/pydevd/.project @@ -0,0 +1,17 @@ + + + PyDev.Debugger + + + + + + org.python.pydev.PyDevBuilder + + + + + + org.python.pydev.pythonNature + + diff --git a/ptvsd/pydevd/.pydevproject b/ptvsd/pydevd/.pydevproject new file mode 100644 index 00000000..bbf19252 --- /dev/null +++ b/ptvsd/pydevd/.pydevproject @@ -0,0 +1,12 @@ + + + +/${PROJECT_DIR_NAME} +/${PROJECT_DIR_NAME}/build_tools +/${PROJECT_DIR_NAME}/jython_test_deps/ant.jar +/${PROJECT_DIR_NAME}/jython_test_deps/junit.jar + +python 3.0 +Default +2.6, 2.7, 3.6 + diff --git a/ptvsd/pydevd/.settings/org.eclipse.core.resources.prefs b/ptvsd/pydevd/.settings/org.eclipse.core.resources.prefs new file mode 100644 index 00000000..dbd6be65 --- /dev/null +++ b/ptvsd/pydevd/.settings/org.eclipse.core.resources.prefs @@ -0,0 +1,9 @@ +eclipse.preferences.version=1 +encoding//.settings/org.python.pydev.yaml=UTF-8 +encoding//pydev_ipython/inputhook.py=utf-8 +encoding//pydev_ipython/inputhookglut.py=utf-8 +encoding//pydev_ipython/inputhookpyglet.py=utf-8 +encoding//pydev_ipython/inputhookqt4.py=utf-8 +encoding//pydev_ipython/inputhookqt5.py=utf-8 +encoding//pydev_ipython/inputhookwx.py=utf-8 +encoding//pydevd_attach_to_process/winappdbg/__init__.py=utf-8 diff --git a/ptvsd/pydevd/.settings/org.python.pydev.yaml b/ptvsd/pydevd/.settings/org.python.pydev.yaml new file mode 100644 index 00000000..465232a3 --- /dev/null +++ b/ptvsd/pydevd/.settings/org.python.pydev.yaml @@ -0,0 +1,28 @@ +ADD_NEW_LINE_AT_END_OF_FILE: true +AUTOPEP8_PARAMETERS: '' +BREAK_IMPORTS_MODE: ESCAPE +DATE_FIELD_FORMAT: 
yyyy-MM-dd +DATE_FIELD_NAME: __updated__ +DELETE_UNUSED_IMPORTS: false +ENABLE_DATE_FIELD_ACTION: false +FORMAT_BEFORE_SAVING: false +FORMAT_ONLY_CHANGED_LINES: false +FORMAT_WITH_AUTOPEP8: false +FROM_IMPORTS_FIRST: false +GROUP_IMPORTS: true +MULTILINE_IMPORTS: true +PEP8_IMPORTS: true +PYDEV_TEST_RUNNER: '2' +PYDEV_TEST_RUNNER_DEFAULT_PARAMETERS: "--capture=no\r\n-vv" +PYDEV_USE_PYUNIT_VIEW: true +SAVE_ACTIONS_ONLY_ON_WORKSPACE_FILES: true +SORT_IMPORTS_ON_SAVE: false +SORT_NAMES_GROUPED: false +SPACES_BEFORE_COMMENT: '2' +SPACES_IN_START_COMMENT: '1' +TRIM_EMPTY_LINES: false +TRIM_MULTILINE_LITERALS: false +USE_ASSIGN_WITH_PACES_INSIDER_PARENTESIS: false +USE_OPERATORS_WITH_SPACE: true +USE_SPACE_AFTER_COMMA: true +USE_SPACE_FOR_PARENTESIS: false diff --git a/ptvsd/pydevd/.travis.yml b/ptvsd/pydevd/.travis.yml new file mode 100644 index 00000000..5d83e9f1 --- /dev/null +++ b/ptvsd/pydevd/.travis.yml @@ -0,0 +1,75 @@ +language: python + +matrix: + include: + # Python 2.6 (with and without cython) + - python: 2.6 + env: PYDEVD_USE_CYTHON=YES + env: PYDEVD_TEST_JYTHON=NO + - python: 2.6 + env: PYDEVD_USE_CYTHON=NO + env: PYDEVD_TEST_JYTHON=NO + # Python 2.7 (with and without cython) + - python: 2.7 + env: PYDEVD_USE_CYTHON=YES + env: PYDEVD_TEST_JYTHON=NO + - python: 2.7 + env: PYDEVD_USE_CYTHON=NO + env: PYDEVD_TEST_JYTHON=NO + # Python 3.5 (with and without cython) + - python: 3.5 + env: PYDEVD_USE_CYTHON=YES + env: PYDEVD_TEST_JYTHON=NO + - python: 3.5 + env: PYDEVD_USE_CYTHON=NO + env: PYDEVD_TEST_JYTHON=NO + # Python 3.6 (with and without cython) + - python: 3.6 + env: PYDEVD_USE_CYTHON=YES + env: PYDEVD_TEST_JYTHON=NO + - python: 3.6 + env: PYDEVD_USE_CYTHON=NO + env: PYDEVD_TEST_JYTHON=NO + # Jython + - python: 2.7 + env: PYDEVD_USE_CYTHON=NO + env: PYDEVD_TEST_JYTHON=YES + env: JYTHON_URL=http://search.maven.org/remotecontent?filepath=org/python/jython-installer/2.7.0/jython-installer-2.7.0.jar + +before_install: + # CPython setup + - if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; fi + - if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then chmod +x miniconda.sh; fi + - if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then ./miniconda.sh -b; fi + - if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; fi + - if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then conda update --yes conda; fi + # Jython setup + - if [ "$PYDEVD_TEST_JYTHON" == "YES" ]; then wget $JYTHON_URL -O jython_installer.jar; java -jar jython_installer.jar -s -d $HOME/jython; export PATH=$HOME/jython:$HOME/jython/bin:$PATH; fi + - if [ "$PYDEVD_TEST_JYTHON" == "YES" ]; then jython -c "print('')"; fi + # The next couple lines fix a crash with multiprocessing on Travis and are not specific to using Miniconda + - sudo rm -rf /dev/shm + - sudo ln -s /run/shm /dev/shm + # Fix issue with testGui + - "export DISPLAY=:99.0" + - "sh -e /etc/init.d/xvfb start" +# Install packages +install: + # Both + - export PYTHONPATH=. 
+ # Python setup + - if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then conda create --yes -n build_env python=$TRAVIS_PYTHON_VERSION; fi + - if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then source activate build_env; fi + - if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then chmod +x ./.travis_install_python_deps.sh; fi + - if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then ./.travis_install_python_deps.sh; fi + - if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then python build_tools/build.py; fi + # Jython setup + - if [ "$PYDEVD_TEST_JYTHON" = "YES" ]; then chmod +x ./.travis_install_jython_deps.sh; fi + - if [ "$PYDEVD_TEST_JYTHON" = "YES" ]; then ./.travis_install_jython_deps.sh; fi + +# Run test +# On local machine with jython: c:\bin\jython2.7.0\bin\jython.exe -Dpython.path=.;jython_test_deps/ant.jar;jython_test_deps/junit.jar -m pytest +# On remove machine with python: c:\bin\python27\python.exe -m pytest +script: + - if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then python -m pytest; fi + - if [ "$PYDEVD_TEST_JYTHON" = "YES" ]; then jython -Dpython.path=.:jython_test_deps/ant.jar:jython_test_deps/junit.jar -m pytest; fi + diff --git a/ptvsd/pydevd/.travis_install_jython_deps.sh b/ptvsd/pydevd/.travis_install_jython_deps.sh new file mode 100644 index 00000000..7ee528ad --- /dev/null +++ b/ptvsd/pydevd/.travis_install_jython_deps.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -ev + +pip install pytest diff --git a/ptvsd/pydevd/.travis_install_python_deps.sh b/ptvsd/pydevd/.travis_install_python_deps.sh new file mode 100644 index 00000000..32d37b98 --- /dev/null +++ b/ptvsd/pydevd/.travis_install_python_deps.sh @@ -0,0 +1,21 @@ +#!/bin/bash +set -ev + +conda install --yes numpy ipython cython pytest psutil + +if [ "$TRAVIS_PYTHON_VERSION" = "2.6" ]; then + conda install --yes pyqt=4 + # Django 1.7 does not support Python 2.7 +fi +if [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then + conda install --yes pyqt=4 + pip install "django>=1.7,<1.8" + +fi +if [ "$TRAVIS_PYTHON_VERSION" = "3.5" ]; then + conda install --yes pyqt=5 + pip install "django>=1.7,<1.8" +fi + +pip install Pympler +pip install pytest diff --git a/ptvsd/pydevd/LICENSE b/ptvsd/pydevd/LICENSE new file mode 100644 index 00000000..50328437 --- /dev/null +++ b/ptvsd/pydevd/LICENSE @@ -0,0 +1,203 @@ +Eclipse Public License - v 1.0 + +THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC +LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM +CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT. + +1. DEFINITIONS + +"Contribution" means: + +a) in the case of the initial Contributor, the initial code and documentation + distributed under this Agreement, and +b) in the case of each subsequent Contributor: + i) changes to the Program, and + ii) additions to the Program; + + where such changes and/or additions to the Program originate from and are + distributed by that particular Contributor. A Contribution 'originates' + from a Contributor if it was added to the Program by such Contributor + itself or anyone acting on such Contributor's behalf. Contributions do not + include additions to the Program which: (i) are separate modules of + software distributed in conjunction with the Program under their own + license agreement, and (ii) are not derivative works of the Program. + +"Contributor" means any person or entity that distributes the Program. + +"Licensed Patents" mean patent claims licensable by a Contributor which are +necessarily infringed by the use or sale of its Contribution alone or when +combined with the Program. 
+ +"Program" means the Contributions distributed in accordance with this +Agreement. + +"Recipient" means anyone who receives the Program under this Agreement, +including all Contributors. + +2. GRANT OF RIGHTS + a) Subject to the terms of this Agreement, each Contributor hereby grants + Recipient a non-exclusive, worldwide, royalty-free copyright license to + reproduce, prepare derivative works of, publicly display, publicly + perform, distribute and sublicense the Contribution of such Contributor, + if any, and such derivative works, in source code and object code form. + b) Subject to the terms of this Agreement, each Contributor hereby grants + Recipient a non-exclusive, worldwide, royalty-free patent license under + Licensed Patents to make, use, sell, offer to sell, import and otherwise + transfer the Contribution of such Contributor, if any, in source code and + object code form. This patent license shall apply to the combination of + the Contribution and the Program if, at the time the Contribution is + added by the Contributor, such addition of the Contribution causes such + combination to be covered by the Licensed Patents. The patent license + shall not apply to any other combinations which include the Contribution. + No hardware per se is licensed hereunder. + c) Recipient understands that although each Contributor grants the licenses + to its Contributions set forth herein, no assurances are provided by any + Contributor that the Program does not infringe the patent or other + intellectual property rights of any other entity. Each Contributor + disclaims any liability to Recipient for claims brought by any other + entity based on infringement of intellectual property rights or + otherwise. As a condition to exercising the rights and licenses granted + hereunder, each Recipient hereby assumes sole responsibility to secure + any other intellectual property rights needed, if any. For example, if a + third party patent license is required to allow Recipient to distribute + the Program, it is Recipient's responsibility to acquire that license + before distributing the Program. + d) Each Contributor represents that to its knowledge it has sufficient + copyright rights in its Contribution, if any, to grant the copyright + license set forth in this Agreement. + +3. REQUIREMENTS + +A Contributor may choose to distribute the Program in object code form under +its own license agreement, provided that: + + a) it complies with the terms and conditions of this Agreement; and + b) its license agreement: + i) effectively disclaims on behalf of all Contributors all warranties + and conditions, express and implied, including warranties or + conditions of title and non-infringement, and implied warranties or + conditions of merchantability and fitness for a particular purpose; + ii) effectively excludes on behalf of all Contributors all liability for + damages, including direct, indirect, special, incidental and + consequential damages, such as lost profits; + iii) states that any provisions which differ from this Agreement are + offered by that Contributor alone and not by any other party; and + iv) states that source code for the Program is available from such + Contributor, and informs licensees how to obtain it in a reasonable + manner on or through a medium customarily used for software exchange. + +When the Program is made available in source code form: + + a) it must be made available under this Agreement; and + b) a copy of this Agreement must be included with each copy of the Program. 
+ Contributors may not remove or alter any copyright notices contained + within the Program. + +Each Contributor must identify itself as the originator of its Contribution, +if +any, in a manner that reasonably allows subsequent Recipients to identify the +originator of the Contribution. + +4. COMMERCIAL DISTRIBUTION + +Commercial distributors of software may accept certain responsibilities with +respect to end users, business partners and the like. While this license is +intended to facilitate the commercial use of the Program, the Contributor who +includes the Program in a commercial product offering should do so in a manner +which does not create potential liability for other Contributors. Therefore, +if a Contributor includes the Program in a commercial product offering, such +Contributor ("Commercial Contributor") hereby agrees to defend and indemnify +every other Contributor ("Indemnified Contributor") against any losses, +damages and costs (collectively "Losses") arising from claims, lawsuits and +other legal actions brought by a third party against the Indemnified +Contributor to the extent caused by the acts or omissions of such Commercial +Contributor in connection with its distribution of the Program in a commercial +product offering. The obligations in this section do not apply to any claims +or Losses relating to any actual or alleged intellectual property +infringement. In order to qualify, an Indemnified Contributor must: +a) promptly notify the Commercial Contributor in writing of such claim, and +b) allow the Commercial Contributor to control, and cooperate with the +Commercial Contributor in, the defense and any related settlement +negotiations. The Indemnified Contributor may participate in any such claim at +its own expense. + +For example, a Contributor might include the Program in a commercial product +offering, Product X. That Contributor is then a Commercial Contributor. If +that Commercial Contributor then makes performance claims, or offers +warranties related to Product X, those performance claims and warranties are +such Commercial Contributor's responsibility alone. Under this section, the +Commercial Contributor would have to defend claims against the other +Contributors related to those performance claims and warranties, and if a +court requires any other Contributor to pay any damages as a result, the +Commercial Contributor must pay those damages. + +5. NO WARRANTY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR +IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, +NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each +Recipient is solely responsible for determining the appropriateness of using +and distributing the Program and assumes all risks associated with its +exercise of rights under this Agreement , including but not limited to the +risks and costs of program errors, compliance with applicable laws, damage to +or loss of data, programs or equipment, and unavailability or interruption of +operations. + +6. 
DISCLAIMER OF LIABILITY + +EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY +CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION +LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE +EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY +OF SUCH DAMAGES. + +7. GENERAL + +If any provision of this Agreement is invalid or unenforceable under +applicable law, it shall not affect the validity or enforceability of the +remainder of the terms of this Agreement, and without further action by the +parties hereto, such provision shall be reformed to the minimum extent +necessary to make such provision valid and enforceable. + +If Recipient institutes patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Program itself +(excluding combinations of the Program with other software or hardware) +infringes such Recipient's patent(s), then such Recipient's rights granted +under Section 2(b) shall terminate as of the date such litigation is filed. + +All Recipient's rights under this Agreement shall terminate if it fails to +comply with any of the material terms or conditions of this Agreement and does +not cure such failure in a reasonable period of time after becoming aware of +such noncompliance. If all Recipient's rights under this Agreement terminate, +Recipient agrees to cease use and distribution of the Program as soon as +reasonably practicable. However, Recipient's obligations under this Agreement +and any licenses granted by Recipient relating to the Program shall continue +and survive. + +Everyone is permitted to copy and distribute copies of this Agreement, but in +order to avoid inconsistency the Agreement is copyrighted and may only be +modified in the following manner. The Agreement Steward reserves the right to +publish new versions (including revisions) of this Agreement from time to +time. No one other than the Agreement Steward has the right to modify this +Agreement. The Eclipse Foundation is the initial Agreement Steward. The +Eclipse Foundation may assign the responsibility to serve as the Agreement +Steward to a suitable separate entity. Each new version of the Agreement will +be given a distinguishing version number. The Program (including +Contributions) may always be distributed subject to the version of the +Agreement under which it was received. In addition, after a new version of the +Agreement is published, Contributor may elect to distribute the Program +(including its Contributions) under the new version. Except as expressly +stated in Sections 2(a) and 2(b) above, Recipient receives no rights or +licenses to the intellectual property of any Contributor under this Agreement, +whether expressly, by implication, estoppel or otherwise. All rights in the +Program not expressly granted under this Agreement are reserved. + +This Agreement is governed by the laws of the State of New York and the +intellectual property laws of the United States of America. No party to this +Agreement will bring a legal action under this Agreement more than one year +after the cause of action arose. Each party waives its rights to a jury trial in +any resulting litigation. 
\ No newline at end of file diff --git a/ptvsd/pydevd/MANIFEST.in b/ptvsd/pydevd/MANIFEST.in new file mode 100644 index 00000000..bf7ead14 --- /dev/null +++ b/ptvsd/pydevd/MANIFEST.in @@ -0,0 +1,4 @@ +include *.rst *.txt *.md LICENSE .travis.yml appveyor.yml *.pyx +recursive-include pydevd_attach_to_process *.py *.dll *.so *.dylib *.txt *.c *.h *.bat Makefile *.sh *.pyx +recursive-include _pydevd_bundle *.pyx +recursive-include build_tools *.py \ No newline at end of file diff --git a/ptvsd/pydevd/README.rst b/ptvsd/pydevd/README.rst new file mode 100644 index 00000000..4827a87c --- /dev/null +++ b/ptvsd/pydevd/README.rst @@ -0,0 +1,48 @@ +PyDev.Debugger +============== + +The sources for the PyDev.Debugger (used in PyDev & PyCharm) may be seen at: + +https://github.com/fabioz/PyDev.Debugger + +In general, the debugger backend should **NOT** be installed separately if you're using an IDE which already +bundles it (such as PyDev or PyCharm). + +It is however available in PyPi so that it can be installed for doing remote debugging with `pip` -- so, when +debugging a process which runs in another machine, it's possible to `pip install pydevd` and in the code use +`pydevd.settrace(host='10.1.1.1')` to connect the debugger backend to the debugger UI running in the IDE +(whereas previously the sources had to be manually copied from the IDE installation). + +It should be compatible with Python 2.6 onwards (as well as Jython 2.7, IronPython and PyPy -- and +any other variant which properly supports the Python structure for debuggers -- i.e.: sys.settrace/threading.settrace). + +Recent versions contain speedup modules using Cython, which are generated with a few changes in the regular files +to `cythonize` the files. To update and compile the cython sources (and generate some other auto-generated files), +`build_tools/build.py` should be run -- note that the resulting .pyx and .c files should be commited. + +To see performance changes, see: + +https://www.speedtin.com/reports/7_pydevd_cython (performance results with cython). +https://www.speedtin.com/reports/8_pydevd_pure_python (performance results without cython). + +To generate a distribution with the precompiled binaries for the IDE, `build_binaries_windows.py` should be run ( +note that the environments must be pre-created as specified in that file). + +To generate a distribution to upload to PyPi, `python setup.py sdist bdist_wheel` should be run for each python version +which should have a wheel and afterwards `twine upload -s dist/pydevd-*` shoud be run to actually upload the contents +to PyPi. + +Travis (Linux CI): + +.. |travis| image:: https://travis-ci.org/fabioz/PyDev.Debugger.png + :target: https://travis-ci.org/fabioz/PyDev.Debugger + +|travis| + +Appveyor (Windows CI): + +.. 
|appveyor| image:: https://ci.appveyor.com/api/projects/status/j6vjq687brbk20ux?svg=true + :target: https://ci.appveyor.com/project/fabioz/pydev-debugger + +|appveyor| + diff --git a/ptvsd/pydevd/_pydev_bundle/__init__.py b/ptvsd/pydevd/_pydev_bundle/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_calltip_util.py b/ptvsd/pydevd/_pydev_bundle/_pydev_calltip_util.py new file mode 100644 index 00000000..b846fb4e --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_calltip_util.py @@ -0,0 +1,158 @@ +''' +License: Apache 2.0 +Author: Yuli Fitterman +''' +# noinspection PyBroadException +import types + +from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY3K + +try: + import inspect +except: + try: + from _pydev_imps import _pydev_inspect as inspect + except: + import traceback; + + traceback.print_exc() # Ok, no inspect available (search will not work)from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY3K + +from _pydev_bundle._pydev_imports_tipper import signature_from_docstring + + +def is_bound_method(obj): + if isinstance(obj, types.MethodType): + return getattr(obj, '__self__', getattr(obj, 'im_self', None)) is not None + else: + return False + + +def get_class_name(instance): + return getattr(getattr(instance, "__class__", None), "__name__", None) + + +def get_bound_class_name(obj): + my_self = getattr(obj, '__self__', getattr(obj, 'im_self', None)) + if my_self is None: + return None + return get_class_name(my_self) + + +def get_description(obj): + try: + ob_call = obj.__call__ + except: + ob_call = None + + if isinstance(obj, type) or type(obj).__name__ == 'classobj': + fob = getattr(obj, '__init__', lambda: None) + if not isinstance(fob, (types.FunctionType, types.MethodType)): + fob = obj + elif is_bound_method(ob_call): + fob = ob_call + else: + fob = obj + + argspec = "" + fn_name = None + fn_class = None + if isinstance(fob, (types.FunctionType, types.MethodType)): + spec_info = inspect.getfullargspec(fob) if IS_PY3K else inspect.getargspec(fob) + argspec = inspect.formatargspec(*spec_info) + fn_name = getattr(fob, '__name__', None) + if isinstance(obj, type) or type(obj).__name__ == 'classobj': + fn_name = "__init__" + fn_class = getattr(obj, "__name__", "UnknownClass") + elif is_bound_method(obj) or is_bound_method(ob_call): + fn_class = get_bound_class_name(obj) or "UnknownClass" + + else: + fn_name = getattr(fob, '__name__', None) + fn_self = getattr(fob, '__self__', None) + if fn_self is not None and not isinstance(fn_self, types.ModuleType): + fn_class = get_class_name(fn_self) + + doc_string = get_docstring(ob_call) if is_bound_method(ob_call) else get_docstring(obj) + return create_method_stub(fn_name, fn_class, argspec, doc_string) + + +def create_method_stub(fn_name, fn_class, argspec, doc_string): + if fn_name and argspec: + doc_string = "" if doc_string is None else doc_string + fn_stub = create_function_stub(fn_name, argspec, doc_string, indent=1 if fn_class else 0) + if fn_class: + expr = fn_class if fn_name == '__init__' else fn_class + '().' 
+ fn_name + return create_class_stub(fn_class, fn_stub) + "\n" + expr + else: + expr = fn_name + return fn_stub + "\n" + expr + elif doc_string: + if fn_name: + restored_signature, _ = signature_from_docstring(doc_string, fn_name) + if restored_signature: + return create_method_stub(fn_name, fn_class, restored_signature, doc_string) + return create_function_stub('unknown', '(*args, **kwargs)', doc_string) + '\nunknown' + + else: + return '' + + +def get_docstring(obj): + if obj is not None: + try: + if IS_JYTHON: + # Jython + doc = obj.__doc__ + if doc is not None: + return doc + + from _pydev_bundle import _pydev_jy_imports_tipper + + is_method, infos = _pydev_jy_imports_tipper.ismethod(obj) + ret = '' + if is_method: + for info in infos: + ret += info.get_as_doc() + return ret + + else: + + doc = inspect.getdoc(obj) + if doc is not None: + return doc + except: + pass + else: + return '' + try: + # if no attempt succeeded, try to return repr()... + return repr(obj) + except: + try: + # otherwise the class + return str(obj.__class__) + except: + # if all fails, go to an empty string + return '' + + +def create_class_stub(class_name, contents): + return "class %s(object):\n%s" % (class_name, contents) + + +def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0): + def shift_right(string, prefix): + return ''.join(prefix + line for line in string.splitlines(True)) + + fn_docstring = shift_right(inspect.cleandoc(fn_docstring), " " * (indent + 1)) + ret = ''' +def %s%s: + """%s""" + pass +''' % (fn_name, fn_argspec, fn_docstring) + ret = ret[1:] # remove first /n + ret = ret.replace('\t', " ") + if indent: + prefix = " " * indent + ret = shift_right(ret, prefix) + return ret diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_completer.py b/ptvsd/pydevd/_pydev_bundle/_pydev_completer.py new file mode 100644 index 00000000..4b51342c --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_completer.py @@ -0,0 +1,191 @@ +import pydevconsole +import sys + +if sys.version_info[0] >= 3: + import builtins as __builtin__ # Py3 +else: + import __builtin__ + +try: + import java.lang #@UnusedImport + from _pydev_bundle import _pydev_jy_imports_tipper + _pydev_imports_tipper = _pydev_jy_imports_tipper +except ImportError: + IS_JYTHON = False + from _pydev_bundle import _pydev_imports_tipper + +from _pydevd_bundle import pydevd_xml +dir2 = _pydev_imports_tipper.generate_imports_tip_for_module + + +#======================================================================================================================= +# _StartsWithFilter +#======================================================================================================================= +class _StartsWithFilter: + ''' + Used because we can't create a lambda that'll use an outer scope in jython 2.1 + ''' + + + def __init__(self, start_with): + self.start_with = start_with.lower() + + def __call__(self, name): + return name.lower().startswith(self.start_with) + +#======================================================================================================================= +# Completer +# +# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev) +#======================================================================================================================= +class Completer: + + def __init__(self, namespace=None, global_namespace=None): + """Create a new completer for the command line. + + Completer([namespace,global_namespace]) -> completer instance. 
+ + If unspecified, the default namespace where completions are performed + is __main__ (technically, __main__.__dict__). Namespaces should be + given as dictionaries. + + An optional second namespace can be given. This allows the completer + to handle cases where both the local and global scopes need to be + distinguished. + + Completer instances should be used as the completion mechanism of + readline via the set_completer() call: + + readline.set_completer(Completer(my_namespace).complete) + """ + + # Don't bind to namespace quite yet, but flag whether the user wants a + # specific namespace or to use __main__.__dict__. This will allow us + # to bind to __main__.__dict__ at completion time, not now. + if namespace is None: + self.use_main_ns = 1 + else: + self.use_main_ns = 0 + self.namespace = namespace + + # The global namespace, if given, can be bound directly + if global_namespace is None: + self.global_namespace = {} + else: + self.global_namespace = global_namespace + + def complete(self, text): + """Return the next possible completion for 'text'. + + This is called successively with state == 0, 1, 2, ... until it + returns None. The completion should begin with 'text'. + + """ + if self.use_main_ns: + #In pydev this option should never be used + raise RuntimeError('Namespace must be provided!') + self.namespace = __main__.__dict__ #@UndefinedVariable + + if "." in text: + return self.attr_matches(text) + else: + return self.global_matches(text) + + def global_matches(self, text): + """Compute matches when text is a simple name. + + Return a list of all keywords, built-in functions and names currently + defined in self.namespace or self.global_namespace that match. + + """ + + + def get_item(obj, attr): + return obj[attr] + + a = {} + + for dict_with_comps in [__builtin__.__dict__, self.namespace, self.global_namespace]: #@UndefinedVariable + a.update(dict_with_comps) + + filter = _StartsWithFilter(text) + + return dir2(a, a.keys(), get_item, filter) + + def attr_matches(self, text): + """Compute matches when text contains a dot. + + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluatable in self.namespace or self.global_namespace, it will be + evaluated and its attributes (as revealed by dir()) are used as + possible completions. (For class instances, class members are are + also considered.) + + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. + + """ + import re + + # Another option, seems to work great. Catches things like ''. 
+ m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text) #@UndefinedVariable + + if not m: + return [] + + expr, attr = m.group(1, 3) + try: + obj = eval(expr, self.namespace) + except: + try: + obj = eval(expr, self.global_namespace) + except: + return [] + + filter = _StartsWithFilter(attr) + + words = dir2(obj, filter=filter) + + return words + + +#======================================================================================================================= +# generate_completions_as_xml +#======================================================================================================================= +def generate_completions_as_xml(frame, act_tok): + if frame is None: + return '' + + #Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329 + #(Names not resolved in generator expression in method) + #See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html + updated_globals = {} + updated_globals.update(frame.f_globals) + updated_globals.update(frame.f_locals) #locals later because it has precedence over the actual globals + + if pydevconsole.IPYTHON: + completions = pydevconsole.get_completions(act_tok, act_tok, updated_globals, frame.f_locals) + else: + completer = Completer(updated_globals, None) + #list(tuple(name, descr, parameters, type)) + completions = completer.complete(act_tok) + + valid_xml = pydevd_xml.make_valid_xml_value + quote = pydevd_xml.quote + + msg = [""] + + for comp in completions: + msg.append('') + msg.append("") + + return ''.join(msg) + diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py b/ptvsd/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py new file mode 100644 index 00000000..6264e3db --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py @@ -0,0 +1,41 @@ +import sys + + +def __getfilesystemencoding(): + ''' + Note: there's a copy of this method in interpreterInfo.py + ''' + try: + ret = sys.getfilesystemencoding() + if not ret: + raise RuntimeError('Unable to get encoding.') + return ret + except: + try: + #Handle Jython + from java.lang import System # @UnresolvedImport + env = System.getProperty("os.name").lower() + if env.find('win') != -1: + return 'ISO-8859-1' #mbcs does not work on Jython, so, use a (hopefully) suitable replacement + return 'utf-8' + except: + pass + + #Only available from 2.3 onwards. + if sys.platform == 'win32': + return 'mbcs' + return 'utf-8' + +def getfilesystemencoding(): + try: + ret = __getfilesystemencoding() + + #Check if the encoding is actually there to be used! 
+ if hasattr('', 'encode'): + ''.encode(ret) + if hasattr('', 'decode'): + ''.decode(ret) + + return ret + except: + return 'utf-8' diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_getopt.py b/ptvsd/pydevd/_pydev_bundle/_pydev_getopt.py new file mode 100644 index 00000000..5548651e --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_getopt.py @@ -0,0 +1,130 @@ + +#======================================================================================================================= +# getopt code copied since gnu_getopt is not available on jython 2.1 +#======================================================================================================================= +class GetoptError(Exception): + opt = '' + msg = '' + def __init__(self, msg, opt=''): + self.msg = msg + self.opt = opt + Exception.__init__(self, msg, opt) + + def __str__(self): + return self.msg + + +def gnu_getopt(args, shortopts, longopts=[]): + """getopt(args, options[, long_options]) -> opts, args + + This function works like getopt(), except that GNU style scanning + mode is used by default. This means that option and non-option + arguments may be intermixed. The getopt() function stops + processing options as soon as a non-option argument is + encountered. + + If the first character of the option string is `+', or if the + environment variable POSIXLY_CORRECT is set, then option + processing stops as soon as a non-option argument is encountered. + """ + + opts = [] + prog_args = [] + if type('') == type(longopts): + longopts = [longopts] + else: + longopts = list(longopts) + + # Allow options after non-option arguments? + all_options_first = False + if shortopts.startswith('+'): + shortopts = shortopts[1:] + all_options_first = True + + while args: + if args[0] == '--': + prog_args += args[1:] + break + + if args[0][:2] == '--': + opts, args = do_longs(opts, args[0][2:], longopts, args[1:]) + elif args[0][:1] == '-': + opts, args = do_shorts(opts, args[0][1:], shortopts, args[1:]) + else: + if all_options_first: + prog_args += args + break + else: + prog_args.append(args[0]) + args = args[1:] + + return opts, prog_args + +def do_longs(opts, opt, longopts, args): + try: + i = opt.index('=') + except ValueError: + optarg = None + else: + opt, optarg = opt[:i], opt[i + 1:] + + has_arg, opt = long_has_args(opt, longopts) + if has_arg: + if optarg is None: + if not args: + raise GetoptError('option --%s requires argument' % opt, opt) + optarg, args = args[0], args[1:] + elif optarg: + raise GetoptError('option --%s must not have an argument' % opt, opt) + opts.append(('--' + opt, optarg or '')) + return opts, args + +# Return: +# has_arg? +# full option name +def long_has_args(opt, longopts): + possibilities = [o for o in longopts if o.startswith(opt)] + if not possibilities: + raise GetoptError('option --%s not recognized' % opt, opt) + # Is there an exact match? + if opt in possibilities: + return False, opt + elif opt + '=' in possibilities: + return True, opt + # No exact match, so better be unique. 
+ if len(possibilities) > 1: + # XXX since possibilities contains all valid continuations, might be + # nice to work them into the error msg + raise GetoptError('option --%s not a unique prefix' % opt, opt) + assert len(possibilities) == 1 + unique_match = possibilities[0] + has_arg = unique_match.endswith('=') + if has_arg: + unique_match = unique_match[:-1] + return has_arg, unique_match + +def do_shorts(opts, optstring, shortopts, args): + while optstring != '': + opt, optstring = optstring[0], optstring[1:] + if short_has_arg(opt, shortopts): + if optstring == '': + if not args: + raise GetoptError('option -%s requires argument' % opt, + opt) + optstring, args = args[0], args[1:] + optarg, optstring = optstring, '' + else: + optarg = '' + opts.append(('-' + opt, optarg)) + return opts, args + +def short_has_arg(opt, shortopts): + for i in range(len(shortopts)): + if opt == shortopts[i] != ':': + return shortopts.startswith(':', i + 1) + raise GetoptError('option -%s not recognized' % opt, opt) + + +#======================================================================================================================= +# End getopt code +#======================================================================================================================= diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_imports_tipper.py b/ptvsd/pydevd/_pydev_bundle/_pydev_imports_tipper.py new file mode 100644 index 00000000..ac1c9823 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_imports_tipper.py @@ -0,0 +1,350 @@ +import inspect +import os.path +import sys + +from _pydev_bundle._pydev_tipper_common import do_find + +try: + xrange +except: + xrange = range + +#completion types. +TYPE_IMPORT = '0' +TYPE_CLASS = '1' +TYPE_FUNCTION = '2' +TYPE_ATTR = '3' +TYPE_BUILTIN = '4' +TYPE_PARAM = '5' + +def _imp(name, log=None): + try: + return __import__(name) + except: + if '.' in name: + sub = name[0:name.rfind('.')] + + if log is not None: + log.add_content('Unable to import', name, 'trying with', sub) + log.add_exception() + + return _imp(sub, log) + else: + s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path) + if log is not None: + log.add_content(s) + log.add_exception() + + raise ImportError(s) + + +IS_IPY = False +if sys.platform == 'cli': + IS_IPY = True + _old_imp = _imp + def _imp(name, log=None): + #We must add a reference in clr for .Net + import clr #@UnresolvedImport + initial_name = name + while '.' in name: + try: + clr.AddReference(name) + break #If it worked, that's OK. + except: + name = name[0:name.rfind('.')] + else: + try: + clr.AddReference(name) + except: + pass #That's OK (not dot net module). 
+ + return _old_imp(initial_name, log) + + + +def get_file(mod): + f = None + try: + f = inspect.getsourcefile(mod) or inspect.getfile(mod) + except: + if hasattr(mod, '__file__'): + f = mod.__file__ + if f.lower(f[-4:]) in ['.pyc', '.pyo']: + filename = f[:-4] + '.py' + if os.path.exists(filename): + f = filename + + return f + +def Find(name, log=None): + f = None + + mod = _imp(name, log) + parent = mod + foundAs = '' + + if inspect.ismodule(mod): + f = get_file(mod) + + components = name.split('.') + + old_comp = None + for comp in components[1:]: + try: + #this happens in the following case: + #we have mx.DateTime.mxDateTime.mxDateTime.pyd + #but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd + mod = getattr(mod, comp) + except AttributeError: + if old_comp != comp: + raise + + if inspect.ismodule(mod): + f = get_file(mod) + else: + if len(foundAs) > 0: + foundAs = foundAs + '.' + foundAs = foundAs + comp + + old_comp = comp + + return f, mod, parent, foundAs + +def search_definition(data): + '''@return file, line, col + ''' + + data = data.replace('\n', '') + if data.endswith('.'): + data = data.rstrip('.') + f, mod, parent, foundAs = Find(data) + try: + return do_find(f, mod), foundAs + except: + return do_find(f, parent), foundAs + + +def generate_tip(data, log=None): + data = data.replace('\n', '') + if data.endswith('.'): + data = data.rstrip('.') + + f, mod, parent, foundAs = Find(data, log) + #print_ >> open('temp.txt', 'w'), f + tips = generate_imports_tip_for_module(mod) + return f, tips + + +def check_char(c): + if c == '-' or c == '.': + return '_' + return c + +def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True): + ''' + @param obj_to_complete: the object from where we should get the completions + @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter + @param getattr: the way to get a given object from the obj_to_complete (used for the completer) + @param filter: a callable that receives the name and decides if it should be appended or not to the results + @return: list of tuples, so that each tuple represents a completion with: + name, doc, args, type (from the TYPE_* constants) + ''' + ret = [] + + if dir_comps is None: + dir_comps = dir(obj_to_complete) + if hasattr(obj_to_complete, '__dict__'): + dir_comps.append('__dict__') + if hasattr(obj_to_complete, '__class__'): + dir_comps.append('__class__') + + get_complete_info = True + + if len(dir_comps) > 1000: + #ok, we don't want to let our users wait forever... + #no complete info for you... + + get_complete_info = False + + dontGetDocsOn = (float, int, str, tuple, list) + for d in dir_comps: + + if d is None: + continue + + if not filter(d): + continue + + args = '' + + try: + try: + obj = getattr(obj_to_complete.__class__, d) + except: + obj = getattr(obj_to_complete, d) + except: #just ignore and get it without additional info + ret.append((d, '', args, TYPE_BUILTIN)) + else: + + if get_complete_info: + try: + retType = TYPE_BUILTIN + + #check if we have to get docs + getDoc = True + for class_ in dontGetDocsOn: + + if isinstance(obj, class_): + getDoc = False + break + + doc = '' + if getDoc: + #no need to get this info... 
too many constants are defined and + #makes things much slower (passing all that through sockets takes quite some time) + try: + doc = inspect.getdoc(obj) + if doc is None: + doc = '' + except: #may happen on jython when checking java classes (so, just ignore it) + doc = '' + + + if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj): + try: + args, vargs, kwargs, defaults = inspect.getargspec(obj) + + r = '' + for a in (args): + if len(r) > 0: + r = r + ', ' + r = r + str(a) + args = '(%s)' % (r) + except TypeError: + #ok, let's see if we can get the arguments from the doc + args, doc = signature_from_docstring(doc, getattr(obj, '__name__', None)) + + retType = TYPE_FUNCTION + + elif inspect.isclass(obj): + retType = TYPE_CLASS + + elif inspect.ismodule(obj): + retType = TYPE_IMPORT + + else: + retType = TYPE_ATTR + + + #add token and doc to return - assure only strings. + ret.append((d, doc, args, retType)) + + except: #just ignore and get it without aditional info + ret.append((d, '', args, TYPE_BUILTIN)) + + else: #get_complete_info == False + if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj): + retType = TYPE_FUNCTION + + elif inspect.isclass(obj): + retType = TYPE_CLASS + + elif inspect.ismodule(obj): + retType = TYPE_IMPORT + + else: + retType = TYPE_ATTR + #ok, no complete info, let's try to do this as fast and clean as possible + #so, no docs for this kind of information, only the signatures + ret.append((d, '', str(args), retType)) + + return ret + + +def signature_from_docstring(doc, obj_name): + args = '()' + try: + found = False + if len(doc) > 0: + if IS_IPY: + # Handle case where we have the situation below + # sort(self, object cmp, object key) + # sort(self, object cmp, object key, bool reverse) + # sort(self) + # sort(self, object cmp) + + # Or: sort(self: list, cmp: object, key: object) + # sort(self: list, cmp: object, key: object, reverse: bool) + # sort(self: list) + # sort(self: list, cmp: object) + if obj_name: + name = obj_name + '(' + + # Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line. 
+ lines = doc.splitlines() + if len(lines) == 1: + c = doc.count(name) + if c > 1: + doc = ('\n' + name).join(doc.split(name)) + + major = '' + for line in doc.splitlines(): + if line.startswith(name) and line.endswith(')'): + if len(line) > len(major): + major = line + if major: + args = major[major.index('('):] + found = True + + if not found: + i = doc.find('->') + if i < 0: + i = doc.find('--') + if i < 0: + i = doc.find('\n') + if i < 0: + i = doc.find('\r') + + if i > 0: + s = doc[0:i] + s = s.strip() + + # let's see if we have a docstring in the first line + if s[-1] == ')': + start = s.find('(') + if start >= 0: + end = s.find('[') + if end <= 0: + end = s.find(')') + if end <= 0: + end = len(s) + + args = s[start:end] + if not args[-1] == ')': + args = args + ')' + + # now, get rid of unwanted chars + l = len(args) - 1 + r = [] + for i in xrange(len(args)): + if i == 0 or i == l: + r.append(args[i]) + else: + r.append(check_char(args[i])) + + args = ''.join(r) + + if IS_IPY: + if args.startswith('(self:'): + i = args.find(',') + if i >= 0: + args = '(self' + args[i:] + else: + args = '(self)' + i = args.find(')') + if i > 0: + args = args[:i + 1] + + except: + pass + return args, doc diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py b/ptvsd/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py new file mode 100644 index 00000000..28d5f6b1 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py @@ -0,0 +1,504 @@ +try: + import StringIO +except: + import io as StringIO + +import traceback +from java.lang import StringBuffer #@UnresolvedImport +from java.lang import String #@UnresolvedImport +import java.lang #@UnresolvedImport +import sys +from _pydev_bundle._pydev_tipper_common import do_find + + +from org.python.core import PyReflectedFunction #@UnresolvedImport + +from org.python import core #@UnresolvedImport +from org.python.core import PyClass #@UnresolvedImport + +try: + xrange +except: + xrange = range + + +#completion types. +TYPE_IMPORT = '0' +TYPE_CLASS = '1' +TYPE_FUNCTION = '2' +TYPE_ATTR = '3' +TYPE_BUILTIN = '4' +TYPE_PARAM = '5' + +def _imp(name): + try: + return __import__(name) + except: + if '.' in name: + sub = name[0:name.rfind('.')] + return _imp(sub) + else: + s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path) + raise RuntimeError(s) + +import java.util +_java_rt_file = getattr(java.util, '__file__', None) + +def Find(name): + f = None + if name.startswith('__builtin__'): + if name == '__builtin__.str': + name = 'org.python.core.PyString' + elif name == '__builtin__.dict': + name = 'org.python.core.PyDictionary' + + mod = _imp(name) + parent = mod + foundAs = '' + + if hasattr(mod, '__file__'): + f = mod.__file__ + + + components = name.split('.') + old_comp = None + for comp in components[1:]: + try: + #this happens in the following case: + #we have mx.DateTime.mxDateTime.mxDateTime.pyd + #but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd + mod = getattr(mod, comp) + except AttributeError: + if old_comp != comp: + raise + + if hasattr(mod, '__file__'): + f = mod.__file__ + else: + if len(foundAs) > 0: + foundAs = foundAs + '.' + foundAs = foundAs + comp + + old_comp = comp + + if f is None and name.startswith('java.lang'): + # Hack: java.lang.__file__ is None on Jython 2.7 (whereas it pointed to rt.jar on Jython 2.5). 
+ f = _java_rt_file + + if f is not None: + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + return f, mod, parent, foundAs + +def format_param_class_name(paramClassName): + if paramClassName.startswith(''): + paramClassName = paramClassName[len(' + paramClassName = paramClassName.split('\'')[1] + except: + paramClassName = repr(paramTypesClass) #just in case something else happens... it will at least be visible + #if the parameter equals [C, it means it it a char array, so, let's change it + + a = format_param_class_name(paramClassName) + #a = a.replace('[]','Array') + #a = a.replace('Object', 'obj') + #a = a.replace('String', 's') + #a = a.replace('Integer', 'i') + #a = a.replace('Char', 'c') + #a = a.replace('Double', 'd') + args.append(a) #so we don't leave invalid code + + + info = Info(name, args=args, ret=ret) + #print_ info.basic_as_str() + infos.append(info) + + return 1, infos + except Exception: + s = StringIO.StringIO() + traceback.print_exc(file=s) + return 1, [Info(str('ERROR'), doc=s.getvalue())] + + return 0, None + +def ismodule(mod): + #java modules... do we have other way to know that? + if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \ + and hasattr(mod, '__name__'): + return 1 + + return isinstance(mod, core.PyModule) + + +def dir_obj(obj): + ret = [] + found = java.util.HashMap() + original = obj + if hasattr(obj, '__class__'): + if obj.__class__ == java.lang.Class: + + #get info about superclasses + classes = [] + classes.append(obj) + try: + c = obj.getSuperclass() + except TypeError: + #may happen on jython when getting the java.lang.Class class + c = obj.getSuperclass(obj) + + while c != None: + classes.append(c) + c = c.getSuperclass() + + #get info about interfaces + interfs = [] + for obj in classes: + try: + interfs.extend(obj.getInterfaces()) + except TypeError: + interfs.extend(obj.getInterfaces(obj)) + classes.extend(interfs) + + #now is the time when we actually get info on the declared methods and fields + for obj in classes: + try: + declaredMethods = obj.getDeclaredMethods() + except TypeError: + declaredMethods = obj.getDeclaredMethods(obj) + + try: + declaredFields = obj.getDeclaredFields() + except TypeError: + declaredFields = obj.getDeclaredFields(obj) + + for i in xrange(len(declaredMethods)): + name = declaredMethods[i].getName() + ret.append(name) + found.put(name, 1) + + for i in xrange(len(declaredFields)): + name = declaredFields[i].getName() + ret.append(name) + found.put(name, 1) + + + elif isclass(obj.__class__): + d = dir(obj.__class__) + for name in d: + ret.append(name) + found.put(name, 1) + + + #this simple dir does not always get all the info, that's why we have the part before + #(e.g.: if we do a dir on String, some methods that are from other interfaces such as + #charAt don't appear) + d = dir(original) + for name in d: + if found.get(name) != 1: + ret.append(name) + + return ret + + +def format_arg(arg): + '''formats an argument to be shown + ''' + + s = str(arg) + dot = s.rfind('.') + if dot >= 0: + s = s[dot + 1:] + + s = s.replace(';', '') + s = s.replace('[]', 'Array') + if len(s) > 0: + c = s[0].lower() + s = c + s[1:] + + return s + + + +def search_definition(data): + '''@return file, line, col + ''' + + data = data.replace('\n', '') + if data.endswith('.'): + data = data.rstrip('.') + f, mod, parent, foundAs = Find(data) + try: + return do_find(f, mod), foundAs + except: + return do_find(f, parent), foundAs + + +def 
generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True): + ''' + @param obj_to_complete: the object from where we should get the completions + @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter + @param getattr: the way to get a given object from the obj_to_complete (used for the completer) + @param filter: a callable that receives the name and decides if it should be appended or not to the results + @return: list of tuples, so that each tuple represents a completion with: + name, doc, args, type (from the TYPE_* constants) + ''' + ret = [] + + if dir_comps is None: + dir_comps = dir_obj(obj_to_complete) + + for d in dir_comps: + + if d is None: + continue + + if not filter(d): + continue + + args = '' + doc = '' + retType = TYPE_BUILTIN + + try: + obj = getattr(obj_to_complete, d) + except (AttributeError, java.lang.NoClassDefFoundError): + #jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if + #we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug) + #for that we need a custom classloader... we have references from it in the below places: + # + #http://mindprod.com/jgloss/classloader.html + #http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html + #http://freshmeat.net/articles/view/1643/ + # + #note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath + #before the run, everything goes fine. + # + #The code below ilustrates what I mean... + # + #import sys + #sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" ) + # + #import junit.framework + #print_ dir(junit.framework) #shows the TestCase class here + # + #import junit.framework.TestCase + # + #raises the error: + #Traceback (innermost last): + # File "", line 1, in ? + #ImportError: No module named TestCase + # + #whereas if we had added the jar to the classpath before, everything would be fine by now... + + ret.append((d, '', '', retType)) + #that's ok, private things cannot be gotten... + continue + else: + + isMet = ismethod(obj) + if isMet[0] and isMet[1]: + info = isMet[1][0] + try: + args, vargs, kwargs = info.args, info.varargs, info.kwargs + doc = info.get_as_doc() + r = '' + for a in (args): + if len(r) > 0: + r += ', ' + r += format_arg(a) + args = '(%s)' % (r) + except TypeError: + traceback.print_exc() + args = '()' + + retType = TYPE_FUNCTION + + elif isclass(obj): + retType = TYPE_CLASS + + elif ismodule(obj): + retType = TYPE_IMPORT + + #add token and doc to return - assure only strings. 
+ ret.append((d, doc, args, retType)) + + + return ret + + +if __name__ == "__main__": + sys.path.append(r'D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar') + sys.stdout.write('%s\n' % Find('junit.framework.TestCase')) diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_log.py b/ptvsd/pydevd/_pydev_bundle/_pydev_log.py new file mode 100644 index 00000000..853348b2 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_log.py @@ -0,0 +1,28 @@ +import traceback +import sys +try: + import StringIO +except: + import io as StringIO #Python 3.0 + + +class Log: + + def __init__(self): + self._contents = [] + + def add_content(self, *content): + self._contents.append(' '.join(content)) + + def add_exception(self): + s = StringIO.StringIO() + exc_info = sys.exc_info() + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s) + self._contents.append(s.getvalue()) + + + def get_contents(self): + return '\n'.join(self._contents) + + def clear_log(self): + del self._contents[:] \ No newline at end of file diff --git a/ptvsd/pydevd/_pydev_bundle/_pydev_tipper_common.py b/ptvsd/pydevd/_pydev_bundle/_pydev_tipper_common.py new file mode 100644 index 00000000..79ce4988 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/_pydev_tipper_common.py @@ -0,0 +1,67 @@ +try: + import inspect +except: + try: + from _pydev_imps import _pydev_inspect as inspect + except: + import traceback;traceback.print_exc() #Ok, no inspect available (search will not work) + +try: + import re +except: + try: + import sre as re # for older versions + except: + import traceback;traceback.print_exc() #Ok, no inspect available (search will not work) + + +from _pydevd_bundle.pydevd_constants import xrange + +def do_find(f, mod): + import linecache + if inspect.ismodule(mod): + return f, 0, 0 + + lines = linecache.getlines(f) + + if inspect.isclass(mod): + name = mod.__name__ + pat = re.compile(r'^\s*class\s*' + name + r'\b') + for i in xrange(len(lines)): + if pat.match(lines[i]): + return f, i, 0 + + return f, 0, 0 + + if inspect.ismethod(mod): + mod = mod.im_func + + if inspect.isfunction(mod): + try: + mod = mod.func_code + except AttributeError: + mod = mod.__code__ #python 3k + + if inspect.istraceback(mod): + mod = mod.tb_frame + + if inspect.isframe(mod): + mod = mod.f_code + + if inspect.iscode(mod): + if not hasattr(mod, 'co_filename'): + return None, 0, 0 + + if not hasattr(mod, 'co_firstlineno'): + return mod.co_filename, 0, 0 + + lnum = mod.co_firstlineno + pat = re.compile(r'^(\s*def\s)|(.*(? 
0: + if pat.match(lines[lnum]): + break + lnum -= 1 + + return f, lnum, 0 + + raise RuntimeError('Do not know about: ' + f + ' ' + str(mod)) diff --git a/ptvsd/pydevd/_pydev_bundle/fix_getpass.py b/ptvsd/pydevd/_pydev_bundle/fix_getpass.py new file mode 100644 index 00000000..2bb2ab1f --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/fix_getpass.py @@ -0,0 +1,13 @@ +def fix_getpass(): + try: + import getpass + except ImportError: + return #If we can't import it, we can't fix it + import warnings + fallback = getattr(getpass, 'fallback_getpass', None) # >= 2.6 + if not fallback: + fallback = getpass.default_getpass # <= 2.5 @UndefinedVariable + getpass.getpass = fallback + if hasattr(getpass, 'GetPassWarning'): + warnings.simplefilter("ignore", category=getpass.GetPassWarning) + diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_console_utils.py b/ptvsd/pydevd/_pydev_bundle/pydev_console_utils.py new file mode 100644 index 00000000..337a1fa9 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_console_utils.py @@ -0,0 +1,616 @@ +import os +import sys +import traceback +from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec +from _pydev_bundle._pydev_calltip_util import get_description +from _pydev_imps._pydev_saved_modules import thread +from _pydevd_bundle import pydevd_vars +from _pydevd_bundle import pydevd_xml +from _pydevd_bundle.pydevd_constants import IS_JYTHON, dict_iter_items +from _pydevd_bundle.pydevd_utils import to_string + + +# ======================================================================================================================= +# Null +# ======================================================================================================================= +class Null: + """ + Gotten from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205 + """ + + def __init__(self, *args, **kwargs): + return None + + def __call__(self, *args, **kwargs): + return self + + def __getattr__(self, mname): + return self + + def __setattr__(self, name, value): + return self + + def __delattr__(self, name): + return self + + def __repr__(self): + return "" + + def __str__(self): + return "Null" + + def __len__(self): + return 0 + + def __getitem__(self): + return self + + def __setitem__(self, *args, **kwargs): + pass + + def write(self, *args, **kwargs): + pass + + def __nonzero__(self): + return 0 + + +# ======================================================================================================================= +# BaseStdIn +# ======================================================================================================================= +class BaseStdIn: + def __init__(self, original_stdin=sys.stdin, *args, **kwargs): + try: + self.encoding = sys.stdin.encoding + except: + # Not sure if it's available in all Python versions... + pass + self.original_stdin = original_stdin + + try: + self.errors = sys.stdin.errors # Who knew? sys streams have an errors attribute! + except: + #Not sure if it's available in all Python versions... + pass + + def readline(self, *args, **kwargs): + # sys.stderr.write('Cannot readline out of the console evaluation\n') -- don't show anything + # This could happen if the user had done input('enter number).<-- upon entering this, that message would appear, + # which is not something we want. 
+ return '\n' + + def write(self, *args, **kwargs): + pass # not available StdIn (but it can be expected to be in the stream interface) + + def flush(self, *args, **kwargs): + pass # not available StdIn (but it can be expected to be in the stream interface) + + def read(self, *args, **kwargs): + # in the interactive interpreter, a read and a readline are the same. + return self.readline() + + def close(self, *args, **kwargs): + pass # expected in StdIn + + def __getattr__(self, item): + # it's called if the attribute wasn't found + if hasattr(self.original_stdin, item): + return getattr(self.original_stdin, item) + raise AttributeError("%s has no attribute %s" % (self.original_stdin, item)) + + +# ======================================================================================================================= +# StdIn +# ======================================================================================================================= +class StdIn(BaseStdIn): + ''' + Object to be added to stdin (to emulate it as non-blocking while the next line arrives) + ''' + + def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin): + BaseStdIn.__init__(self, original_stdin) + self.interpreter = interpreter + self.client_port = client_port + self.host = host + + def readline(self, *args, **kwargs): + # Ok, callback into the client to get the new input + try: + server = xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port)) + requested_input = server.RequestInput() + if not requested_input: + return '\n' #Yes, a readline must return something (otherwise we can get an EOFError on the input() call). + else: + # readline should end with '\n' (not doing so makes IPython 5 remove the last *valid* character). + requested_input += '\n' + return requested_input + except KeyboardInterrupt: + raise # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console + except: + return '\n' + + def close(self, *args, **kwargs): + pass # expected in StdIn + +#======================================================================================================================= +# DebugConsoleStdIn +#======================================================================================================================= +class DebugConsoleStdIn(BaseStdIn): + ''' + Object to be added to stdin (to emulate it as non-blocking while the next line arrives) + ''' + + def __init__(self, dbg, original_stdin): + BaseStdIn.__init__(self, original_stdin) + self.debugger = dbg + + def __pydev_run_command(self, is_started): + try: + cmd = self.debugger.cmd_factory.make_input_requested_message(is_started) + self.debugger.writer.add_command(cmd) + except Exception: + import traceback + traceback.print_exc() + return '\n' + + def readline(self, *args, **kwargs): + # Notify Java side about input and call original function + self.__pydev_run_command(True) + result = self.original_stdin.readline(*args, **kwargs) + self.__pydev_run_command(False) + return result + + +class CodeFragment: + def __init__(self, text, is_single_line=True): + self.text = text + self.is_single_line = is_single_line + + def append(self, code_fragment): + self.text = self.text + "\n" + code_fragment.text + if not code_fragment.is_single_line: + self.is_single_line = False + + +# ======================================================================================================================= +# BaseInterpreterInterface +# 
======================================================================================================================= +class BaseInterpreterInterface: + def __init__(self, mainThread): + self.mainThread = mainThread + self.interruptable = False + self.exec_queue = _queue.Queue(0) + self.buffer = None + + def need_more_for_code(self, source): + # PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations + + # Strangely even the IPython console is_complete said it was complete + # even with a continuation char at the end. + if source.endswith('\\'): + return True + + if hasattr(self.interpreter, 'is_complete'): + return not self.interpreter.is_complete(source) + try: + # At this point, it should always be single. + # If we don't do this, things as: + # + # for i in range(10): print(i) + # + # (in a single line) don't work. + # Note that it won't give an error and code will be None (so, it'll + # use execMultipleLines in the next call in this case). + symbol = 'single' + code = self.interpreter.compile(source, '', symbol) + except (OverflowError, SyntaxError, ValueError): + # Case 1 + return False + if code is None: + # Case 2 + return True + + # Case 3 + return False + + def need_more(self, code_fragment): + if self.buffer is None: + self.buffer = code_fragment + else: + self.buffer.append(code_fragment) + + return self.need_more_for_code(self.buffer.text) + + def create_std_in(self, debugger=None, original_std_in=None): + if debugger is None: + return StdIn(self, self.host, self.client_port, original_stdin=original_std_in) + else: + return DebugConsoleStdIn(dbg=debugger, original_stdin=original_std_in) + + def add_exec(self, code_fragment, debugger=None): + original_in = sys.stdin + try: + help = None + if 'pydoc' in sys.modules: + pydoc = sys.modules['pydoc'] # Don't import it if it still is not there. + + if hasattr(pydoc, 'help'): + # You never know how will the API be changed, so, let's code defensively here + help = pydoc.help + if not hasattr(help, 'input'): + help = None + except: + # Just ignore any error here + pass + + more = False + try: + sys.stdin = self.create_std_in(debugger, original_in) + try: + if help is not None: + # This will enable the help() function to work. + try: + try: + help.input = sys.stdin + except AttributeError: + help._input = sys.stdin + except: + help = None + if not self._input_error_printed: + self._input_error_printed = True + sys.stderr.write('\nError when trying to update pydoc.help.input\n') + sys.stderr.write('(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n') + traceback.print_exc() + + try: + self.start_exec() + if hasattr(self, 'debugger'): + import pydevd_tracing + pydevd_tracing.SetTrace(self.debugger.trace_dispatch) + + more = self.do_add_exec(code_fragment) + + if hasattr(self, 'debugger'): + import pydevd_tracing + pydevd_tracing.SetTrace(None) + + self.finish_exec(more) + finally: + if help is not None: + try: + try: + help.input = original_in + except AttributeError: + help._input = original_in + except: + pass + + finally: + sys.stdin = original_in + except SystemExit: + raise + except: + traceback.print_exc() + + return more + + def do_add_exec(self, codeFragment): + ''' + Subclasses should override. + + @return: more (True if more input is needed to complete the statement and False if the statement is complete). + ''' + raise NotImplementedError() + + def get_namespace(self): + ''' + Subclasses should override. + + @return: dict with namespace. 
+ ''' + raise NotImplementedError() + + def __resolve_reference__(self, text): + """ + + :type text: str + """ + obj = None + if '.' not in text: + try: + obj = self.get_namespace()[text] + except KeyError: + pass + + if obj is None: + try: + obj = self.get_namespace()['__builtins__'][text] + except: + pass + + if obj is None: + try: + obj = getattr(self.get_namespace()['__builtins__'], text, None) + except: + pass + + else: + try: + last_dot = text.rindex('.') + parent_context = text[0:last_dot] + res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace()) + obj = getattr(res, text[last_dot + 1:]) + except: + pass + return obj + + def getDescription(self, text): + try: + obj = self.__resolve_reference__(text) + if obj is None: + return '' + return get_description(obj) + except: + return '' + + def do_exec_code(self, code, is_single_line): + try: + code_fragment = CodeFragment(code, is_single_line) + more = self.need_more(code_fragment) + if not more: + code_fragment = self.buffer + self.buffer = None + self.exec_queue.put(code_fragment) + + return more + except: + traceback.print_exc() + return False + + def execLine(self, line): + return self.do_exec_code(line, True) + + def execMultipleLines(self, lines): + if IS_JYTHON: + more = False + for line in lines.split('\n'): + more = self.do_exec_code(line, True) + return more + else: + return self.do_exec_code(lines, False) + + def interrupt(self): + self.buffer = None # Also clear the buffer when it's interrupted. + try: + if self.interruptable: + called = False + try: + # Fix for #PyDev-500: Console interrupt can't interrupt on sleep + import os + import signal + if os.name == 'posix': + # On Linux we can't interrupt 0 as in Windows because it's + # actually owned by a process -- on the good side, signals + # work much better on Linux! + os.kill(os.getpid(), signal.SIGINT) + called = True + + elif os.name == 'nt': + # Stupid windows: sending a Ctrl+C to a process given its pid + # is absurdly difficult. + # There are utilities to make it work such as + # http://www.latenighthacking.com/projects/2003/sendSignal/ + # but fortunately for us, it seems Python does allow a CTRL_C_EVENT + # for the current process in Windows if pid 0 is passed... if we needed + # to send a signal to another process the approach would be + # much more difficult. + # Still, note that CTRL_C_EVENT is only Python 2.7 onwards... + # Also, this doesn't seem to be documented anywhere!? (stumbled + # upon it by chance after digging quite a lot). + os.kill(0, signal.CTRL_C_EVENT) + called = True + except: + # Many things to go wrong (from CTRL_C_EVENT not being there + # to failing import signal)... if that's the case, ask for + # forgiveness and go on to the approach which will interrupt + # the main thread (but it'll only work when it's executing some Python + # code -- not on sleep() for instance). 
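+                # Swallow the error and fall through to the thread.interrupt_main() fallback below.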
+ pass + + if not called: + if hasattr(thread, 'interrupt_main'): # Jython doesn't have it + thread.interrupt_main() + else: + self.mainThread._thread.interrupt() # Jython + self.finish_exec(False) + return True + except: + traceback.print_exc() + return False + + def close(self): + sys.exit(0) + + def start_exec(self): + self.interruptable = True + + def get_server(self): + if getattr(self, 'host', None) is not None: + return xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port)) + else: + return None + + server = property(get_server) + + def ShowConsole(self): + server = self.get_server() + if server is not None: + server.ShowConsole() + + def finish_exec(self, more): + self.interruptable = False + + server = self.get_server() + + if server is not None: + return server.NotifyFinished(more) + else: + return True + + def getFrame(self): + hidden_ns = self.get_ipython_hidden_vars_dict() + xml = "" + xml += pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns) + xml += "" + + return xml + + def getVariable(self, attributes): + xml = "" + valDict = pydevd_vars.resolve_var(self.get_namespace(), attributes) + if valDict is None: + valDict = {} + + keys = valDict.keys() + + for k in keys: + xml += pydevd_vars.var_to_xml(valDict[k], to_string(k)) + + xml += "" + + return xml + + def getArray(self, attr, roffset, coffset, rows, cols, format): + name = attr.split("\t")[-1] + array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace()) + return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format) + + def evaluate(self, expression): + xml = "" + result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace()) + + xml += pydevd_vars.var_to_xml(result, expression) + + xml += "" + + return xml + + def changeVariable(self, attr, value): + def do_change_variable(): + Exec('%s=%s' % (attr, value), self.get_namespace(), self.get_namespace()) + + # Important: it has to be really enabled in the main thread, so, schedule + # it to run in the main thread. + self.exec_queue.put(do_change_variable) + + def _findFrame(self, thread_id, frame_id): + ''' + Used to show console with variables connection. + Always return a frame where the locals map to our internal namespace. + ''' + VIRTUAL_FRAME_ID = "1" # matches PyStackFrameConsole.java + VIRTUAL_CONSOLE_ID = "console_main" # matches PyThreadConsole.java + if thread_id == VIRTUAL_CONSOLE_ID and frame_id == VIRTUAL_FRAME_ID: + f = FakeFrame() + f.f_globals = {} # As globals=locals here, let's simply let it empty (and save a bit of network traffic). + f.f_locals = self.get_namespace() + return f + else: + return self.orig_find_frame(thread_id, frame_id) + + def connectToDebugger(self, debuggerPort, debugger_options=None): + ''' + Used to show console with variables connection. + Mainly, monkey-patches things in the debugger structure so that the debugger protocol works. 
+ ''' + + if debugger_options is None: + debugger_options = {} + env_key = "PYDEVD_EXTRA_ENVS" + if env_key in debugger_options: + for (env_name, value) in dict_iter_items(debugger_options[env_key]): + os.environ[env_name] = value + del debugger_options[env_key] + def do_connect_to_debugger(): + try: + # Try to import the packages needed to attach the debugger + import pydevd + from _pydev_imps._pydev_saved_modules import threading + + except: + # This happens on Jython embedded in host eclipse + traceback.print_exc() + sys.stderr.write('pydevd is not available, cannot connect\n', ) + + from _pydev_bundle import pydev_localhost + threading.currentThread().__pydevd_id__ = "console_main" + + self.orig_find_frame = pydevd_vars.find_frame + pydevd_vars.find_frame = self._findFrame + + self.debugger = pydevd.PyDB() + try: + pydevd.apply_debugger_options(debugger_options) + self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort) + self.debugger.prepare_to_run() + import pydevd_tracing + pydevd_tracing.SetTrace(None) + except: + traceback.print_exc() + sys.stderr.write('Failed to connect to target debugger.\n') + + # Register to process commands when idle + self.debugrunning = False + try: + import pydevconsole + pydevconsole.set_debug_hook(self.debugger.process_internal_commands) + except: + traceback.print_exc() + sys.stderr.write('Version of Python does not support debuggable Interactive Console.\n') + + # Important: it has to be really enabled in the main thread, so, schedule + # it to run in the main thread. + self.exec_queue.put(do_connect_to_debugger) + + return ('connect complete',) + + def hello(self, input_str): + # Don't care what the input string is + return ("Hello eclipse",) + + def enableGui(self, guiname): + ''' Enable the GUI specified in guiname (see inputhook for list). + As with IPython, enabling multiple GUIs isn't an error, but + only the last one's main loop runs and it may not work + ''' + def do_enable_gui(): + from _pydev_bundle.pydev_versioncheck import versionok_for_gui + if versionok_for_gui(): + try: + from pydev_ipython.inputhook import enable_gui + enable_gui(guiname) + except: + sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname) + traceback.print_exc() + elif guiname not in ['none', '', None]: + # Only print a warning if the guiname was going to do something + sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname) + # Return value does not matter, so return back what was sent + return guiname + + # Important: it has to be really enabled in the main thread, so, schedule + # it to run in the main thread. + self.exec_queue.put(do_enable_gui) + + def get_ipython_hidden_vars_dict(self): + return None + + +# ======================================================================================================================= +# FakeFrame +# ======================================================================================================================= +class FakeFrame: + ''' + Used to show console with variables connection. + A class to be used as a mock of a frame. 
+ ''' diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_import_hook.py b/ptvsd/pydevd/_pydev_bundle/pydev_import_hook.py new file mode 100644 index 00000000..2f8e3f22 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_import_hook.py @@ -0,0 +1,36 @@ + +import sys +from types import ModuleType + + +class ImportHookManager(ModuleType): + def __init__(self, name, system_import): + ModuleType.__init__(self, name) + self._system_import = system_import + self._modules_to_patch = {} + + def add_module_name(self, module_name, activate_function): + self._modules_to_patch[module_name] = activate_function + + def do_import(self, name, *args, **kwargs): + activate_func = None + if name in self._modules_to_patch: + activate_func = self._modules_to_patch.pop(name) + + module = self._system_import(name, *args, **kwargs) + try: + if activate_func: + activate_func() #call activate function + except: + sys.stderr.write("Matplotlib support failed\n") + return module + +if sys.version_info[0] >= 3: + import builtins # py3 +else: + import __builtin__ as builtins + +import_hook_manager = ImportHookManager(__name__ + '.import_hook', builtins.__import__) +builtins.__import__ = import_hook_manager.do_import +sys.modules[import_hook_manager.__name__] = import_hook_manager +del builtins \ No newline at end of file diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_imports.py b/ptvsd/pydevd/_pydev_bundle/pydev_imports.py new file mode 100644 index 00000000..f40030b8 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_imports.py @@ -0,0 +1,60 @@ +from _pydevd_bundle.pydevd_constants import USE_LIB_COPY, izip + + +try: + try: + if USE_LIB_COPY: + from _pydev_imps._pydev_saved_modules import xmlrpclib + else: + import xmlrpclib + except ImportError: + import xmlrpc.client as xmlrpclib +except ImportError: + from _pydev_imps import _pydev_xmlrpclib as xmlrpclib + + +try: + try: + if USE_LIB_COPY: + from _pydev_imps._pydev_saved_modules import _pydev_SimpleXMLRPCServer + from _pydev_SimpleXMLRPCServer import SimpleXMLRPCServer + else: + from SimpleXMLRPCServer import SimpleXMLRPCServer + except ImportError: + from xmlrpc.server import SimpleXMLRPCServer +except ImportError: + from _pydev_imps._pydev_SimpleXMLRPCServer import SimpleXMLRPCServer + + + +try: + from StringIO import StringIO +except ImportError: + from io import StringIO + + +try: + execfile=execfile #Not in Py3k +except NameError: + from _pydev_imps._pydev_execfile import execfile + + +try: + if USE_LIB_COPY: + from _pydev_imps._pydev_saved_modules import _queue + else: + import Queue as _queue +except: + import queue as _queue #@UnresolvedImport + + +try: + from _pydevd_bundle.pydevd_exec import Exec +except: + from _pydevd_bundle.pydevd_exec2 import Exec + +try: + from urllib import quote, quote_plus, unquote_plus +except: + from urllib.parse import quote, quote_plus, unquote_plus #@UnresolvedImport + diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_ipython_console.py b/ptvsd/pydevd/_pydev_bundle/pydev_ipython_console.py new file mode 100644 index 00000000..549c9980 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_ipython_console.py @@ -0,0 +1,92 @@ +import sys +from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface + +import os +import traceback + +# Uncomment to force PyDev standard shell. 
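+# (if the import of this module fails, the caller is expected to fall back to the plain, non-IPython console)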
+# raise ImportError() + +from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend +from _pydevd_bundle.pydevd_constants import dict_iter_items + +#======================================================================================================================= +# InterpreterInterface +#======================================================================================================================= +class InterpreterInterface(BaseInterpreterInterface): + ''' + The methods in this class should be registered in the xml-rpc server. + ''' + + def __init__(self, host, client_port, mainThread, show_banner=True): + BaseInterpreterInterface.__init__(self, mainThread) + self.client_port = client_port + self.host = host + self.interpreter = get_pydev_frontend(host, client_port, show_banner=show_banner) + self._input_error_printed = False + self.notification_succeeded = False + self.notification_tries = 0 + self.notification_max_tries = 3 + + self.notify_about_magic() + + def get_greeting_msg(self): + return self.interpreter.get_greeting_msg() + + def do_add_exec(self, codeFragment): + self.notify_about_magic() + if (codeFragment.text.rstrip().endswith('??')): + print('IPython-->') + try: + res = bool(self.interpreter.add_exec(codeFragment.text)) + finally: + if (codeFragment.text.rstrip().endswith('??')): + print('<--IPython') + + return res + + + def get_namespace(self): + return self.interpreter.get_namespace() + + + def getCompletions(self, text, act_tok): + return self.interpreter.getCompletions(text, act_tok) + + def close(self): + sys.exit(0) + + def notify_about_magic(self): + if not self.notification_succeeded: + self.notification_tries+=1 + if self.notification_tries>self.notification_max_tries: + return + completions = self.getCompletions("%", "%") + magic_commands = [x[0] for x in completions] + + server = self.get_server() + + if server is not None: + try: + server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic()) + self.notification_succeeded = True + except : + self.notification_succeeded = False + + def get_ipython_hidden_vars_dict(self): + try: + useful_ipython_vars = ['_', '__'] + if hasattr(self.interpreter, 'ipython') and hasattr(self.interpreter.ipython, 'user_ns_hidden'): + user_ns_hidden = self.interpreter.ipython.user_ns_hidden + if isinstance(user_ns_hidden, dict): + # Since IPython 2 dict `user_ns_hidden` contains hidden variables and values + user_hidden_dict = user_ns_hidden + else: + # In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables + user_hidden_dict = dict([(key, val) for key, val in dict_iter_items(self.interpreter.ipython.user_ns) + if key in user_ns_hidden]) + return dict([(key, val) for key, val in dict_iter_items(user_hidden_dict) if key not in useful_ipython_vars]) + except: + # Getting IPython variables shouldn't break loading frame variables + traceback.print_exc() + diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_ipython_console_011.py b/ptvsd/pydevd/_pydev_bundle/pydev_ipython_console_011.py new file mode 100644 index 00000000..b2c21427 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_ipython_console_011.py @@ -0,0 +1,508 @@ +# TODO that would make IPython integration better +# - show output other times then when enter was pressed +# - support proper exit to allow IPython to cleanup (e.g. 
temp files created with %edit) +# - support Ctrl-D (Ctrl-Z on Windows) +# - use IPython (numbered) prompts in PyDev +# - better integration of IPython and PyDev completions +# - some of the semantics on handling the code completion are not correct: +# eg: Start a line with % and then type c should give %cd as a completion by it doesn't +# however type %c and request completions and %cd is given as an option +# eg: Completing a magic when user typed it without the leading % causes the % to be inserted +# to the left of what should be the first colon. +"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend + for IPython 0.11 to 1.0+. +""" + +from __future__ import print_function + +import os +import codeop + +from IPython.core.error import UsageError +from IPython.core.completer import IPCompleter +from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC +from IPython.core.usage import default_banner_parts +from IPython.utils.strdispatch import StrDispatch +import IPython.core.release as IPythonRelease +try: + from IPython.terminal.interactiveshell import TerminalInteractiveShell +except ImportError: + # Versions of IPython [0.11,1.0) had an extra hierarchy level + from IPython.frontend.terminal.interactiveshell import TerminalInteractiveShell +try: + from traitlets import CBool, Unicode +except ImportError: + from IPython.utils.traitlets import CBool, Unicode +from IPython.core import release + +from _pydev_bundle.pydev_imports import xmlrpclib + +default_pydev_banner_parts = default_banner_parts + +default_pydev_banner = ''.join(default_pydev_banner_parts) + +def show_in_pager(self, strng, *args, **kwargs): + """ Run a string through pager """ + # On PyDev we just output the string, there are scroll bars in the console + # to handle "paging". This is the same behaviour as when TERM==dump (see + # page.py) + print(strng) + +def create_editor_hook(pydev_host, pydev_client_port): + + def call_editor(filename, line=0, wait=True): + """ Open an editor in PyDev """ + if line is None: + line = 0 + + # Make sure to send an absolution path because unlike most editor hooks + # we don't launch a process. 
This is more like what happens in the zmqshell + filename = os.path.abspath(filename) + + # import sys + # sys.__stderr__.write('Calling editor at: %s:%s\n' % (pydev_host, pydev_client_port)) + + # Tell PyDev to open the editor + server = xmlrpclib.Server('http://%s:%s' % (pydev_host, pydev_client_port)) + server.IPythonEditor(filename, str(line)) + + if wait: + try: + raw_input("Press Enter when done editing:") + except NameError: + input("Press Enter when done editing:") + return call_editor + + + +class PyDevIPCompleter(IPCompleter): + + def __init__(self, *args, **kwargs): + """ Create a Completer that reuses the advanced completion support of PyDev + in addition to the completion support provided by IPython """ + IPCompleter.__init__(self, *args, **kwargs) + # Use PyDev for python matches, see getCompletions below + self.matchers.remove(self.python_matches) + +class PyDevTerminalInteractiveShell(TerminalInteractiveShell): + banner1 = Unicode(default_pydev_banner, config=True, + help="""The part of the banner to be printed before the profile""" + ) + + # TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd) + # for now, just disable term_title + term_title = CBool(False) + + # Note in version 0.11 there is no guard in the IPython code about displaying a + # warning, so with 0.11 you get: + # WARNING: Readline services not available or not loaded. + # WARNING: The auto-indent feature requires the readline library + # Disable readline, readline type code is all handled by PyDev (on Java side) + readline_use = CBool(False) + # autoindent has no meaning in PyDev (PyDev always handles that on the Java side), + # and attempting to enable it will print a warning in the absence of readline. + autoindent = CBool(False) + # Force console to not give warning about color scheme choice and default to NoColor. + # TODO It would be nice to enable colors in PyDev but: + # - The PyDev Console (Eclipse Console) does not support the full range of colors, so the + # effect isn't as nice anyway at the command line + # - If done, the color scheme should default to LightBG, but actually be dependent on + # any settings the user has (such as if a dark theme is in use, then Linux is probably + # a better theme). + colors_force = CBool(True) + colors = Unicode("NoColor") + # Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and + # the `simple_prompt` flag is needed + simple_prompt = CBool(True) + + # In the PyDev Console, GUI control is done via hookable XML-RPC server + @staticmethod + def enable_gui(gui=None, app=None): + """Switch amongst GUI input hooks by name. 
+ """ + # Deferred import + from pydev_ipython.inputhook import enable_gui as real_enable_gui + try: + return real_enable_gui(gui, app) + except ValueError as e: + raise UsageError("%s" % e) + + #------------------------------------------------------------------------- + # Things related to hooks + #------------------------------------------------------------------------- + + def init_hooks(self): + super(PyDevTerminalInteractiveShell, self).init_hooks() + self.set_hook('show_in_pager', show_in_pager) + + #------------------------------------------------------------------------- + # Things related to exceptions + #------------------------------------------------------------------------- + + def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None, + exception_only=False): + # IPython does a lot of clever stuff with Exceptions. However mostly + # it is related to IPython running in a terminal instead of an IDE. + # (e.g. it prints out snippets of code around the stack trace) + # PyDev does a lot of clever stuff too, so leave exception handling + # with default print_exc that PyDev can parse and do its clever stuff + # with (e.g. it puts links back to the original source code) + import traceback;traceback.print_exc() + + + #------------------------------------------------------------------------- + # Things related to text completion + #------------------------------------------------------------------------- + + # The way to construct an IPCompleter changed in most versions, + # so we have a custom, per version implementation of the construction + + def _new_completer_011(self): + return PyDevIPCompleter(self, + self.user_ns, + self.user_global_ns, + self.readline_omit__names, + self.alias_manager.alias_table, + self.has_readline) + + + def _new_completer_012(self): + completer = PyDevIPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + alias_table=self.alias_manager.alias_table, + use_readline=self.has_readline, + config=self.config, + ) + return completer + + + def _new_completer_100(self): + completer = PyDevIPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + alias_table=self.alias_manager.alias_table, + use_readline=self.has_readline, + parent=self, + ) + return completer + + def _new_completer_234(self): + # correct for IPython versions 2.x, 3.x, 4.x + completer = PyDevIPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + use_readline=self.has_readline, + parent=self, + ) + return completer + + def _new_completer_500(self): + completer = PyDevIPCompleter(shell=self, + namespace=self.user_ns, + global_namespace=self.user_global_ns, + use_readline=False, + parent=self + ) + return completer + + def add_completer_hooks(self): + from IPython.core.completerlib import module_completer, magic_run_completer, cd_completer + try: + from IPython.core.completerlib import reset_completer + except ImportError: + # reset_completer was added for rel-0.13 + reset_completer = None + self.configurables.append(self.Completer) + + # Add custom completers to the basic ones built into IPCompleter + sdisp = self.strdispatchers.get('complete_command', StrDispatch()) + self.strdispatchers['complete_command'] = sdisp + self.Completer.custom_completers = sdisp + + self.set_hook('complete_command', module_completer, str_key = 'import') + self.set_hook('complete_command', module_completer, str_key = 'from') + self.set_hook('complete_command', magic_run_completer, str_key = '%run') + 
self.set_hook('complete_command', cd_completer, str_key = '%cd') + if reset_completer: + self.set_hook('complete_command', reset_completer, str_key = '%reset') + + def init_completer(self): + """Initialize the completion machinery. + + This creates a completer that provides the completions that are + IPython specific. We use this to supplement PyDev's core code + completions. + """ + # PyDev uses its own completer and custom hooks so that it uses + # most completions from PyDev's core completer which provides + # extra information. + # See getCompletions for where the two sets of results are merged + + if IPythonRelease._version_major >= 5: + self.Completer = self._new_completer_500() + elif IPythonRelease._version_major >= 2: + self.Completer = self._new_completer_234() + elif IPythonRelease._version_major >= 1: + self.Completer = self._new_completer_100() + elif IPythonRelease._version_minor >= 12: + self.Completer = self._new_completer_012() + else: + self.Completer = self._new_completer_011() + + self.add_completer_hooks() + + if IPythonRelease._version_major <= 3: + # Only configure readline if we truly are using readline. IPython can + # do tab-completion over the network, in GUIs, etc, where readline + # itself may be absent + if self.has_readline: + self.set_readline_completer() + + #------------------------------------------------------------------------- + # Things related to aliases + #------------------------------------------------------------------------- + + def init_alias(self): + # InteractiveShell defines alias's we want, but TerminalInteractiveShell defines + # ones we don't. So don't use super and instead go right to InteractiveShell + InteractiveShell.init_alias(self) + + #------------------------------------------------------------------------- + # Things related to exiting + #------------------------------------------------------------------------- + def ask_exit(self): + """ Ask the shell to exit. Can be overiden and used as a callback. """ + # TODO PyDev's console does not have support from the Python side to exit + # the console. If user forces the exit (with sys.exit()) then the console + # simply reports errors. e.g.: + # >>> import sys + # >>> sys.exit() + # Failed to create input stream: Connection refused + # >>> + # Console already exited with value: 0 while waiting for an answer. + # Error stream: + # Output stream: + # >>> + # + # Alternatively if you use the non-IPython shell this is what happens + # >>> exit() + # :None + # >>> + # :None + # >>> + # + super(PyDevTerminalInteractiveShell, self).ask_exit() + print('To exit the PyDev Console, terminate the console within IDE.') + + #------------------------------------------------------------------------- + # Things related to magics + #------------------------------------------------------------------------- + + def init_magics(self): + super(PyDevTerminalInteractiveShell, self).init_magics() + # TODO Any additional magics for PyDev? + +InteractiveShellABC.register(PyDevTerminalInteractiveShell) # @UndefinedVariable + +#======================================================================================================================= +# _PyDevFrontEnd +#======================================================================================================================= +class _PyDevFrontEnd: + + version = release.__version__ + + def __init__(self, show_banner=True): + # Create and initialize our IPython instance. 
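+        # Reuse the existing shell singleton when one has already been created; otherwise instance() creates it.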
+ if hasattr(PyDevTerminalInteractiveShell, '_instance') and PyDevTerminalInteractiveShell._instance is not None: + self.ipython = PyDevTerminalInteractiveShell._instance + else: + self.ipython = PyDevTerminalInteractiveShell.instance() + + if show_banner: + # Display the IPython banner, this has version info and + # help info + self.ipython.show_banner() + + self._curr_exec_line = 0 + self._curr_exec_lines = [] + + + def update(self, globals, locals): + ns = self.ipython.user_ns + + for ind in ['_oh', '_ih', '_dh', '_sh', 'In', 'Out', 'get_ipython', 'exit', 'quit']: + try: + locals[ind] = ns[ind] + except KeyError: + pass # Ignore if it's not there -- #PyDev-817: Error on autocomplete with IPython on interactive console + + self.ipython.user_global_ns.clear() + self.ipython.user_global_ns.update(globals) + self.ipython.user_ns = locals + + if hasattr(self.ipython, 'history_manager') and hasattr(self.ipython.history_manager, 'save_thread'): + self.ipython.history_manager.save_thread.pydev_do_not_trace = True #don't trace ipython history saving thread + + def complete(self, string): + try: + if string: + return self.ipython.complete(None, line=string, cursor_pos=string.__len__()) + else: + return self.ipython.complete(string, string, 0) + except: + # Silence completer exceptions + pass + + def is_complete(self, string): + #Based on IPython 0.10.1 + + if string in ('', '\n'): + # Prefiltering, eg through ipython0, may return an empty + # string although some operations have been accomplished. We + # thus want to consider an empty string as a complete + # statement. + return True + else: + try: + # Add line returns here, to make sure that the statement is + # complete (except if '\' was used). + # This should probably be done in a different place (like + # maybe 'prefilter_input' method? For now, this works. + clean_string = string.rstrip('\n') + if not clean_string.endswith('\\'): + clean_string += '\n\n' + + is_complete = codeop.compile_command( + clean_string, + "", + "exec" + ) + except Exception: + # XXX: Hack: return True so that the + # code gets executed and the error captured. + is_complete = True + return is_complete + + + def getCompletions(self, text, act_tok): + # Get completions from IPython and from PyDev and merge the results + # IPython only gives context free list of completions, while PyDev + # gives detailed information about completions. + try: + TYPE_IPYTHON = '11' + TYPE_IPYTHON_MAGIC = '12' + _line, ipython_completions = self.complete(text) + + from _pydev_bundle._pydev_completer import Completer + completer = Completer(self.get_namespace(), None) + ret = completer.complete(act_tok) + append = ret.append + ip = self.ipython + pydev_completions = set([f[0] for f in ret]) + for ipython_completion in ipython_completions: + + #PyCharm was not expecting completions with '%'... + #Could be fixed in the backend, but it's probably better + #fixing it at PyCharm. 
+ #if ipython_completion.startswith('%'): + # ipython_completion = ipython_completion[1:] + + if ipython_completion not in pydev_completions: + pydev_completions.add(ipython_completion) + inf = ip.object_inspect(ipython_completion) + if inf['type_name'] == 'Magic function': + pydev_type = TYPE_IPYTHON_MAGIC + else: + pydev_type = TYPE_IPYTHON + pydev_doc = inf['docstring'] + if pydev_doc is None: + pydev_doc = '' + append((ipython_completion, pydev_doc, '', pydev_type)) + return ret + except: + import traceback;traceback.print_exc() + return [] + + + def get_namespace(self): + return self.ipython.user_ns + + def clear_buffer(self): + del self._curr_exec_lines[:] + + def add_exec(self, line): + if self._curr_exec_lines: + self._curr_exec_lines.append(line) + + buf = '\n'.join(self._curr_exec_lines) + + if self.is_complete(buf): + self._curr_exec_line += 1 + self.ipython.run_cell(buf) + del self._curr_exec_lines[:] + return False #execute complete (no more) + + return True #needs more + else: + + if not self.is_complete(line): + #Did not execute + self._curr_exec_lines.append(line) + return True #needs more + else: + self._curr_exec_line += 1 + self.ipython.run_cell(line, store_history=True) + #hist = self.ipython.history_manager.output_hist_reprs + #rep = hist.get(self._curr_exec_line, None) + #if rep is not None: + # print(rep) + return False #execute complete (no more) + + def is_automagic(self): + return self.ipython.automagic + + def get_greeting_msg(self): + return 'PyDev console: using IPython %s\n' % self.version + + +# If we have succeeded in importing this module, then monkey patch inputhook +# in IPython to redirect to PyDev's version. This is essential to make +# %gui in 0.11 work (0.12+ fixes it by calling self.enable_gui, which is implemented +# above, instead of inputhook.enable_gui). +# See test_gui (test_pydev_ipython_011.TestRunningCode) which fails on 0.11 without +# this patch +import IPython.lib.inputhook +import pydev_ipython.inputhook +IPython.lib.inputhook.enable_gui = pydev_ipython.inputhook.enable_gui +# In addition to enable_gui, make all publics in pydev_ipython.inputhook replace +# the IPython versions. This enables the examples in IPython's examples/lib/gui-* +# to operate properly because those examples don't use %gui magic and instead +# rely on using the inputhooks directly. +for name in pydev_ipython.inputhook.__all__: + setattr(IPython.lib.inputhook, name, getattr(pydev_ipython.inputhook, name)) + + +class _PyDevFrontEndContainer: + _instance = None + _last_host_port = None + +def get_pydev_frontend(pydev_host, pydev_client_port, show_banner=True): + if _PyDevFrontEndContainer._instance is None: + _PyDevFrontEndContainer._instance = _PyDevFrontEnd(show_banner=show_banner) + + if _PyDevFrontEndContainer._last_host_port != (pydev_host, pydev_client_port): + _PyDevFrontEndContainer._last_host_port = pydev_host, pydev_client_port + + # Back channel to PyDev to open editors (in the future other + # info may go back this way. This is the same channel that is + # used to get stdin, see StdIn in pydev_console_utils) + _PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(pydev_host, pydev_client_port) + + # Note: setting the callback directly because setting it with set_hook would actually create a chain instead + # of ovewriting at each new call). 
+ # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port)) + + return _PyDevFrontEndContainer._instance + + diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_is_thread_alive.py b/ptvsd/pydevd/_pydev_bundle/pydev_is_thread_alive.py new file mode 100644 index 00000000..d1a63d75 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_is_thread_alive.py @@ -0,0 +1,23 @@ +from _pydev_imps._pydev_saved_modules import threading + +# Hack for https://sw-brainwy.rhcloud.com/tracker/PyDev/363 (i.e.: calling isAlive() can throw AssertionError under some circumstances) +# It is required to debug threads started by start_new_thread in Python 3.4 +_temp = threading.Thread() +if hasattr(_temp, '_is_stopped'): # Python 3.4 has this + def is_thread_alive(t): + try: + return not t._is_stopped + except: + return t.isAlive() + +elif hasattr(_temp, '_Thread__stopped'): # Python 2.7 has this + def is_thread_alive(t): + try: + return not t._Thread__stopped + except: + return t.isAlive() + +else: # Haven't checked all other versions, so, let's use the regular isAlive call in this case. + def is_thread_alive(t): + return t.isAlive() +del _temp diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_localhost.py b/ptvsd/pydevd/_pydev_bundle/pydev_localhost.py new file mode 100644 index 00000000..00975360 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_localhost.py @@ -0,0 +1,64 @@ +from _pydevd_bundle import pydevd_constants +from _pydev_imps._pydev_saved_modules import socket +import sys + +IS_JYTHON = sys.platform.find('java') != -1 + +_cache = None +def get_localhost(): + ''' + Should return 127.0.0.1 in ipv4 and ::1 in ipv6 + + localhost is not used because on windows vista/windows 7, there can be issues where the resolving doesn't work + properly and takes a lot of time (had this issue on the pyunit server). + + Using the IP directly solves the problem. + ''' + #TODO: Needs better investigation! + + global _cache + if _cache is None: + try: + for addr_info in socket.getaddrinfo("localhost", 80, 0, 0, socket.SOL_TCP): + config = addr_info[4] + if config[0] == '127.0.0.1': + _cache = '127.0.0.1' + return _cache + except: + #Ok, some versions of Python don't have getaddrinfo or SOL_TCP... Just consider it 127.0.0.1 in this case. + _cache = '127.0.0.1' + else: + _cache = 'localhost' + + return _cache + + +def get_socket_names(n_sockets, close=False): + socket_names = [] + sockets = [] + for _ in range(n_sockets): + if IS_JYTHON: + # Although the option which would be pure java *should* work for Jython, the socket being returned is still 0 + # (i.e.: it doesn't give the local port bound, only the original port, which was 0). 
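+            # So use java.net.ServerSocket directly: binding to port 0 lets the JVM pick a free ephemeral port.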
+ from java.net import ServerSocket + sock = ServerSocket(0) + socket_name = get_localhost(), sock.getLocalPort() + else: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + sock.bind((get_localhost(), 0)) + socket_name = sock.getsockname() + + sockets.append(sock) + socket_names.append(socket_name) + + if close: + for s in sockets: + s.close() + return socket_names + +def get_socket_name(close=False): + return get_socket_names(1, close)[0] + +if __name__ == '__main__': + print(get_socket_name()) \ No newline at end of file diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_log.py b/ptvsd/pydevd/_pydev_bundle/pydev_log.py new file mode 100644 index 00000000..40b562de --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_log.py @@ -0,0 +1,40 @@ +import sys +from _pydevd_bundle.pydevd_constants import DebugInfoHolder +from _pydev_imps._pydev_saved_modules import threading +currentThread = threading.currentThread + + +import traceback + +WARN_ONCE_MAP = {} + +def stderr_write(message): + sys.stderr.write(message) + sys.stderr.write("\n") + + +def debug(message): + if DebugInfoHolder.DEBUG_TRACE_LEVEL>2: + stderr_write(message) + + +def warn(message): + if DebugInfoHolder.DEBUG_TRACE_LEVEL>1: + stderr_write(message) + + +def info(message): + stderr_write(message) + + +def error(message, tb=False): + stderr_write(message) + if tb: + traceback.print_exc() + + +def error_once(message): + if message not in WARN_ONCE_MAP: + WARN_ONCE_MAP[message] = True + error(message) + diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_monkey.py b/ptvsd/pydevd/_pydev_bundle/pydev_monkey.py new file mode 100644 index 00000000..a9b1d8b1 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_monkey.py @@ -0,0 +1,692 @@ +# License: EPL +import os +import sys +import traceback + +try: + xrange +except: + xrange = range + +#=============================================================================== +# Things that are dependent on having the pydevd debugger +#=============================================================================== +def log_debug(msg): + from _pydev_bundle import pydev_log + pydev_log.debug(msg) + +def log_error_once(msg): + from _pydev_bundle import pydev_log + pydev_log.error_once(msg) + +pydev_src_dir = os.path.dirname(os.path.dirname(__file__)) + +def _get_python_c_args(host, port, indC, args): + return ("import sys; sys.path.append(r'%s'); import pydevd; " + "pydevd.settrace(host='%s', port=%s, suspend=False, trace_only_current_thread=False, patch_multiprocessing=True); " + "%s" + ) % ( + pydev_src_dir, + host, + port, + args[indC + 1]) + +def _get_host_port(): + import pydevd + host, port = pydevd.dispatch() + return host, port + +def _is_managed_arg(arg): + if arg.endswith('pydevd.py'): + return True + return False + +def _on_forked_process(): + import pydevd + pydevd.threadingCurrentThread().__pydevd_main_thread = True + pydevd.settrace_forked() + +def _on_set_trace_for_new_thread(global_debugger): + if global_debugger is not None: + global_debugger.SetTrace(global_debugger.trace_dispatch, global_debugger.frame_eval_func, global_debugger.dummy_trace_dispatch) + +#=============================================================================== +# Things related to monkey-patching +#=============================================================================== +def is_python(path): + if path.endswith("'") or path.endswith('"'): + path = path[1:len(path) - 1] + filename = os.path.basename(path).lower() + for name in ['python', 
'jython', 'pypy']: + if filename.find(name) != -1: + return True + + return False + + +def remove_quotes_from_args(args): + if sys.platform == "win32": + new_args = [] + for x in args: + if len(x) > 1 and x.startswith('"') and x.endswith('"'): + x = x[1:-1] + new_args.append(x) + return new_args + else: + return args + + +def quote_args(args): + if sys.platform == "win32": + quoted_args = [] + for x in args: + if x.startswith('"') and x.endswith('"'): + quoted_args.append(x) + else: + if ' ' in x: + x = x.replace('"', '\\"') + quoted_args.append('"%s"' % x) + else: + quoted_args.append(x) + return quoted_args + else: + return args + + +def patch_args(args): + try: + log_debug("Patching args: %s"% str(args)) + args = remove_quotes_from_args(args) + + import sys + new_args = [] + i = 0 + if len(args) == 0: + return args + + if is_python(args[0]): + try: + indC = args.index('-c') + except ValueError: + indC = -1 + + if indC != -1: + host, port = _get_host_port() + + if port is not None: + new_args.extend(args) + new_args[indC + 1] = _get_python_c_args(host, port, indC, args) + return quote_args(new_args) + else: + # Check for Python ZIP Applications and don't patch the args for them. + # Assumes the first non `-` argument is what we need to check. + # There's probably a better way to determine this but it works for most cases. + continue_next = False + for i in xrange(1, len(args)): + if continue_next: + continue_next = False + continue + + arg = args[i] + if arg.startswith('-'): + # Skip the next arg too if this flag expects a value. + continue_next = arg in ['-m', '-W', '-X'] + continue + + if arg.rsplit('.')[-1] in ['zip', 'pyz', 'pyzw']: + log_debug('Executing a PyZip, returning') + return args + break + + new_args.append(args[0]) + else: + log_debug("Process is not python, returning.") + return args + + i = 1 + + # Original args should be something as: + # ['X:\\pysrc\\pydevd.py', '--multiprocess', '--print-in-debugger-startup', + # '--vm_type', 'python', '--client', '127.0.0.1', '--port', '56352', '--file', 'x:\\snippet1.py'] + from _pydevd_bundle.pydevd_command_line_handling import setup_to_argv + from pydevd import SetupHolder + original = setup_to_argv(SetupHolder.setup) + ['--file'] + while i < len(args): + if args[i] == '-m': + # Always insert at pos == 1 (i.e.: pydevd "--module" --multiprocess ...) + original.insert(1, '--module') + else: + if args[i].startswith('-'): + new_args.append(args[i]) + else: + break + i += 1 + + # Note: undoing https://github.com/Elizaveta239/PyDev.Debugger/commit/053c9d6b1b455530bca267e7419a9f63bf51cddf + # (i >= len(args) instead of i < len(args)) + # in practice it'd raise an exception here and would return original args, which is not what we want... providing + # a proper fix for https://youtrack.jetbrains.com/issue/PY-9767 elsewhere. 
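+        # _is_managed_arg is True when the target script is pydevd.py itself, in which case the
+        # command line already launches the debugger and there is nothing left to patch.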
+ if i < len(args) and _is_managed_arg(args[i]): # no need to add pydevd twice + return args + + for x in original: + new_args.append(x) + if x == '--file': + break + + while i < len(args): + new_args.append(args[i]) + i += 1 + + return quote_args(new_args) + except: + traceback.print_exc() + return args + + +def str_to_args_windows(args): + # see http:#msdn.microsoft.com/en-us/library/a1y7w461.aspx + result = [] + + DEFAULT = 0 + ARG = 1 + IN_DOUBLE_QUOTE = 2 + + state = DEFAULT + backslashes = 0 + buf = '' + + args_len = len(args) + for i in xrange(args_len): + ch = args[i] + if (ch == '\\'): + backslashes += 1 + continue + elif (backslashes != 0): + if ch == '"': + while backslashes >= 2: + backslashes -= 2 + buf += '\\' + if (backslashes == 1): + if (state == DEFAULT): + state = ARG + + buf += '"' + backslashes = 0 + continue + # else fall through to switch + else: + # false alarm, treat passed backslashes literally... + if (state == DEFAULT): + state = ARG + + while backslashes > 0: + backslashes -= 1 + buf += '\\' + # fall through to switch + if ch in (' ', '\t'): + if (state == DEFAULT): + # skip + continue + elif (state == ARG): + state = DEFAULT + result.append(buf) + buf = '' + continue + + if state in (DEFAULT, ARG): + if ch == '"': + state = IN_DOUBLE_QUOTE + else: + state = ARG + buf += ch + + elif state == IN_DOUBLE_QUOTE: + if ch == '"': + if (i + 1 < args_len and args[i + 1] == '"'): + # Undocumented feature in Windows: + # Two consecutive double quotes inside a double-quoted argument are interpreted as + # a single double quote. + buf += '"' + i += 1 + elif len(buf) == 0: + # empty string on Windows platform. Account for bug in constructor of + # JDK's java.lang.ProcessImpl. + result.append("\"\"") + state = DEFAULT + else: + state = ARG + else: + buf += ch + + else: + raise RuntimeError('Illegal condition') + + if len(buf) > 0 or state != DEFAULT: + result.append(buf) + + return result + + +def patch_arg_str_win(arg_str): + args = str_to_args_windows(arg_str) + # Fix https://youtrack.jetbrains.com/issue/PY-9767 (args may be empty) + if not args or not is_python(args[0]): + return arg_str + arg_str = ' '.join(patch_args(args)) + log_debug("New args: %s" % arg_str) + return arg_str + +def monkey_patch_module(module, funcname, create_func): + if hasattr(module, funcname): + original_name = 'original_' + funcname + if not hasattr(module, original_name): + setattr(module, original_name, getattr(module, funcname)) + setattr(module, funcname, create_func(original_name)) + + +def monkey_patch_os(funcname, create_func): + monkey_patch_module(os, funcname, create_func) + + +def warn_multiproc(): + log_error_once( + "pydev debugger: New process is launching (breakpoints won't work in the new process).\n" + "pydev debugger: To debug that process please enable 'Attach to subprocess automatically while debugging?' option in the debugger settings.\n") + + +def create_warn_multiproc(original_name): + + def new_warn_multiproc(*args): + import os + + warn_multiproc() + + return getattr(os, original_name)(*args) + return new_warn_multiproc + +def create_execl(original_name): + def new_execl(path, *args): + """ + os.execl(path, arg0, arg1, ...) + os.execle(path, arg0, arg1, ..., env) + os.execlp(file, arg0, arg1, ...) 
+ os.execlpe(file, arg0, arg1, ..., env) + """ + import os + args = patch_args(args) + send_process_created_message() + return getattr(os, original_name)(path, *args) + return new_execl + + +def create_execv(original_name): + def new_execv(path, args): + """ + os.execv(path, args) + os.execvp(file, args) + """ + import os + send_process_created_message() + return getattr(os, original_name)(path, patch_args(args)) + return new_execv + + +def create_execve(original_name): + """ + os.execve(path, args, env) + os.execvpe(file, args, env) + """ + def new_execve(path, args, env): + import os + send_process_created_message() + return getattr(os, original_name)(path, patch_args(args), env) + return new_execve + + +def create_spawnl(original_name): + def new_spawnl(mode, path, *args): + """ + os.spawnl(mode, path, arg0, arg1, ...) + os.spawnlp(mode, file, arg0, arg1, ...) + """ + import os + args = patch_args(args) + send_process_created_message() + return getattr(os, original_name)(mode, path, *args) + return new_spawnl + + +def create_spawnv(original_name): + def new_spawnv(mode, path, args): + """ + os.spawnv(mode, path, args) + os.spawnvp(mode, file, args) + """ + import os + send_process_created_message() + return getattr(os, original_name)(mode, path, patch_args(args)) + return new_spawnv + + +def create_spawnve(original_name): + """ + os.spawnve(mode, path, args, env) + os.spawnvpe(mode, file, args, env) + """ + def new_spawnve(mode, path, args, env): + import os + send_process_created_message() + return getattr(os, original_name)(mode, path, patch_args(args), env) + return new_spawnve + + +def create_fork_exec(original_name): + """ + _posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more)) + """ + def new_fork_exec(args, *other_args): + import _posixsubprocess # @UnresolvedImport + args = patch_args(args) + send_process_created_message() + return getattr(_posixsubprocess, original_name)(args, *other_args) + return new_fork_exec + + +def create_warn_fork_exec(original_name): + """ + _posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more)) + """ + def new_warn_fork_exec(*args): + try: + import _posixsubprocess + warn_multiproc() + return getattr(_posixsubprocess, original_name)(*args) + except: + pass + return new_warn_fork_exec + + +def create_CreateProcess(original_name): + """ + CreateProcess(*args, **kwargs) + """ + def new_CreateProcess(app_name, cmd_line, *args): + try: + import _subprocess + except ImportError: + import _winapi as _subprocess + send_process_created_message() + return getattr(_subprocess, original_name)(app_name, patch_arg_str_win(cmd_line), *args) + return new_CreateProcess + + +def create_CreateProcessWarnMultiproc(original_name): + """ + CreateProcess(*args, **kwargs) + """ + def new_CreateProcess(*args): + try: + import _subprocess + except ImportError: + import _winapi as _subprocess + warn_multiproc() + return getattr(_subprocess, original_name)(*args) + return new_CreateProcess + + +def create_fork(original_name): + def new_fork(): + import os + + # A simple fork will result in a new python process + is_new_python_process = True + frame = sys._getframe() + + while frame is not None: + if frame.f_code.co_name == '_execute_child' and 'subprocess' in frame.f_code.co_filename: + # If we're actually in subprocess.Popen creating a child, it may + # result in something which is not a Python process, (so, we + # don't want to connect with it in the forked version). 
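+                # Peek at Popen._execute_child's local variable 'executable' to see what will actually be exec'd.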
+ executable = frame.f_locals.get('executable') + if executable is not None: + is_new_python_process = False + if is_python(executable): + is_new_python_process = True + break + + frame = frame.f_back + frame = None # Just make sure we don't hold on to it. + + child_process = getattr(os, original_name)() # fork + if not child_process: + if is_new_python_process: + _on_forked_process() + else: + if is_new_python_process: + send_process_created_message() + return child_process + return new_fork + + +def send_process_created_message(): + from _pydevd_bundle.pydevd_comm import get_global_debugger + debugger = get_global_debugger() + if debugger is not None: + debugger.send_process_created_message() + + +def patch_new_process_functions(): + # os.execl(path, arg0, arg1, ...) + # os.execle(path, arg0, arg1, ..., env) + # os.execlp(file, arg0, arg1, ...) + # os.execlpe(file, arg0, arg1, ..., env) + # os.execv(path, args) + # os.execve(path, args, env) + # os.execvp(file, args) + # os.execvpe(file, args, env) + monkey_patch_os('execl', create_execl) + monkey_patch_os('execle', create_execl) + monkey_patch_os('execlp', create_execl) + monkey_patch_os('execlpe', create_execl) + monkey_patch_os('execv', create_execv) + monkey_patch_os('execve', create_execve) + monkey_patch_os('execvp', create_execv) + monkey_patch_os('execvpe', create_execve) + + # os.spawnl(mode, path, ...) + # os.spawnle(mode, path, ..., env) + # os.spawnlp(mode, file, ...) + # os.spawnlpe(mode, file, ..., env) + # os.spawnv(mode, path, args) + # os.spawnve(mode, path, args, env) + # os.spawnvp(mode, file, args) + # os.spawnvpe(mode, file, args, env) + + monkey_patch_os('spawnl', create_spawnl) + monkey_patch_os('spawnle', create_spawnl) + monkey_patch_os('spawnlp', create_spawnl) + monkey_patch_os('spawnlpe', create_spawnl) + monkey_patch_os('spawnv', create_spawnv) + monkey_patch_os('spawnve', create_spawnve) + monkey_patch_os('spawnvp', create_spawnv) + monkey_patch_os('spawnvpe', create_spawnve) + + if sys.platform != 'win32': + monkey_patch_os('fork', create_fork) + try: + import _posixsubprocess + monkey_patch_module(_posixsubprocess, 'fork_exec', create_fork_exec) + except ImportError: + pass + else: + # Windows + try: + import _subprocess + except ImportError: + import _winapi as _subprocess + monkey_patch_module(_subprocess, 'CreateProcess', create_CreateProcess) + + +def patch_new_process_functions_with_warning(): + monkey_patch_os('execl', create_warn_multiproc) + monkey_patch_os('execle', create_warn_multiproc) + monkey_patch_os('execlp', create_warn_multiproc) + monkey_patch_os('execlpe', create_warn_multiproc) + monkey_patch_os('execv', create_warn_multiproc) + monkey_patch_os('execve', create_warn_multiproc) + monkey_patch_os('execvp', create_warn_multiproc) + monkey_patch_os('execvpe', create_warn_multiproc) + monkey_patch_os('spawnl', create_warn_multiproc) + monkey_patch_os('spawnle', create_warn_multiproc) + monkey_patch_os('spawnlp', create_warn_multiproc) + monkey_patch_os('spawnlpe', create_warn_multiproc) + monkey_patch_os('spawnv', create_warn_multiproc) + monkey_patch_os('spawnve', create_warn_multiproc) + monkey_patch_os('spawnvp', create_warn_multiproc) + monkey_patch_os('spawnvpe', create_warn_multiproc) + + if sys.platform != 'win32': + monkey_patch_os('fork', create_warn_multiproc) + try: + import _posixsubprocess + monkey_patch_module(_posixsubprocess, 'fork_exec', create_warn_fork_exec) + except ImportError: + pass + else: + # Windows + try: + import _subprocess + except ImportError: + import _winapi 
as _subprocess + monkey_patch_module(_subprocess, 'CreateProcess', create_CreateProcessWarnMultiproc) + + +class _NewThreadStartupWithTrace: + + def __init__(self, original_func, args, kwargs): + self.original_func = original_func + self.args = args + self.kwargs = kwargs + self.global_debugger = self.get_debugger() + + def get_debugger(self): + from _pydevd_bundle.pydevd_comm import get_global_debugger + return get_global_debugger() + + def __call__(self): + _on_set_trace_for_new_thread(self.global_debugger) + global_debugger = self.global_debugger + + if global_debugger is not None and global_debugger.thread_analyser is not None: + # we can detect start_new_thread only here + try: + from pydevd_concurrency_analyser.pydevd_concurrency_logger import log_new_thread + log_new_thread(global_debugger) + except: + sys.stderr.write("Failed to detect new thread for visualization") + + return self.original_func(*self.args, **self.kwargs) + + +class _NewThreadStartupWithoutTrace: + + def __init__(self, original_func, args, kwargs): + self.original_func = original_func + self.args = args + self.kwargs = kwargs + + def __call__(self): + return self.original_func(*self.args, **self.kwargs) + +_UseNewThreadStartup = _NewThreadStartupWithTrace + + +def _get_threading_modules_to_patch(): + threading_modules_to_patch = [] + + try: + import thread as _thread + except: + import _thread + threading_modules_to_patch.append(_thread) + + return threading_modules_to_patch + +threading_modules_to_patch = _get_threading_modules_to_patch() + + +def patch_thread_module(thread): + + if getattr(thread, '_original_start_new_thread', None) is None: + _original_start_new_thread = thread._original_start_new_thread = thread.start_new_thread + else: + _original_start_new_thread = thread._original_start_new_thread + + class ClassWithPydevStartNewThread: + + def pydev_start_new_thread(self, function, args=(), kwargs={}): + ''' + We need to replace the original thread.start_new_thread with this function so that threads started + through it and not through the threading module are properly traced. + ''' + return _original_start_new_thread(_UseNewThreadStartup(function, args, kwargs), ()) + + # This is a hack for the situation where the thread.start_new_thread is declared inside a class, such as the one below + # class F(object): + # start_new_thread = thread.start_new_thread + # + # def start_it(self): + # self.start_new_thread(self.function, args, kwargs) + # So, if it's an already bound method, calling self.start_new_thread won't really receive a different 'self' -- it + # does work in the default case because in builtins self isn't passed either. + pydev_start_new_thread = ClassWithPydevStartNewThread().pydev_start_new_thread + + try: + # We need to replace the original thread.start_new_thread with this function so that threads started through + # it and not through the threading module are properly traced. + thread.start_new_thread = pydev_start_new_thread + thread.start_new = pydev_start_new_thread + except: + pass + + +def patch_thread_modules(): + for t in threading_modules_to_patch: + patch_thread_module(t) + + +def undo_patch_thread_modules(): + for t in threading_modules_to_patch: + try: + t.start_new_thread = t._original_start_new_thread + except: + pass + + try: + t.start_new = t._original_start_new_thread + except: + pass + + +def disable_trace_thread_modules(): + ''' + Can be used to temporarily stop tracing threads created with thread.start_new_thread. 
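+    Call enable_trace_thread_modules() afterwards to restore tracing for newly
+    created threads.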
+ ''' + global _UseNewThreadStartup + _UseNewThreadStartup = _NewThreadStartupWithoutTrace + + +def enable_trace_thread_modules(): + ''' + Can be used to start tracing threads created with thread.start_new_thread again. + ''' + global _UseNewThreadStartup + _UseNewThreadStartup = _NewThreadStartupWithTrace + + +def get_original_start_new_thread(threading_module): + try: + return threading_module._original_start_new_thread + except: + return threading_module.start_new_thread diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_monkey_qt.py b/ptvsd/pydevd/_pydev_bundle/pydev_monkey_qt.py new file mode 100644 index 00000000..85885e2e --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_monkey_qt.py @@ -0,0 +1,190 @@ +from __future__ import nested_scopes +import os + +def set_trace_in_qt(): + import pydevd_tracing + from _pydevd_bundle.pydevd_comm import get_global_debugger + debugger = get_global_debugger() + if debugger is not None: + pydevd_tracing.SetTrace(debugger.trace_dispatch, debugger.frame_eval_func) + + +_patched_qt = False +def patch_qt(qt_support_mode): + ''' + This method patches qt (PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread. + ''' + if not qt_support_mode: + return + + if qt_support_mode is True or qt_support_mode == 'True': + # do not break backward compatibility + qt_support_mode = 'auto' + + if qt_support_mode == 'auto': + qt_support_mode = os.getenv('PYDEVD_PYQT_MODE', 'auto') + + # Avoid patching more than once + global _patched_qt + if _patched_qt: + return + + _patched_qt = True + + if qt_support_mode == 'auto': + + patch_qt_on_import = None + try: + import PySide # @UnresolvedImport @UnusedImport + qt_support_mode = 'pyside' + except: + try: + import PyQt5 # @UnresolvedImport @UnusedImport + qt_support_mode = 'pyqt5' + except: + try: + import PyQt4 # @UnresolvedImport @UnusedImport + qt_support_mode = 'pyqt4' + except: + return + + + if qt_support_mode == 'pyside': + import PySide.QtCore # @UnresolvedImport + _internal_patch_qt(PySide.QtCore, qt_support_mode) + + elif qt_support_mode == 'pyqt5': + import PyQt5.QtCore # @UnresolvedImport + _internal_patch_qt(PyQt5.QtCore) + + elif qt_support_mode == 'pyqt4': + # Ok, we have an issue here: + # PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode + # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html + # Mostly, if the user uses a different API version (i.e.: v2 instead of v1), + # that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue + # as they only implements v2). + patch_qt_on_import = 'PyQt4' + def get_qt_core_module(): + import PyQt4.QtCore # @UnresolvedImport + return PyQt4.QtCore + _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module) + + else: + raise ValueError('Unexpected qt support mode: %s' % (qt_support_mode,)) + + +def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module): + # I don't like this approach very much as we have to patch __import__, but I like even less + # asking the user to configure something in the client side... + # So, our approach is to patch PyQt4 right before the user tries to import it (at which + # point he should've set the sip api version properly already anyways). + + dotted = patch_qt_on_import + '.' 
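+    # 'dotted' (e.g. 'PyQt4.') lets the patched __import__ below catch submodule
+    # imports such as 'PyQt4.QtCore' in addition to the plain 'PyQt4' import.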
+ original_import = __import__ + + from _pydev_imps._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module + + patch_sys_module() + patch_reload() + + def patched_import(name, *args, **kwargs): + if patch_qt_on_import == name or name.startswith(dotted): + builtins.__import__ = original_import + cancel_patches_in_sys_module() + _internal_patch_qt(get_qt_core_module()) # Patch it only when the user would import the qt module + return original_import(name, *args, **kwargs) + + import sys + if sys.version_info[0] >= 3: + import builtins # Py3 + else: + import __builtin__ as builtins + + builtins.__import__ = patched_import + + +def _internal_patch_qt(QtCore, qt_support_mode='auto'): + _original_thread_init = QtCore.QThread.__init__ + _original_runnable_init = QtCore.QRunnable.__init__ + _original_QThread = QtCore.QThread + + class FuncWrapper: + def __init__(self, original): + self._original = original + + def __call__(self, *args, **kwargs): + set_trace_in_qt() + return self._original(*args, **kwargs) + + class StartedSignalWrapper(QtCore.QObject): # Wrapper for the QThread.started signal + + try: + _signal = QtCore.Signal() # @UndefinedVariable + except: + _signal = QtCore.pyqtSignal() # @UndefinedVariable + + def __init__(self, thread, original_started): + QtCore.QObject.__init__(self) + self.thread = thread + self.original_started = original_started + if qt_support_mode == 'pyside': + self._signal = original_started + else: + self._signal.connect(self._on_call) + self.original_started.connect(self._signal) + + def connect(self, func, *args, **kwargs): + if qt_support_mode == 'pyside': + return self._signal.connect(FuncWrapper(func), *args, **kwargs) + else: + return self._signal.connect(func, *args, **kwargs) + + def disconnect(self, *args, **kwargs): + return self._signal.disconnect(*args, **kwargs) + + def emit(self, *args, **kwargs): + return self._signal.emit(*args, **kwargs) + + def _on_call(self, *args, **kwargs): + set_trace_in_qt() + + class ThreadWrapper(QtCore.QThread): # Wrapper for QThread + + def __init__(self, *args, **kwargs): + _original_thread_init(self, *args, **kwargs) + + # In PyQt5 the program hangs when we try to call original run method of QThread class. + # So we need to distinguish instances of QThread class and instances of QThread inheritors. 
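+            # If run() was not overridden (self.__class__.run is still QThread.run),
+            # swap it for _exec_run, which sets tracing and then calls exec_();
+            # otherwise keep the user's run() but route it through _new_run so that
+            # tracing is set up before the user's code executes.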
+ if self.__class__.run == _original_QThread.run: + self.run = self._exec_run + else: + self._original_run = self.run + self.run = self._new_run + self._original_started = self.started + self.started = StartedSignalWrapper(self, self.started) + + def _exec_run(self): + set_trace_in_qt() + self.exec_() + return None + + def _new_run(self): + set_trace_in_qt() + return self._original_run() + + class RunnableWrapper(QtCore.QRunnable): # Wrapper for QRunnable + + def __init__(self, *args, **kwargs): + _original_runnable_init(self, *args, **kwargs) + + self._original_run = self.run + self.run = self._new_run + + + def _new_run(self): + set_trace_in_qt() + return self._original_run() + + QtCore.QThread = ThreadWrapper + QtCore.QRunnable = RunnableWrapper diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_override.py b/ptvsd/pydevd/_pydev_bundle/pydev_override.py new file mode 100644 index 00000000..bb0c5043 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_override.py @@ -0,0 +1,49 @@ +def overrides(method): + ''' + Initially meant to be used as + + class B: + @overrides(A.m1) + def m1(self): + pass + + but as we want to be compatible with Jython 2.1 where decorators have an uglier syntax (needing an assign + after the method), it should now be used without being a decorator as below (in which case we don't even check + for anything, just that the parent name was actually properly loaded). + + i.e.: + + class B: + overrides(A.m1) + def m1(self): + pass + ''' + return + +# def wrapper(func): +# if func.__name__ != method.__name__: +# msg = "Wrong @override: %r expected, but overwriting %r." +# msg = msg % (func.__name__, method.__name__) +# raise AssertionError(msg) +# +# if func.__doc__ is None: +# func.__doc__ = method.__doc__ +# +# return func +# +# return wrapper + +def implements(method): + return +# def wrapper(func): +# if func.__name__ != method.__name__: +# msg = "Wrong @implements: %r expected, but implementing %r." +# msg = msg % (func.__name__, method.__name__) +# raise AssertionError(msg) +# +# if func.__doc__ is None: +# func.__doc__ = method.__doc__ +# +# return func +# +# return wrapper \ No newline at end of file diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_umd.py b/ptvsd/pydevd/_pydev_bundle/pydev_umd.py new file mode 100644 index 00000000..0bfeda74 --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_umd.py @@ -0,0 +1,172 @@ +""" +The UserModuleDeleter and runfile methods are copied from +Spyder and carry their own license agreement. +http://code.google.com/p/spyderlib/source/browse/spyderlib/widgets/externalshell/sitecustomize.py + +Spyder License Agreement (MIT License) +-------------------------------------- + +Copyright (c) 2009-2012 Pierre Raybaut + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. +""" + +import sys +import os + +# The following classes and functions are mainly intended to be used from +# an interactive Python session +class UserModuleDeleter: + """ + User Module Deleter (UMD) aims at deleting user modules + to force Python to deeply reload them during import + + pathlist [list]: blacklist in terms of module path + namelist [list]: blacklist in terms of module name + """ + def __init__(self, namelist=None, pathlist=None): + if namelist is None: + namelist = [] + self.namelist = namelist + if pathlist is None: + pathlist = [] + self.pathlist = pathlist + try: + # blacklist all files in org.python.pydev/pysrc + import pydev_pysrc, inspect + self.pathlist.append(os.path.dirname(pydev_pysrc.__file__)) + except: + pass + self.previous_modules = list(sys.modules.keys()) + + def is_module_blacklisted(self, modname, modpath): + for path in [sys.prefix] + self.pathlist: + if modpath.startswith(path): + return True + else: + return set(modname.split('.')) & set(self.namelist) + + def run(self, verbose=False): + """ + Del user modules to force Python to deeply reload them + + Do not del modules which are considered as system modules, i.e. + modules installed in subdirectories of Python interpreter's binary + Do not del C modules + """ + log = [] + modules_copy = dict(sys.modules) + for modname, module in modules_copy.items(): + if modname == 'aaaaa': + print(modname, module) + print(self.previous_modules) + if modname not in self.previous_modules: + modpath = getattr(module, '__file__', None) + if modpath is None: + # *module* is a C module that is statically linked into the + # interpreter. There is no way to know its path, so we + # choose to ignore it. 
+ continue + if not self.is_module_blacklisted(modname, modpath): + log.append(modname) + del sys.modules[modname] + if verbose and log: + print("\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted", + ": " + ", ".join(log))) + +__umd__ = None + +_get_globals_callback = None +def _set_globals_function(get_globals): + global _get_globals_callback + _get_globals_callback = get_globals +def _get_globals(): + """Return current Python interpreter globals namespace""" + if _get_globals_callback is not None: + return _get_globals_callback() + else: + try: + from __main__ import __dict__ as namespace + except ImportError: + try: + # The import fails on IronPython + import __main__ + namespace = __main__.__dict__ + except: + namespace + shell = namespace.get('__ipythonshell__') + if shell is not None and hasattr(shell, 'user_ns'): + # IPython 0.12+ kernel + return shell.user_ns + else: + # Python interpreter + return namespace + return namespace + + +def runfile(filename, args=None, wdir=None, namespace=None): + """ + Run filename + args: command line arguments (string) + wdir: working directory + """ + try: + if hasattr(filename, 'decode'): + filename = filename.decode('utf-8') + except (UnicodeError, TypeError): + pass + global __umd__ + if os.environ.get("PYDEV_UMD_ENABLED", "").lower() == "true": + if __umd__ is None: + namelist = os.environ.get("PYDEV_UMD_NAMELIST", None) + if namelist is not None: + namelist = namelist.split(',') + __umd__ = UserModuleDeleter(namelist=namelist) + else: + verbose = os.environ.get("PYDEV_UMD_VERBOSE", "").lower() == "true" + __umd__.run(verbose=verbose) + if args is not None and not isinstance(args, basestring): + raise TypeError("expected a character buffer object") + if namespace is None: + namespace = _get_globals() + if '__file__' in namespace: + old_file = namespace['__file__'] + else: + old_file = None + namespace['__file__'] = filename + sys.argv = [filename] + if args is not None: + for arg in args.split(): + sys.argv.append(arg) + if wdir is not None: + try: + if hasattr(wdir, 'decode'): + wdir = wdir.decode('utf-8') + except (UnicodeError, TypeError): + pass + os.chdir(wdir) + execfile(filename, namespace) + sys.argv = [''] + if old_file is None: + del namespace['__file__'] + else: + namespace['__file__'] = old_file diff --git a/ptvsd/pydevd/_pydev_bundle/pydev_versioncheck.py b/ptvsd/pydevd/_pydev_bundle/pydev_versioncheck.py new file mode 100644 index 00000000..70bf765f --- /dev/null +++ b/ptvsd/pydevd/_pydev_bundle/pydev_versioncheck.py @@ -0,0 +1,16 @@ +import sys + +def versionok_for_gui(): + ''' Return True if running Python is suitable for GUI Event Integration and deeper IPython integration ''' + # We require Python 2.6+ ... + if sys.hexversion < 0x02060000: + return False + # Or Python 3.2+ + if sys.hexversion >= 0x03000000 and sys.hexversion < 0x03020000: + return False + # Not supported under Jython nor IronPython + if sys.platform.startswith("java") or sys.platform.startswith('cli'): + return False + + return True + diff --git a/ptvsd/pydevd/_pydev_imps/__init__.py b/ptvsd/pydevd/_pydev_imps/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_BaseHTTPServer.py b/ptvsd/pydevd/_pydev_imps/_pydev_BaseHTTPServer.py new file mode 100644 index 00000000..f8dd9111 --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_BaseHTTPServer.py @@ -0,0 +1,604 @@ +"""HTTP server base class. 
+ +Note: the class in this module doesn't implement any HTTP request; see +SimpleHTTPServer for simple implementations of GET, HEAD and POST +(including CGI scripts). It does, however, optionally implement HTTP/1.1 +persistent connections, as of version 0.3. + +Contents: + +- BaseHTTPRequestHandler: HTTP request handler base class +- test: test function + +XXX To do: + +- log requests even later (to capture byte count) +- log user-agent header and other interesting goodies +- send error log to separate file +""" + + +# See also: +# +# HTTP Working Group T. Berners-Lee +# INTERNET-DRAFT R. T. Fielding +# H. Frystyk Nielsen +# Expires September 8, 1995 March 8, 1995 +# +# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt +# +# and +# +# Network Working Group R. Fielding +# Request for Comments: 2616 et al +# Obsoletes: 2068 June 1999 +# Category: Standards Track +# +# URL: http://www.faqs.org/rfcs/rfc2616.html + +# Log files +# --------- +# +# Here's a quote from the NCSA httpd docs about log file format. +# +# | The logfile format is as follows. Each line consists of: +# | +# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb +# | +# | host: Either the DNS name or the IP number of the remote client +# | rfc931: Any information returned by identd for this person, +# | - otherwise. +# | authuser: If user sent a userid for authentication, the user name, +# | - otherwise. +# | DD: Day +# | Mon: Month (calendar name) +# | YYYY: Year +# | hh: hour (24-hour format, the machine's timezone) +# | mm: minutes +# | ss: seconds +# | request: The first line of the HTTP request as sent by the client. +# | ddd: the status code returned by the server, - if not available. +# | bbbb: the total number of bytes sent, +# | *not including the HTTP/1.0 header*, - if not available +# | +# | You can determine the name of the file accessed through request. +# +# (Actually, the latter is only true if you know the server configuration +# at the time the request was made!) + +__version__ = "0.3" + +__all__ = ["HTTPServer", "BaseHTTPRequestHandler"] + +import sys +from _pydev_imps._pydev_saved_modules import time +from _pydev_imps._pydev_saved_modules import socket +from warnings import filterwarnings, catch_warnings +with catch_warnings(): + if sys.py3kwarning: + filterwarnings("ignore", ".*mimetools has been removed", + DeprecationWarning) + import mimetools + +from _pydev_imps import _pydev_SocketServer as SocketServer + +# Default error message template +DEFAULT_ERROR_MESSAGE = """\ + +Error response + + +
<h1>Error response</h1>
+<p>Error code %(code)d.
+<p>Message: %(message)s.
+<p>
Error code explanation: %(code)s = %(explain)s. + +""" + +DEFAULT_ERROR_CONTENT_TYPE = "text/html" + +def _quote_html(html): + return html.replace("&", "&").replace("<", "<").replace(">", ">") + +class HTTPServer(SocketServer.TCPServer): + + allow_reuse_address = 1 # Seems to make sense in testing environment + + def server_bind(self): + """Override server_bind to store the server name.""" + SocketServer.TCPServer.server_bind(self) + host, port = self.socket.getsockname()[:2] + self.server_name = socket.getfqdn(host) + self.server_port = port + + +class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler): + + """HTTP request handler base class. + + The following explanation of HTTP serves to guide you through the + code as well as to expose any misunderstandings I may have about + HTTP (so you don't need to read the code to figure out I'm wrong + :-). + + HTTP (HyperText Transfer Protocol) is an extensible protocol on + top of a reliable stream transport (e.g. TCP/IP). The protocol + recognizes three parts to a request: + + 1. One line identifying the request type and path + 2. An optional set of RFC-822-style headers + 3. An optional data part + + The headers and data are separated by a blank line. + + The first line of the request has the form + + + + where is a (case-sensitive) keyword such as GET or POST, + is a string containing path information for the request, + and should be the string "HTTP/1.0" or "HTTP/1.1". + is encoded using the URL encoding scheme (using %xx to signify + the ASCII character with hex code xx). + + The specification specifies that lines are separated by CRLF but + for compatibility with the widest range of clients recommends + servers also handle LF. Similarly, whitespace in the request line + is treated sensibly (allowing multiple spaces between components + and allowing trailing whitespace). + + Similarly, for output, lines ought to be separated by CRLF pairs + but most clients grok LF characters just fine. + + If the first line of the request has the form + + + + (i.e. is left out) then this is assumed to be an HTTP + 0.9 request; this form has no optional headers and data part and + the reply consists of just the data. + + The reply form of the HTTP 1.x protocol again has three parts: + + 1. One line giving the response code + 2. An optional set of RFC-822-style headers + 3. The data + + Again, the headers and data are separated by a blank line. + + The response code line has the form + + + + where is the protocol version ("HTTP/1.0" or "HTTP/1.1"), + is a 3-digit response code indicating success or + failure of the request, and is an optional + human-readable string explaining what the response code means. + + This server parses the request and the headers, and then calls a + function specific to the request type (). Specifically, + a request SPAM will be handled by a method do_SPAM(). If no + such method exists the server sends an error response to the + client. If it exists, it is called with no arguments: + + do_SPAM() + + Note that the request name is case sensitive (i.e. SPAM and spam + are different requests). 
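+
+    As an illustrative sketch (not part of this module; the handler name and the
+    port are made up for the example), a minimal subclass answering GET requests
+    could look like:
+
+        class HelloHandler(BaseHTTPRequestHandler):
+            def do_GET(self):
+                self.send_response(200)
+                self.send_header("Content-Type", "text/plain")
+                self.end_headers()
+                self.wfile.write("hello\n")
+
+        HTTPServer(('', 8000), HelloHandler).serve_forever()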
+ + The various request details are stored in instance variables: + + - client_address is the client IP address in the form (host, + port); + + - command, path and version are the broken-down request line; + + - headers is an instance of mimetools.Message (or a derived + class) containing the header information; + + - rfile is a file object open for reading positioned at the + start of the optional input data part; + + - wfile is a file object open for writing. + + IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING! + + The first thing to be written must be the response line. Then + follow 0 or more header lines, then a blank line, and then the + actual data (if any). The meaning of the header lines depends on + the command executed by the server; in most cases, when data is + returned, there should be at least one header line of the form + + Content-type: / + + where and should be registered MIME types, + e.g. "text/html" or "text/plain". + + """ + + # The Python system version, truncated to its first component. + sys_version = "Python/" + sys.version.split()[0] + + # The server software version. You may want to override this. + # The format is multiple whitespace-separated strings, + # where each string is of the form name[/version]. + server_version = "BaseHTTP/" + __version__ + + # The default request version. This only affects responses up until + # the point where the request line is parsed, so it mainly decides what + # the client gets back when sending a malformed request line. + # Most web servers default to HTTP 0.9, i.e. don't send a status line. + default_request_version = "HTTP/0.9" + + def parse_request(self): + """Parse a request (internal). + + The request should be stored in self.raw_requestline; the results + are in self.command, self.path, self.request_version and + self.headers. + + Return True for success, False for failure; on failure, an + error is sent back. + + """ + self.command = None # set in case of error on the first line + self.request_version = version = self.default_request_version + self.close_connection = 1 + requestline = self.raw_requestline + requestline = requestline.rstrip('\r\n') + self.requestline = requestline + words = requestline.split() + if len(words) == 3: + command, path, version = words + if version[:5] != 'HTTP/': + self.send_error(400, "Bad request version (%r)" % version) + return False + try: + base_version_number = version.split('/', 1)[1] + version_number = base_version_number.split(".") + # RFC 2145 section 3.1 says there can be only one "." and + # - major and minor numbers MUST be treated as + # separate integers; + # - HTTP/2.4 is a lower version than HTTP/2.13, which in + # turn is lower than HTTP/12.3; + # - Leading zeros MUST be ignored by recipients. 
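+                # e.g. "HTTP/1.1" yields (1, 1) and "HTTP/1.12" yields (1, 12), which
+                # compares greater than (1, 2) because the components are plain integers.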
+ if len(version_number) != 2: + raise ValueError + version_number = int(version_number[0]), int(version_number[1]) + except (ValueError, IndexError): + self.send_error(400, "Bad request version (%r)" % version) + return False + if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": + self.close_connection = 0 + if version_number >= (2, 0): + self.send_error(505, + "Invalid HTTP Version (%s)" % base_version_number) + return False + elif len(words) == 2: + command, path = words + self.close_connection = 1 + if command != 'GET': + self.send_error(400, + "Bad HTTP/0.9 request type (%r)" % command) + return False + elif not words: + return False + else: + self.send_error(400, "Bad request syntax (%r)" % requestline) + return False + self.command, self.path, self.request_version = command, path, version + + # Examine the headers and look for a Connection directive + self.headers = self.MessageClass(self.rfile, 0) + + conntype = self.headers.get('Connection', "") + if conntype.lower() == 'close': + self.close_connection = 1 + elif (conntype.lower() == 'keep-alive' and + self.protocol_version >= "HTTP/1.1"): + self.close_connection = 0 + return True + + def handle_one_request(self): + """Handle a single HTTP request. + + You normally don't need to override this method; see the class + __doc__ string for information on how to handle specific HTTP + commands such as GET and POST. + + """ + try: + self.raw_requestline = self.rfile.readline(65537) + if len(self.raw_requestline) > 65536: + self.requestline = '' + self.request_version = '' + self.command = '' + self.send_error(414) + return + if not self.raw_requestline: + self.close_connection = 1 + return + if not self.parse_request(): + # An error code has been sent, just exit + return + mname = 'do_' + self.command + if not hasattr(self, mname): + self.send_error(501, "Unsupported method (%r)" % self.command) + return + method = getattr(self, mname) + method() + self.wfile.flush() #actually send the response if not already done. + except socket.timeout: + #a read or a write timed out. Discard this connection + self.log_error("Request timed out: %r", sys.exc_info()[1]) + self.close_connection = 1 + return + + def handle(self): + """Handle multiple requests if necessary.""" + self.close_connection = 1 + + self.handle_one_request() + while not self.close_connection: + self.handle_one_request() + + def send_error(self, code, message=None): + """Send and log an error reply. + + Arguments are the error code, and a detailed message. + The detailed message defaults to the short entry matching the + response code. + + This sends an error response (so it must be called before any + output has been generated), logs the error, and finally sends + a piece of HTML explaining the error to the user. + + """ + + try: + short, long = self.responses[code] + except KeyError: + short, long = '???', '???' 
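+            # 'short' becomes the default reason phrase and 'long' the human-readable
+            # explanation substituted into the HTML error body below.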
+ if message is None: + message = short + explain = long + self.log_error("code %d, message %s", code, message) + # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201) + content = (self.error_message_format % + {'code': code, 'message': _quote_html(message), 'explain': explain}) + self.send_response(code, message) + self.send_header("Content-Type", self.error_content_type) + self.send_header('Connection', 'close') + self.end_headers() + if self.command != 'HEAD' and code >= 200 and code not in (204, 304): + self.wfile.write(content) + + error_message_format = DEFAULT_ERROR_MESSAGE + error_content_type = DEFAULT_ERROR_CONTENT_TYPE + + def send_response(self, code, message=None): + """Send the response header and log the response code. + + Also send two standard headers with the server software + version and the current date. + + """ + self.log_request(code) + if message is None: + if code in self.responses: + message = self.responses[code][0] + else: + message = '' + if self.request_version != 'HTTP/0.9': + self.wfile.write("%s %d %s\r\n" % + (self.protocol_version, code, message)) + # print (self.protocol_version, code, message) + self.send_header('Server', self.version_string()) + self.send_header('Date', self.date_time_string()) + + def send_header(self, keyword, value): + """Send a MIME header.""" + if self.request_version != 'HTTP/0.9': + self.wfile.write("%s: %s\r\n" % (keyword, value)) + + if keyword.lower() == 'connection': + if value.lower() == 'close': + self.close_connection = 1 + elif value.lower() == 'keep-alive': + self.close_connection = 0 + + def end_headers(self): + """Send the blank line ending the MIME headers.""" + if self.request_version != 'HTTP/0.9': + self.wfile.write("\r\n") + + def log_request(self, code='-', size='-'): + """Log an accepted request. + + This is called by send_response(). + + """ + + self.log_message('"%s" %s %s', + self.requestline, str(code), str(size)) + + def log_error(self, format, *args): + """Log an error. + + This is called when a request cannot be fulfilled. By + default it passes the message on to log_message(). + + Arguments are the same as for log_message(). + + XXX This should go to the separate error log. + + """ + + self.log_message(format, *args) + + def log_message(self, format, *args): + """Log an arbitrary message. + + This is used by all other logging functions. Override + it if you have specific logging wishes. + + The first argument, FORMAT, is a format string for the + message to be logged. If the format string contains + any % escapes requiring parameters, they should be + specified as subsequent arguments (it's just like + printf!). + + The client host and current date/time are prefixed to + every message. 
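+
+        For example, self.log_message('"%s" %s', self.requestline, str(code))
+        writes a line roughly like:
+
+            127.0.0.1 - - [02/Jan/2018 10:30:00] "GET / HTTP/1.1" 200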
+ + """ + + sys.stderr.write("%s - - [%s] %s\n" % + (self.address_string(), + self.log_date_time_string(), + format%args)) + + def version_string(self): + """Return the server software version string.""" + return self.server_version + ' ' + self.sys_version + + def date_time_string(self, timestamp=None): + """Return the current date and time formatted for a message header.""" + if timestamp is None: + timestamp = time.time() + year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp) + s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + self.weekdayname[wd], + day, self.monthname[month], year, + hh, mm, ss) + return s + + def log_date_time_string(self): + """Return the current time formatted for logging.""" + now = time.time() + year, month, day, hh, mm, ss, x, y, z = time.localtime(now) + s = "%02d/%3s/%04d %02d:%02d:%02d" % ( + day, self.monthname[month], year, hh, mm, ss) + return s + + weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + + monthname = [None, + 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', + 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] + + def address_string(self): + """Return the client address formatted for logging. + + This version looks up the full hostname using gethostbyaddr(), + and tries to find a name that contains at least one dot. + + """ + + host, port = self.client_address[:2] + return socket.getfqdn(host) + + # Essentially static class variables + + # The version of the HTTP protocol we support. + # Set this to HTTP/1.1 to enable automatic keepalive + protocol_version = "HTTP/1.0" + + # The Message-like class used to parse headers + MessageClass = mimetools.Message + + # Table mapping response codes to messages; entries have the + # form {code: (shortmessage, longmessage)}. + # See RFC 2616. + responses = { + 100: ('Continue', 'Request received, please continue'), + 101: ('Switching Protocols', + 'Switching to new protocol; obey Upgrade header'), + + 200: ('OK', 'Request fulfilled, document follows'), + 201: ('Created', 'Document created, URL follows'), + 202: ('Accepted', + 'Request accepted, processing continues off-line'), + 203: ('Non-Authoritative Information', 'Request fulfilled from cache'), + 204: ('No Content', 'Request fulfilled, nothing follows'), + 205: ('Reset Content', 'Clear input form for further input.'), + 206: ('Partial Content', 'Partial content follows.'), + + 300: ('Multiple Choices', + 'Object has several resources -- see URI list'), + 301: ('Moved Permanently', 'Object moved permanently -- see URI list'), + 302: ('Found', 'Object moved temporarily -- see URI list'), + 303: ('See Other', 'Object moved -- see Method and URL list'), + 304: ('Not Modified', + 'Document has not changed since given time'), + 305: ('Use Proxy', + 'You must use proxy specified in Location to access this ' + 'resource.'), + 307: ('Temporary Redirect', + 'Object moved temporarily -- see URI list'), + + 400: ('Bad Request', + 'Bad request syntax or unsupported method'), + 401: ('Unauthorized', + 'No permission -- see authorization schemes'), + 402: ('Payment Required', + 'No payment -- see charging schemes'), + 403: ('Forbidden', + 'Request forbidden -- authorization will not help'), + 404: ('Not Found', 'Nothing matches the given URI'), + 405: ('Method Not Allowed', + 'Specified method is invalid for this resource.'), + 406: ('Not Acceptable', 'URI not available in preferred format.'), + 407: ('Proxy Authentication Required', 'You must authenticate with ' + 'this proxy before proceeding.'), + 408: ('Request Timeout', 'Request timed out; try again 
later.'), + 409: ('Conflict', 'Request conflict.'), + 410: ('Gone', + 'URI no longer exists and has been permanently removed.'), + 411: ('Length Required', 'Client must specify Content-Length.'), + 412: ('Precondition Failed', 'Precondition in headers is false.'), + 413: ('Request Entity Too Large', 'Entity is too large.'), + 414: ('Request-URI Too Long', 'URI is too long.'), + 415: ('Unsupported Media Type', 'Entity body in unsupported format.'), + 416: ('Requested Range Not Satisfiable', + 'Cannot satisfy request range.'), + 417: ('Expectation Failed', + 'Expect condition could not be satisfied.'), + + 500: ('Internal Server Error', 'Server got itself in trouble'), + 501: ('Not Implemented', + 'Server does not support this operation'), + 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'), + 503: ('Service Unavailable', + 'The server cannot process the request due to a high load'), + 504: ('Gateway Timeout', + 'The gateway server did not receive a timely response'), + 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'), + } + + +def test(HandlerClass = BaseHTTPRequestHandler, + ServerClass = HTTPServer, protocol="HTTP/1.0"): + """Test the HTTP request handler class. + + This runs an HTTP server on port 8000 (or the first command line + argument). + + """ + + if sys.argv[1:]: + port = int(sys.argv[1]) + else: + port = 8000 + server_address = ('', port) + + HandlerClass.protocol_version = protocol + httpd = ServerClass(server_address, HandlerClass) + + sa = httpd.socket.getsockname() + print ("Serving HTTP on", sa[0], "port", sa[1], "...") + httpd.serve_forever() + + +if __name__ == '__main__': + test() diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_SimpleXMLRPCServer.py b/ptvsd/pydevd/_pydev_imps/_pydev_SimpleXMLRPCServer.py new file mode 100644 index 00000000..c5f77426 --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_SimpleXMLRPCServer.py @@ -0,0 +1,601 @@ +#Just a copy of the version in python 2.5 to be used if it's not available in jython 2.1 + +"""Simple XML-RPC Server. + +This module can be used to create simple XML-RPC servers +by creating a server and either installing functions, a +class instance, or by extending the SimpleXMLRPCServer +class. + +It can also be used to handle XML-RPC requests in a CGI +environment using CGIXMLRPCRequestHandler. + +A list of possible usage patterns follows: + +1. Install functions: + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_function(pow) +server.register_function(lambda x,y: x+y, 'add') +server.serve_forever() + +2. Install an instance: + +class MyFuncs: + def __init__(self): + # make all of the string functions available through + # string.func_name + import string + self.string = string + def _listMethods(self): + # implement this method so that system.listMethods + # knows to advertise the strings methods + return list_public_methods(self) + \ + ['string.' + method for method in list_public_methods(self.string)] + def pow(self, x, y): return pow(x, y) + def add(self, x, y) : return x + y + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(MyFuncs()) +server.serve_forever() + +3. 
Install an instance with custom dispatch method: + +class Math: + def _listMethods(self): + # this method must be present for system.listMethods + # to work + return ['add', 'pow'] + def _methodHelp(self, method): + # this method must be present for system.methodHelp + # to work + if method == 'add': + return "add(2,3) => 5" + elif method == 'pow': + return "pow(x, y[, z]) => number" + else: + # By convention, return empty + # string if no help is available + return "" + def _dispatch(self, method, params): + if method == 'pow': + return pow(*params) + elif method == 'add': + return params[0] + params[1] + else: + raise 'bad method' + +server = SimpleXMLRPCServer(("localhost", 8000)) +server.register_introspection_functions() +server.register_instance(Math()) +server.serve_forever() + +4. Subclass SimpleXMLRPCServer: + +class MathServer(SimpleXMLRPCServer): + def _dispatch(self, method, params): + try: + # We are forcing the 'export_' prefix on methods that are + # callable through XML-RPC to prevent potential security + # problems + func = getattr(self, 'export_' + method) + except AttributeError: + raise Exception('method "%s" is not supported' % method) + else: + return func(*params) + + def export_add(self, x, y): + return x + y + +server = MathServer(("localhost", 8000)) +server.serve_forever() + +5. CGI script: + +server = CGIXMLRPCRequestHandler() +server.register_function(pow) +server.handle_request() +""" + +# Written by Brian Quinlan (brian@sweetapp.com). +# Based on code written by Fredrik Lundh. + +from _pydev_imps import _pydev_xmlrpclib as xmlrpclib +from _pydev_imps._pydev_xmlrpclib import Fault +from _pydev_imps import _pydev_SocketServer as SocketServer +from _pydev_imps import _pydev_BaseHTTPServer as BaseHTTPServer +import sys +import os +try: + import fcntl +except ImportError: + fcntl = None + +def resolve_dotted_attribute(obj, attr, allow_dotted_names=True): + """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d + + Resolves a dotted attribute name to an object. Raises + an AttributeError if any attribute in the chain starts with a '_'. + + If the optional allow_dotted_names argument is false, dots are not + supported and this function operates similar to getattr(obj, attr). + """ + + if allow_dotted_names: + attrs = attr.split('.') + else: + attrs = [attr] + + for i in attrs: + if i.startswith('_'): + raise AttributeError( + 'attempt to access private attribute "%s"' % i + ) + else: + obj = getattr(obj, i) + return obj + +def list_public_methods(obj): + """Returns a list of attribute strings, found in the specified + object, which represent callable attributes""" + + return [member for member in dir(obj) + if not member.startswith('_') and + callable(getattr(obj, member))] + +def remove_duplicates(lst): + """remove_duplicates([2,2,2,1,3,3]) => [3,1,2] + + Returns a copy of a list without duplicates. Every list + item must be hashable and the order of the items in the + resulting list is not defined. + """ + u = {} + for x in lst: + u[x] = 1 + + return u.keys() + +class SimpleXMLRPCDispatcher: + """Mix-in class that dispatches XML-RPC requests. + + This class is used to register XML-RPC method handlers + and then to dispatch them. There should never be any + reason to instantiate this class directly. 
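+
+    In this copy it is used as a mix-in by SimpleXMLRPCServer and
+    CGIXMLRPCRequestHandler further below.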
+ """ + + def __init__(self, allow_none, encoding): + self.funcs = {} + self.instance = None + self.allow_none = allow_none + self.encoding = encoding + + def register_instance(self, instance, allow_dotted_names=False): + """Registers an instance to respond to XML-RPC requests. + + Only one instance can be installed at a time. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. Methods beginning with an '_' + are considered private and will not be called by + SimpleXMLRPCServer. + + If a registered function matches a XML-RPC request, then it + will be called instead of the registered instance. + + If the optional allow_dotted_names argument is true and the + instance does not have a _dispatch method, method names + containing dots are supported and resolved, as long as none of + the name segments start with an '_'. + + *** SECURITY WARNING: *** + + Enabling the allow_dotted_names options allows intruders + to access your module's global variables and may allow + intruders to execute arbitrary code on your machine. Only + use this option on a secure, closed network. + + """ + + self.instance = instance + self.allow_dotted_names = allow_dotted_names + + def register_function(self, function, name=None): + """Registers a function to respond to XML-RPC requests. + + The optional name argument can be used to set a Unicode name + for the function. + """ + + if name is None: + name = function.__name__ + self.funcs[name] = function + + def register_introspection_functions(self): + """Registers the XML-RPC introspection methods in the system + namespace. + + see http://xmlrpc.usefulinc.com/doc/reserved.html + """ + + self.funcs.update({'system.listMethods' : self.system_listMethods, + 'system.methodSignature' : self.system_methodSignature, + 'system.methodHelp' : self.system_methodHelp}) + + def register_multicall_functions(self): + """Registers the XML-RPC multicall method in the system + namespace. + + see http://www.xmlrpc.com/discuss/msgReader$1208""" + + self.funcs.update({'system.multicall' : self.system_multicall}) + + def _marshaled_dispatch(self, data, dispatch_method=None): + """Dispatches an XML-RPC method from marshalled (XML) data. + + XML-RPC methods are dispatched from the marshalled (XML) data + using the _dispatch method and the result is returned as + marshalled data. For backwards compatibility, a dispatch + function can be provided as an argument (see comment in + SimpleXMLRPCRequestHandler.do_POST) but overriding the + existing method through subclassing is the prefered means + of changing method dispatch behavior. 
+ """ + try: + params, method = xmlrpclib.loads(data) + + # generate response + if dispatch_method is not None: + response = dispatch_method(method, params) + else: + response = self._dispatch(method, params) + # wrap response in a singleton tuple + response = (response,) + response = xmlrpclib.dumps(response, methodresponse=1, + allow_none=self.allow_none, encoding=self.encoding) + except Fault, fault: + response = xmlrpclib.dumps(fault, allow_none=self.allow_none, + encoding=self.encoding) + except: + # report exception back to server + response = xmlrpclib.dumps( + xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value)), #@UndefinedVariable exc_value only available when we actually have an exception + encoding=self.encoding, allow_none=self.allow_none, + ) + + return response + + def system_listMethods(self): + """system.listMethods() => ['add', 'subtract', 'multiple'] + + Returns a list of the methods supported by the server.""" + + methods = self.funcs.keys() + if self.instance is not None: + # Instance can implement _listMethod to return a list of + # methods + if hasattr(self.instance, '_listMethods'): + methods = remove_duplicates( + methods + self.instance._listMethods() + ) + # if the instance has a _dispatch method then we + # don't have enough information to provide a list + # of methods + elif not hasattr(self.instance, '_dispatch'): + methods = remove_duplicates( + methods + list_public_methods(self.instance) + ) + methods.sort() + return methods + + def system_methodSignature(self, method_name): + """system.methodSignature('add') => [double, int, int] + + Returns a list describing the signature of the method. In the + above example, the add method takes two integers as arguments + and returns a double result. + + This server does NOT support system.methodSignature.""" + + # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html + + return 'signatures not supported' + + def system_methodHelp(self, method_name): + """system.methodHelp('add') => "Adds two integers together" + + Returns a string containing documentation for the specified method.""" + + method = None + if self.funcs.has_key(method_name): + method = self.funcs[method_name] + elif self.instance is not None: + # Instance can implement _methodHelp to return help for a method + if hasattr(self.instance, '_methodHelp'): + return self.instance._methodHelp(method_name) + # if the instance has a _dispatch method then we + # don't have enough information to provide help + elif not hasattr(self.instance, '_dispatch'): + try: + method = resolve_dotted_attribute( + self.instance, + method_name, + self.allow_dotted_names + ) + except AttributeError: + pass + + # Note that we aren't checking that the method actually + # be a callable object of some kind + if method is None: + return "" + else: + try: + import pydoc + except ImportError: + return "" #not there for jython + else: + return pydoc.getdoc(method) + + def system_multicall(self, call_list): + """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \ +[[4], ...] + + Allows the caller to package multiple XML-RPC calls into a single + request. + + See http://www.xmlrpc.com/discuss/msgReader$1208 + """ + + results = [] + for call in call_list: + method_name = call['methodName'] + params = call['params'] + + try: + # XXX A marshalling error in any response will fail the entire + # multicall. If someone cares they should fix this. 
+ results.append([self._dispatch(method_name, params)]) + except Fault, fault: + results.append( + {'faultCode' : fault.faultCode, + 'faultString' : fault.faultString} + ) + except: + results.append( + {'faultCode' : 1, + 'faultString' : "%s:%s" % (sys.exc_type, sys.exc_value)} #@UndefinedVariable exc_value only available when we actually have an exception + ) + return results + + def _dispatch(self, method, params): + """Dispatches the XML-RPC method. + + XML-RPC calls are forwarded to a registered function that + matches the called XML-RPC method name. If no such function + exists then the call is forwarded to the registered instance, + if available. + + If the registered instance has a _dispatch method then that + method will be called with the name of the XML-RPC method and + its parameters as a tuple + e.g. instance._dispatch('add',(2,3)) + + If the registered instance does not have a _dispatch method + then the instance will be searched to find a matching method + and, if found, will be called. + + Methods beginning with an '_' are considered private and will + not be called. + """ + + func = None + try: + # check to see if a matching function has been registered + func = self.funcs[method] + except KeyError: + if self.instance is not None: + # check for a _dispatch method + if hasattr(self.instance, '_dispatch'): + return self.instance._dispatch(method, params) + else: + # call instance method directly + try: + func = resolve_dotted_attribute( + self.instance, + method, + self.allow_dotted_names + ) + except AttributeError: + pass + + if func is not None: + return func(*params) + else: + raise Exception('method "%s" is not supported' % method) + +class SimpleXMLRPCRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): + """Simple XML-RPC request handler class. + + Handles all HTTP POST requests and attempts to decode them as + XML-RPC requests. + """ + + # Class attribute listing the accessible path components; + # paths not on this list will result in a 404 error. + rpc_paths = ('/', '/RPC2') + + def is_rpc_path_valid(self): + if self.rpc_paths: + return self.path in self.rpc_paths + else: + # If .rpc_paths is empty, just assume all paths are legal + return True + + def do_POST(self): + """Handles the HTTP POST request. + + Attempts to interpret all HTTP POST requests as XML-RPC calls, + which are forwarded to the server's _dispatch method for handling. + """ + + # Check that the path is legal + if not self.is_rpc_path_valid(): + self.report_404() + return + + try: + # Get arguments by reading body of request. + # We read this in chunks to avoid straining + # socket.read(); around the 10 or 15Mb mark, some platforms + # begin to have problems (bug #792570). + max_chunk_size = 10 * 1024 * 1024 + size_remaining = int(self.headers["content-length"]) + L = [] + while size_remaining: + chunk_size = min(size_remaining, max_chunk_size) + L.append(self.rfile.read(chunk_size)) + size_remaining -= len(L[-1]) + data = ''.join(L) + + # In previous versions of SimpleXMLRPCServer, _dispatch + # could be overridden in this class, instead of in + # SimpleXMLRPCDispatcher. To maintain backwards compatibility, + # check to see if a subclass implements _dispatch and dispatch + # using that method if present. 
+ response = self.server._marshaled_dispatch( + data, getattr(self, '_dispatch', None) + ) + except: # This should only happen if the module is buggy + # internal error, report as HTTP server error + self.send_response(500) + self.end_headers() + else: + # got a valid XML RPC response + self.send_response(200) + self.send_header("Content-type", "text/xml") + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + + # shut down the connection + self.wfile.flush() + self.connection.shutdown(1) + + def report_404 (self): + # Report a 404 error + self.send_response(404) + response = 'No such page' + self.send_header("Content-type", "text/plain") + self.send_header("Content-length", str(len(response))) + self.end_headers() + self.wfile.write(response) + # shut down the connection + self.wfile.flush() + self.connection.shutdown(1) + + def log_request(self, code='-', size='-'): + """Selectively log an accepted request.""" + + if self.server.logRequests: + BaseHTTPServer.BaseHTTPRequestHandler.log_request(self, code, size) + +class SimpleXMLRPCServer(SocketServer.TCPServer, + SimpleXMLRPCDispatcher): + """Simple XML-RPC server. + + Simple XML-RPC server that allows functions and a single instance + to be installed to handle requests. The default implementation + attempts to dispatch XML-RPC calls to the functions or instance + installed in the server. Override the _dispatch method inhereted + from SimpleXMLRPCDispatcher to change this behavior. + """ + + allow_reuse_address = True + + def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, + logRequests=True, allow_none=False, encoding=None): + self.logRequests = logRequests + + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding) + SocketServer.TCPServer.__init__(self, addr, requestHandler) + + # [Bug #1222790] If possible, set close-on-exec flag; if a + # method spawns a subprocess, the subprocess shouldn't have + # the listening socket open. + if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'): + flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD) + flags |= fcntl.FD_CLOEXEC + fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags) + +class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): + """Simple handler for XML-RPC data passed through CGI.""" + + def __init__(self, allow_none=False, encoding=None): + SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding) + + def handle_xmlrpc(self, request_text): + """Handle a single XML-RPC request""" + + response = self._marshaled_dispatch(request_text) + + sys.stdout.write('Content-Type: text/xml\n') + sys.stdout.write('Content-Length: %d\n' % len(response)) + sys.stdout.write('\n') + + sys.stdout.write(response) + + def handle_get(self): + """Handle a single HTTP GET request. + + Default implementation indicates an error because + XML-RPC uses the POST method. + """ + + code = 400 + message, explain = \ + BaseHTTPServer.BaseHTTPRequestHandler.responses[code] + + response = BaseHTTPServer.DEFAULT_ERROR_MESSAGE % { #@UndefinedVariable + 'code' : code, + 'message' : message, + 'explain' : explain + } + sys.stdout.write('Status: %d %s\n' % (code, message)) + sys.stdout.write('Content-Type: text/html\n') + sys.stdout.write('Content-Length: %d\n' % len(response)) + sys.stdout.write('\n') + + sys.stdout.write(response) + + def handle_request(self, request_text=None): + """Handle a single XML-RPC request passed through a CGI post method. + + If no XML data is given then it is read from stdin. 
The resulting + XML-RPC response is printed to stdout along with the correct HTTP + headers. + """ + + if request_text is None and \ + os.environ.get('REQUEST_METHOD', None) == 'GET': + self.handle_get() + else: + # POST data is normally available through stdin + if request_text is None: + request_text = sys.stdin.read() + + self.handle_xmlrpc(request_text) + +if __name__ == '__main__': + sys.stdout.write('Running XML-RPC server on port 8000\n') + server = SimpleXMLRPCServer(("localhost", 8000)) + server.register_function(pow) + server.register_function(lambda x, y: x + y, 'add') + server.serve_forever() diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_SocketServer.py b/ptvsd/pydevd/_pydev_imps/_pydev_SocketServer.py new file mode 100644 index 00000000..7af2777a --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_SocketServer.py @@ -0,0 +1,715 @@ +"""Generic socket server classes. + +This module tries to capture the various aspects of defining a server: + +For socket-based servers: + +- address family: + - AF_INET{,6}: IP (Internet Protocol) sockets (default) + - AF_UNIX: Unix domain sockets + - others, e.g. AF_DECNET are conceivable (see +- socket type: + - SOCK_STREAM (reliable stream, e.g. TCP) + - SOCK_DGRAM (datagrams, e.g. UDP) + +For request-based servers (including socket-based): + +- client address verification before further looking at the request + (This is actually a hook for any processing that needs to look + at the request before anything else, e.g. logging) +- how to handle multiple requests: + - synchronous (one request is handled at a time) + - forking (each request is handled by a new process) + - threading (each request is handled by a new thread) + +The classes in this module favor the server type that is simplest to +write: a synchronous TCP/IP server. This is bad class design, but +save some typing. (There's also the issue that a deep class hierarchy +slows down method lookups.) + +There are five classes in an inheritance diagram, four of which represent +synchronous servers of four types: + + +------------+ + | BaseServer | + +------------+ + | + v + +-----------+ +------------------+ + | TCPServer |------->| UnixStreamServer | + +-----------+ +------------------+ + | + v + +-----------+ +--------------------+ + | UDPServer |------->| UnixDatagramServer | + +-----------+ +--------------------+ + +Note that UnixDatagramServer derives from UDPServer, not from +UnixStreamServer -- the only difference between an IP and a Unix +stream server is the address family, which is simply repeated in both +unix server classes. + +Forking and threading versions of each type of server can be created +using the ForkingMixIn and ThreadingMixIn mix-in classes. For +instance, a threading UDP server class is created as follows: + + class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass + +The Mix-in class must come first, since it overrides a method defined +in UDPServer! Setting the various member variables also changes +the behavior of the underlying server mechanism. + +To implement a service, you must derive a class from +BaseRequestHandler and redefine its handle() method. You can then run +various versions of the service by combining one of the server classes +with your request handler class. + +The request handler class must be different for datagram or stream +services. This can be hidden by using the request handler +subclasses StreamRequestHandler or DatagramRequestHandler. + +Of course, you still have to use your head! 
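The recipe this docstring describes -- subclass BaseRequestHandler, override handle(), and combine a mix-in with a server class -- looks roughly like the following sketch (EchoHandler and the port choice are hypothetical; for stream servers self.request is the connected socket):

import SocketServer   # the stdlib module this vendored copy is based on

class EchoHandler(SocketServer.BaseRequestHandler):
    def handle(self):
        data = self.request.recv(1024)    # read one chunk from the client
        if data:
            self.request.sendall(data)    # echo it back

# Mix-in first, so its process_request() takes precedence over the base one.
class ThreadedEchoServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    pass

server = ThreadedEchoServer(('localhost', 0), EchoHandler)   # port 0: any free port
server.serve_forever()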
+ +For instance, it makes no sense to use a forking server if the service +contains state in memory that can be modified by requests (since the +modifications in the child process would never reach the initial state +kept in the parent process and passed to each child). In this case, +you can use a threading server, but you will probably have to use +locks to avoid two requests that come in nearly simultaneous to apply +conflicting changes to the server state. + +On the other hand, if you are building e.g. an HTTP server, where all +data is stored externally (e.g. in the file system), a synchronous +class will essentially render the service "deaf" while one request is +being handled -- which may be for a very long time if a client is slow +to read all the data it has requested. Here a threading or forking +server is appropriate. + +In some cases, it may be appropriate to process part of a request +synchronously, but to finish processing in a forked child depending on +the request data. This can be implemented by using a synchronous +server and doing an explicit fork in the request handler class +handle() method. + +Another approach to handling multiple simultaneous requests in an +environment that supports neither threads nor fork (or where these are +too expensive or inappropriate for the service) is to maintain an +explicit table of partially finished requests and to use select() to +decide which request to work on next (or whether to handle a new +incoming request). This is particularly important for stream services +where each client can potentially be connected for a long time (if +threads or subprocesses cannot be used). + +Future work: +- Standard classes for Sun RPC (which uses either UDP or TCP) +- Standard mix-in classes to implement various authentication + and encryption schemes +- Standard framework for select-based multiplexing + +XXX Open problems: +- What to do with out-of-band data? + +BaseServer: +- split generic "request" functionality out into BaseServer class. + Copyright (C) 2000 Luke Kenneth Casson Leighton + + example: read entries from a SQL database (requires overriding + get_request() to return a table entry from the database). + entry is processed by a RequestHandlerClass. + +""" + +# Author of the BaseServer patch: Luke Kenneth Casson Leighton + +# XXX Warning! +# There is a test suite for this module, but it cannot be run by the +# standard regression test. +# To run it manually, run Lib/test/test_socketserver.py. + +__version__ = "0.4" + + +from _pydev_imps._pydev_saved_modules import socket +from _pydev_imps._pydev_saved_modules import select +import sys +import os +try: + from _pydev_imps._pydev_saved_modules import threading +except ImportError: + import dummy_threading as threading + +__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer", + "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler", + "StreamRequestHandler","DatagramRequestHandler", + "ThreadingMixIn", "ForkingMixIn"] +if hasattr(socket, "AF_UNIX"): + __all__.extend(["UnixStreamServer","UnixDatagramServer", + "ThreadingUnixStreamServer", + "ThreadingUnixDatagramServer"]) + +class BaseServer: + + """Base class for server classes. 
+ + Methods for the caller: + + - __init__(server_address, RequestHandlerClass) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you do not use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - server_close() + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - allow_reuse_address + + Instance variables: + + - RequestHandlerClass + - socket + + """ + + timeout = None + + def __init__(self, server_address, RequestHandlerClass): + """Constructor. May be extended, do not override.""" + self.server_address = server_address + self.RequestHandlerClass = RequestHandlerClass + self.__is_shut_down = threading.Event() # @UndefinedVariable + self.__shutdown_request = False + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + pass + + def serve_forever(self, poll_interval=0.5): + """Handle one request at a time until shutdown. + + Polls for shutdown every poll_interval seconds. Ignores + self.timeout. If you need to do periodic tasks, do them in + another thread. + """ + self.__is_shut_down.clear() + try: + while not self.__shutdown_request: + # XXX: Consider using another file descriptor or + # connecting to the socket to wake this up instead of + # polling. Polling reduces our responsiveness to a + # shutdown request and wastes cpu at all other times. + r, w, e = select.select([self], [], [], poll_interval) + if self in r: + self._handle_request_noblock() + finally: + self.__shutdown_request = False + self.__is_shut_down.set() + + def shutdown(self): + """Stops the serve_forever loop. + + Blocks until the loop has finished. This must be called while + serve_forever() is running in another thread, or it will + deadlock. + """ + self.__shutdown_request = True + self.__is_shut_down.wait() + + # The distinction between handling, getting, processing and + # finishing a request is fairly arbitrary. Remember: + # + # - handle_request() is the top-level call. It calls + # select, get_request(), verify_request() and process_request() + # - get_request() is different for stream or datagram sockets + # - process_request() is the place that may fork a new process + # or create a new thread to finish the request + # - finish_request() instantiates the request handler class; + # this constructor will handle the request all by itself + + def handle_request(self): + """Handle one request, possibly blocking. + + Respects self.timeout. + """ + # Support people who used socket.settimeout() to escape + # handle_request before self.timeout was available. + timeout = self.socket.gettimeout() + if timeout is None: + timeout = self.timeout + elif self.timeout is not None: + timeout = min(timeout, self.timeout) + fd_sets = select.select([self], [], [], timeout) + if not fd_sets[0]: + self.handle_timeout() + return + self._handle_request_noblock() + + def _handle_request_noblock(self): + """Handle one request, without blocking. 
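Because shutdown() only sets a flag and then blocks until the serve_forever() loop notices it, the two calls must run in different threads. A sketch, reusing the hypothetical ThreadedEchoServer/EchoHandler names from the earlier example:

import threading

server = ThreadedEchoServer(('localhost', 0), EchoHandler)
worker = threading.Thread(target=server.serve_forever)
worker.start()
# ... serve requests for a while ...
server.shutdown()       # returns only after serve_forever() has exited
worker.join()
server.server_close()   # release the listening socket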
+ + I assume that select.select has returned that the socket is + readable before this function was called, so there should be + no risk of blocking in get_request(). + """ + try: + request, client_address = self.get_request() + except socket.error: + return + if self.verify_request(request, client_address): + try: + self.process_request(request, client_address) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def handle_timeout(self): + """Called if no new request arrives within self.timeout. + + Overridden by ForkingMixIn. + """ + pass + + def verify_request(self, request, client_address): + """Verify the request. May be overridden. + + Return True if we should proceed with this request. + + """ + return True + + def process_request(self, request, client_address): + """Call finish_request. + + Overridden by ForkingMixIn and ThreadingMixIn. + + """ + self.finish_request(request, client_address) + self.shutdown_request(request) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. + + """ + pass + + def finish_request(self, request, client_address): + """Finish one request by instantiating RequestHandlerClass.""" + self.RequestHandlerClass(request, client_address, self) + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + pass + + def handle_error(self, request, client_address): + """Handle an error gracefully. May be overridden. + + The default is to print a traceback and continue. + + """ + print '-'*40 + print 'Exception happened during processing of request from', + print client_address + import traceback + traceback.print_exc() # XXX But this goes to stderr! + print '-'*40 + + +class TCPServer(BaseServer): + + """Base class for various socket-based server classes. + + Defaults to synchronous IP stream (i.e., TCP). + + Methods for the caller: + + - __init__(server_address, RequestHandlerClass, bind_and_activate=True) + - serve_forever(poll_interval=0.5) + - shutdown() + - handle_request() # if you don't use serve_forever() + - fileno() -> int # for select() + + Methods that may be overridden: + + - server_bind() + - server_activate() + - get_request() -> request, client_address + - handle_timeout() + - verify_request(request, client_address) + - process_request(request, client_address) + - shutdown_request(request) + - close_request(request) + - handle_error() + + Methods for derived classes: + + - finish_request(request, client_address) + + Class variables that may be overridden by derived classes or + instances: + + - timeout + - address_family + - socket_type + - request_queue_size (only for stream sockets) + - allow_reuse_address + + Instance variables: + + - server_address + - RequestHandlerClass + - socket + + """ + + address_family = socket.AF_INET + + socket_type = socket.SOCK_STREAM + + request_queue_size = 5 + + allow_reuse_address = False + + def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): + """Constructor. May be extended, do not override.""" + BaseServer.__init__(self, server_address, RequestHandlerClass) + self.socket = socket.socket(self.address_family, + self.socket_type) + if bind_and_activate: + self.server_bind() + self.server_activate() + + def server_bind(self): + """Called by constructor to bind the socket. + + May be overridden. 
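verify_request() is the natural hook for cheap per-connection policy; a hypothetical sketch:

import SocketServer

class LocalOnlyTCPServer(SocketServer.TCPServer):
    def verify_request(self, request, client_address):
        # Drop anything that is not coming from the loopback interface.
        return client_address[0] == '127.0.0.1'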
+ + """ + if self.allow_reuse_address: + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.bind(self.server_address) + self.server_address = self.socket.getsockname() + + def server_activate(self): + """Called by constructor to activate the server. + + May be overridden. + + """ + self.socket.listen(self.request_queue_size) + + def server_close(self): + """Called to clean-up the server. + + May be overridden. + + """ + self.socket.close() + + def fileno(self): + """Return socket file number. + + Interface required by select(). + + """ + return self.socket.fileno() + + def get_request(self): + """Get the request and client address from the socket. + + May be overridden. + + """ + return self.socket.accept() + + def shutdown_request(self, request): + """Called to shutdown and close an individual request.""" + try: + #explicitly shutdown. socket.close() merely releases + #the socket and waits for GC to perform the actual close. + request.shutdown(socket.SHUT_WR) + except socket.error: + pass #some platforms may raise ENOTCONN here + self.close_request(request) + + def close_request(self, request): + """Called to clean up an individual request.""" + request.close() + + +class UDPServer(TCPServer): + + """UDP server class.""" + + allow_reuse_address = False + + socket_type = socket.SOCK_DGRAM + + max_packet_size = 8192 + + def get_request(self): + data, client_addr = self.socket.recvfrom(self.max_packet_size) + return (data, self.socket), client_addr + + def server_activate(self): + # No need to call listen() for UDP. + pass + + def shutdown_request(self, request): + # No need to shutdown anything. + self.close_request(request) + + def close_request(self, request): + # No need to close anything. + pass + +class ForkingMixIn: + + """Mix-in class to handle each request in a new process.""" + + timeout = 300 + active_children = None + max_children = 40 + + def collect_children(self): + """Internal routine to wait for children that have exited.""" + if self.active_children is None: return + while len(self.active_children) >= self.max_children: + # XXX: This will wait for any child process, not just ones + # spawned by this library. This could confuse other + # libraries that expect to be able to wait for their own + # children. + try: + pid, status = os.waitpid(0, 0) + except os.error: + pid = None + if pid not in self.active_children: continue + self.active_children.remove(pid) + + # XXX: This loop runs more system calls than it ought + # to. There should be a way to put the active_children into a + # process group and then use os.waitpid(-pgid) to wait for any + # of that set, but I couldn't find a way to allocate pgids + # that couldn't collide. + for child in self.active_children: + try: + pid, status = os.waitpid(child, os.WNOHANG) # @UndefinedVariable + except os.error: + pid = None + if not pid: continue + try: + self.active_children.remove(pid) + except ValueError, e: + raise ValueError('%s. x=%d and list=%r' % (e.message, pid, + self.active_children)) + + def handle_timeout(self): + """Wait for zombies after self.timeout seconds of inactivity. + + May be extended, do not override. 
+ """ + self.collect_children() + + def process_request(self, request, client_address): + """Fork a new subprocess to process the request.""" + self.collect_children() + pid = os.fork() # @UndefinedVariable + if pid: + # Parent process + if self.active_children is None: + self.active_children = [] + self.active_children.append(pid) + self.close_request(request) #close handle in parent process + return + else: + # Child process. + # This must never return, hence os._exit()! + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + os._exit(0) + except: + try: + self.handle_error(request, client_address) + self.shutdown_request(request) + finally: + os._exit(1) + + +class ThreadingMixIn: + """Mix-in class to handle each request in a new thread.""" + + # Decides how threads will act upon termination of the + # main process + daemon_threads = False + + def process_request_thread(self, request, client_address): + """Same as in BaseServer but as a thread. + + In addition, exception handling is done here. + + """ + try: + self.finish_request(request, client_address) + self.shutdown_request(request) + except: + self.handle_error(request, client_address) + self.shutdown_request(request) + + def process_request(self, request, client_address): + """Start a new thread to process the request.""" + t = threading.Thread(target = self.process_request_thread, # @UndefinedVariable + args = (request, client_address)) + t.daemon = self.daemon_threads + t.start() + + +class ForkingUDPServer(ForkingMixIn, UDPServer): pass +class ForkingTCPServer(ForkingMixIn, TCPServer): pass + +class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass +class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass + +if hasattr(socket, 'AF_UNIX'): + + class UnixStreamServer(TCPServer): + address_family = socket.AF_UNIX # @UndefinedVariable + + class UnixDatagramServer(UDPServer): + address_family = socket.AF_UNIX # @UndefinedVariable + + class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass + + class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass + +class BaseRequestHandler: + + """Base class for request handler classes. + + This class is instantiated for each request to be handled. The + constructor sets the instance variables request, client_address + and server, and then calls the handle() method. To implement a + specific service, all you need to do is to derive a class which + defines a handle() method. + + The handle() method can find the request as self.request, the + client address as self.client_address, and the server (in case it + needs access to per-server information) as self.server. Since a + separate instance is created for each request, the handle() method + can define arbitrary other instance variariables. + + """ + + def __init__(self, request, client_address, server): + self.request = request + self.client_address = client_address + self.server = server + self.setup() + try: + self.handle() + finally: + self.finish() + + def setup(self): + pass + + def handle(self): + pass + + def finish(self): + pass + + +# The following two classes make it possible to use the same service +# class for stream or datagram servers. 
+# Each class sets up these instance variables: +# - rfile: a file object from which receives the request is read +# - wfile: a file object to which the reply is written +# When the handle() method returns, wfile is flushed properly + + +class StreamRequestHandler(BaseRequestHandler): + + """Define self.rfile and self.wfile for stream sockets.""" + + # Default buffer sizes for rfile, wfile. + # We default rfile to buffered because otherwise it could be + # really slow for large data (a getc() call per byte); we make + # wfile unbuffered because (a) often after a write() we want to + # read and we need to flush the line; (b) big writes to unbuffered + # files are typically optimized by stdio even when big reads + # aren't. + rbufsize = -1 + wbufsize = 0 + + # A timeout to apply to the request socket, if not None. + timeout = None + + # Disable nagle algorithm for this socket, if True. + # Use only when wbufsize != 0, to avoid small packets. + disable_nagle_algorithm = False + + def setup(self): + self.connection = self.request + if self.timeout is not None: + self.connection.settimeout(self.timeout) + if self.disable_nagle_algorithm: + self.connection.setsockopt(socket.IPPROTO_TCP, + socket.TCP_NODELAY, True) + self.rfile = self.connection.makefile('rb', self.rbufsize) + self.wfile = self.connection.makefile('wb', self.wbufsize) + + def finish(self): + if not self.wfile.closed: + self.wfile.flush() + self.wfile.close() + self.rfile.close() + + +class DatagramRequestHandler(BaseRequestHandler): + + # XXX Regrettably, I cannot get this working on Linux; + # s.recvfrom() doesn't return a meaningful client address. + + """Define self.rfile and self.wfile for datagram sockets.""" + + def setup(self): + try: + from cStringIO import StringIO + except ImportError: + from StringIO import StringIO + self.packet, self.socket = self.request + self.rfile = StringIO(self.packet) + self.wfile = StringIO() + + def finish(self): + self.socket.sendto(self.wfile.getvalue(), self.client_address) diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_execfile.py b/ptvsd/pydevd/_pydev_imps/_pydev_execfile.py new file mode 100644 index 00000000..c02f8ecf --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_execfile.py @@ -0,0 +1,25 @@ +#We must redefine it in Py3k if it's not already there +def execfile(file, glob=None, loc=None): + if glob is None: + import sys + glob = sys._getframe().f_back.f_globals + if loc is None: + loc = glob + + # It seems that the best way is using tokenize.open(): http://code.activestate.com/lists/python-dev/131251/ + # (but tokenize.open() is only available for python 3.2) + import tokenize + if hasattr(tokenize, 'open'): + # version 3.2 + stream = tokenize.open(file) # @UndefinedVariable + else: + # version 3.0 or 3.1 + detect_encoding = tokenize.detect_encoding(open(file, mode="rb" ).readline) + stream = open(file, encoding=detect_encoding[0]) + try: + contents = stream.read() + finally: + stream.close() + + #execute the script (note: it's important to compile first to have the filename set in debug mode) + exec(compile(contents+"\n", file, 'exec'), glob, loc) \ No newline at end of file diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_inspect.py b/ptvsd/pydevd/_pydev_imps/_pydev_inspect.py new file mode 100644 index 00000000..5fd33d87 --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_inspect.py @@ -0,0 +1,788 @@ +"""Get useful information from live Python objects. + +This module encapsulates the interface provided by the internal special +attributes (func_*, co_*, im_*, tb_*, etc.) 
in a friendlier fashion. +It also provides some help for examining source code and class layout. + +Here are some of the useful functions provided by this module: + + ismodule(), isclass(), ismethod(), isfunction(), istraceback(), + isframe(), iscode(), isbuiltin(), isroutine() - check object types + getmembers() - get members of an object that satisfy a given condition + + getfile(), getsourcefile(), getsource() - find an object's source code + getdoc(), getcomments() - get documentation on an object + getmodule() - determine the module that an object came from + getclasstree() - arrange classes so as to represent their hierarchy + + getargspec(), getargvalues() - get info about function arguments + formatargspec(), formatargvalues() - format an argument spec + getouterframes(), getinnerframes() - get info about frames + currentframe() - get the current stack frame + stack(), trace() - get info about frames on the stack or in a traceback +""" + +# This module is in the public domain. No warranties. + +__author__ = 'Ka-Ping Yee ' +__date__ = '1 Jan 2001' + +import sys, os, types, string, re, imp, tokenize + +# ----------------------------------------------------------- type-checking +def ismodule(object): + """Return true if the object is a module. + + Module objects provide these attributes: + __doc__ documentation string + __file__ filename (missing for built-in modules)""" + return isinstance(object, types.ModuleType) + +def isclass(object): + """Return true if the object is a class. + + Class objects provide these attributes: + __doc__ documentation string + __module__ name of module in which this class was defined""" + return isinstance(object, types.ClassType) or hasattr(object, '__bases__') + +def ismethod(object): + """Return true if the object is an instance method. + + Instance method objects provide these attributes: + __doc__ documentation string + __name__ name with which this method was defined + im_class class object in which this method belongs + im_func function object containing implementation of method + im_self instance to which this method is bound, or None""" + return isinstance(object, types.MethodType) + +def ismethoddescriptor(object): + """Return true if the object is a method descriptor. + + But not if ismethod() or isclass() or isfunction() are true. + + This is new in Python 2.2, and, for example, is true of int.__add__. + An object passing this test has a __get__ attribute but not a __set__ + attribute, but beyond that the set of attributes varies. __name__ is + usually sensible, and __doc__ often is. + + Methods implemented via descriptors that also pass one of the other + tests return false from the ismethoddescriptor() test, simply because + the other tests promise more -- you can, e.g., count on having the + im_func attribute (etc) when an object passes ismethod().""" + return (hasattr(object, "__get__") + and not hasattr(object, "__set__") # else it's a data descriptor + and not ismethod(object) # mutual exclusion + and not isfunction(object) + and not isclass(object)) + +def isfunction(object): + """Return true if the object is a user-defined function. 
+ + Function objects provide these attributes: + __doc__ documentation string + __name__ name with which this function was defined + func_code code object containing compiled function bytecode + func_defaults tuple of any default values for arguments + func_doc (same as __doc__) + func_globals global namespace in which this function was defined + func_name (same as __name__)""" + return isinstance(object, types.FunctionType) + +def istraceback(object): + """Return true if the object is a traceback. + + Traceback objects provide these attributes: + tb_frame frame object at this level + tb_lasti index of last attempted instruction in bytecode + tb_lineno current line number in Python source code + tb_next next inner traceback object (called by this level)""" + return isinstance(object, types.TracebackType) + +def isframe(object): + """Return true if the object is a frame object. + + Frame objects provide these attributes: + f_back next outer frame object (this frame's caller) + f_builtins built-in namespace seen by this frame + f_code code object being executed in this frame + f_exc_traceback traceback if raised in this frame, or None + f_exc_type exception type if raised in this frame, or None + f_exc_value exception value if raised in this frame, or None + f_globals global namespace seen by this frame + f_lasti index of last attempted instruction in bytecode + f_lineno current line number in Python source code + f_locals local namespace seen by this frame + f_restricted 0 or 1 if frame is in restricted execution mode + f_trace tracing function for this frame, or None""" + return isinstance(object, types.FrameType) + +def iscode(object): + """Return true if the object is a code object. + + Code objects provide these attributes: + co_argcount number of arguments (not including * or ** args) + co_code string of raw compiled bytecode + co_consts tuple of constants used in the bytecode + co_filename name of file in which this code object was created + co_firstlineno number of first line in Python source code + co_flags bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg + co_lnotab encoded mapping of line numbers to bytecode indices + co_name name with which this code object was defined + co_names tuple of names of local variables + co_nlocals number of local variables + co_stacksize virtual machine stack space required + co_varnames tuple of names of arguments and local variables""" + return isinstance(object, types.CodeType) + +def isbuiltin(object): + """Return true if the object is a built-in function or method. + + Built-in functions and methods provide these attributes: + __doc__ documentation string + __name__ original name of this function or method + __self__ instance to which a method is bound, or None""" + return isinstance(object, types.BuiltinFunctionType) + +def isroutine(object): + """Return true if the object is any kind of function or method.""" + return (isbuiltin(object) + or isfunction(object) + or ismethod(object) + or ismethoddescriptor(object)) + +def getmembers(object, predicate=None): + """Return all members of an object as (name, value) pairs sorted by name. + Optionally, only return members that satisfy a given predicate.""" + results = [] + for key in dir(object): + value = getattr(object, key) + if not predicate or predicate(value): + results.append((key, value)) + results.sort() + return results + +def classify_class_attrs(cls): + """Return list of attribute-descriptor tuples. 
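These predicates compose with getmembers(); for instance (shown with the stock inspect module, which provides the same functions and gives the same answers on Python 2):

import inspect

class Greeter(object):
    def greet(self, name):
        return 'Hello, ' + name

[n for n, v in inspect.getmembers(Greeter, inspect.ismethod)]       # -> ['greet']
inspect.ismethod(Greeter.greet), inspect.isfunction(Greeter.greet)  # -> (True, False) on Python 2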
+ + For each name in dir(cls), the return list contains a 4-tuple + with these elements: + + 0. The name (a string). + + 1. The kind of attribute this is, one of these strings: + 'class method' created via classmethod() + 'static method' created via staticmethod() + 'property' created via property() + 'method' any other flavor of method + 'data' not a method + + 2. The class which defined this attribute (a class). + + 3. The object as obtained directly from the defining class's + __dict__, not via getattr. This is especially important for + data attributes: C.data is just a data object, but + C.__dict__['data'] may be a data descriptor with additional + info, like a __doc__ string. + """ + + mro = getmro(cls) + names = dir(cls) + result = [] + for name in names: + # Get the object associated with the name. + # Getting an obj from the __dict__ sometimes reveals more than + # using getattr. Static and class methods are dramatic examples. + if name in cls.__dict__: + obj = cls.__dict__[name] + else: + obj = getattr(cls, name) + + # Figure out where it was defined. + homecls = getattr(obj, "__objclass__", None) + if homecls is None: + # search the dicts. + for base in mro: + if name in base.__dict__: + homecls = base + break + + # Get the object again, in order to get it from the defining + # __dict__ instead of via getattr (if possible). + if homecls is not None and name in homecls.__dict__: + obj = homecls.__dict__[name] + + # Also get the object via getattr. + obj_via_getattr = getattr(cls, name) + + # Classify the object. + if isinstance(obj, staticmethod): + kind = "static method" + elif isinstance(obj, classmethod): + kind = "class method" + elif isinstance(obj, property): + kind = "property" + elif (ismethod(obj_via_getattr) or + ismethoddescriptor(obj_via_getattr)): + kind = "method" + else: + kind = "data" + + result.append((name, kind, homecls, obj)) + + return result + +# ----------------------------------------------------------- class helpers +def _searchbases(cls, accum): + # Simulate the "classic class" search order. + if cls in accum: + return + accum.append(cls) + for base in cls.__bases__: + _searchbases(base, accum) + +def getmro(cls): + "Return tuple of base classes (including cls) in method resolution order." + if hasattr(cls, "__mro__"): + return cls.__mro__ + else: + result = [] + _searchbases(cls, result) + return tuple(result) + +# -------------------------------------------------- source code extraction +def indentsize(line): + """Return the indent size, in spaces, at the start of a line of text.""" + expline = string.expandtabs(line) + return len(expline) - len(string.lstrip(expline)) + +def getdoc(object): + """Get the documentation string for an object. + + All tabs are expanded to spaces. 
To clean up docstrings that are + indented to line up with blocks of code, any whitespace than can be + uniformly removed from the second line onwards is removed.""" + try: + doc = object.__doc__ + except AttributeError: + return None + if not isinstance(doc, (str, unicode)): + return None + try: + lines = string.split(string.expandtabs(doc), '\n') + except UnicodeError: + return None + else: + margin = None + for line in lines[1:]: + content = len(string.lstrip(line)) + if not content: continue + indent = len(line) - content + if margin is None: margin = indent + else: margin = min(margin, indent) + if margin is not None: + for i in range(1, len(lines)): lines[i] = lines[i][margin:] + return string.join(lines, '\n') + +def getfile(object): + """Work out which source or compiled file an object was defined in.""" + if ismodule(object): + if hasattr(object, '__file__'): + return object.__file__ + raise TypeError, 'arg is a built-in module' + if isclass(object): + object = sys.modules.get(object.__module__) + if hasattr(object, '__file__'): + return object.__file__ + raise TypeError, 'arg is a built-in class' + if ismethod(object): + object = object.im_func + if isfunction(object): + object = object.func_code + if istraceback(object): + object = object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + return object.co_filename + raise TypeError, 'arg is not a module, class, method, ' \ + 'function, traceback, frame, or code object' + +def getmoduleinfo(path): + """Get the module name, suffix, mode, and module type for a given file.""" + filename = os.path.basename(path) + suffixes = map(lambda (suffix, mode, mtype): + (-len(suffix), suffix, mode, mtype), imp.get_suffixes()) + suffixes.sort() # try longest suffixes first, in case they overlap + for neglen, suffix, mode, mtype in suffixes: + if filename[neglen:] == suffix: + return filename[:neglen], suffix, mode, mtype + +def getmodulename(path): + """Return the module name for a given file, or None.""" + info = getmoduleinfo(path) + if info: return info[0] + +def getsourcefile(object): + """Return the Python source file an object was defined in, if it exists.""" + filename = getfile(object) + if string.lower(filename[-4:]) in ['.pyc', '.pyo']: + filename = filename[:-4] + '.py' + for suffix, mode, kind in imp.get_suffixes(): + if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix: + # Looks like a binary file. We want to only return a text file. + return None + if os.path.exists(filename): + return filename + +def getabsfile(object): + """Return an absolute path to the source or compiled file for an object. 
+ + The idea is for each object to have a unique origin, so this routine + normalizes the result as much as possible.""" + return os.path.normcase( + os.path.abspath(getsourcefile(object) or getfile(object))) + +modulesbyfile = {} + +def getmodule(object): + """Return the module an object was defined in, or None if not found.""" + if ismodule(object): + return object + if isclass(object): + return sys.modules.get(object.__module__) + try: + file = getabsfile(object) + except TypeError: + return None + if modulesbyfile.has_key(file): + return sys.modules[modulesbyfile[file]] + for module in sys.modules.values(): + if hasattr(module, '__file__'): + modulesbyfile[getabsfile(module)] = module.__name__ + if modulesbyfile.has_key(file): + return sys.modules[modulesbyfile[file]] + main = sys.modules['__main__'] + if hasattr(main, object.__name__): + mainobject = getattr(main, object.__name__) + if mainobject is object: + return main + builtin = sys.modules['__builtin__'] + if hasattr(builtin, object.__name__): + builtinobject = getattr(builtin, object.__name__) + if builtinobject is object: + return builtin + +def findsource(object): + """Return the entire source file and starting line number for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of all the lines + in the file and the line number indexes a line in that list. An IOError + is raised if the source code cannot be retrieved.""" + try: + file = open(getsourcefile(object)) + except (TypeError, IOError): + raise IOError, 'could not get source code' + lines = file.readlines() + file.close() + + if ismodule(object): + return lines, 0 + + if isclass(object): + name = object.__name__ + pat = re.compile(r'^\s*class\s*' + name + r'\b') + for i in range(len(lines)): + if pat.match(lines[i]): return lines, i + else: raise IOError, 'could not find class definition' + + if ismethod(object): + object = object.im_func + if isfunction(object): + object = object.func_code + if istraceback(object): + object = object.tb_frame + if isframe(object): + object = object.f_code + if iscode(object): + if not hasattr(object, 'co_firstlineno'): + raise IOError, 'could not find function definition' + lnum = object.co_firstlineno - 1 + pat = re.compile(r'^(\s*def\s)|(.*\slambda(:|\s))') + while lnum > 0: + if pat.match(lines[lnum]): break + lnum = lnum - 1 + return lines, lnum + raise IOError, 'could not find code object' + +def getcomments(object): + """Get lines of comments immediately preceding an object's source code.""" + try: lines, lnum = findsource(object) + except IOError: return None + + if ismodule(object): + # Look for a comment block at the top of the file. + start = 0 + if lines and lines[0][:2] == '#!': start = 1 + while start < len(lines) and string.strip(lines[start]) in ['', '#']: + start = start + 1 + if start < len(lines) and lines[start][:1] == '#': + comments = [] + end = start + while end < len(lines) and lines[end][:1] == '#': + comments.append(string.expandtabs(lines[end])) + end = end + 1 + return string.join(comments, '') + + # Look for a preceding block of comments at the same indentation. 
+ elif lnum > 0: + indent = indentsize(lines[lnum]) + end = lnum - 1 + if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \ + indentsize(lines[end]) == indent: + comments = [string.lstrip(string.expandtabs(lines[end]))] + if end > 0: + end = end - 1 + comment = string.lstrip(string.expandtabs(lines[end])) + while comment[:1] == '#' and indentsize(lines[end]) == indent: + comments[:0] = [comment] + end = end - 1 + if end < 0: break + comment = string.lstrip(string.expandtabs(lines[end])) + while comments and string.strip(comments[0]) == '#': + comments[:1] = [] + while comments and string.strip(comments[-1]) == '#': + comments[-1:] = [] + return string.join(comments, '') + +class ListReader: + """Provide a readline() method to return lines from a list of strings.""" + def __init__(self, lines): + self.lines = lines + self.index = 0 + + def readline(self): + i = self.index + if i < len(self.lines): + self.index = i + 1 + return self.lines[i] + else: return '' + +class EndOfBlock(Exception): pass + +class BlockFinder: + """Provide a tokeneater() method to detect the end of a code block.""" + def __init__(self): + self.indent = 0 + self.started = 0 + self.last = 0 + + def tokeneater(self, type, token, (srow, scol), (erow, ecol), line): + if not self.started: + if type == tokenize.NAME: self.started = 1 + elif type == tokenize.NEWLINE: + self.last = srow + elif type == tokenize.INDENT: + self.indent = self.indent + 1 + elif type == tokenize.DEDENT: + self.indent = self.indent - 1 + if self.indent == 0: raise EndOfBlock, self.last + elif type == tokenize.NAME and scol == 0: + raise EndOfBlock, self.last + +def getblock(lines): + """Extract the block of code at the top of the given list of lines.""" + try: + tokenize.tokenize(ListReader(lines).readline, BlockFinder().tokeneater) + except EndOfBlock, eob: + return lines[:eob.args[0]] + # Fooling the indent/dedent logic implies a one-line definition + return lines[:1] + +def getsourcelines(object): + """Return a list of source lines and starting line number for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a list of the lines + corresponding to the object and the line number indicates where in the + original source file the first line of code was found. An IOError is + raised if the source code cannot be retrieved.""" + lines, lnum = findsource(object) + + if ismodule(object): return lines, 0 + else: return getblock(lines[lnum:]), lnum + 1 + +def getsource(object): + """Return the text of the source code for an object. + + The argument may be a module, class, method, function, traceback, frame, + or code object. The source code is returned as a single string. An + IOError is raised if the source code cannot be retrieved.""" + lines, lnum = getsourcelines(object) + return string.join(lines, '') + +# --------------------------------------------------- class tree extraction +def walktree(classes, children, parent): + """Recursive helper function for getclasstree().""" + results = [] + classes.sort(lambda a, b: cmp(a.__name__, b.__name__)) + for c in classes: + results.append((c, c.__bases__)) + if children.has_key(c): + results.append(walktree(children[c], children, c)) + return results + +def getclasstree(classes, unique=0): + """Arrange the given list of classes into a hierarchy of nested lists. + + Where a nested list appears, it contains classes derived from the class + whose entry immediately precedes the list. 
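getsource() and friends resolve an object back to its file and text; for example (stock inspect module shown, same function names as above; the path is illustrative):

import inspect, posixpath

inspect.getsourcefile(posixpath.join)                     # '<...>/posixpath.py' (the .py, never the .pyc)
src = inspect.getsource(posixpath.join)                   # full text of the 'def join(...):' block
lines, lineno = inspect.getsourcelines(posixpath.join)    # same text as a list, plus the starting line number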
Each entry is a 2-tuple + containing a class and a tuple of its base classes. If the 'unique' + argument is true, exactly one entry appears in the returned structure + for each class in the given list. Otherwise, classes using multiple + inheritance and their descendants will appear multiple times.""" + children = {} + roots = [] + for c in classes: + if c.__bases__: + for parent in c.__bases__: + if not children.has_key(parent): + children[parent] = [] + children[parent].append(c) + if unique and parent in classes: break + elif c not in roots: + roots.append(c) + for parent in children.keys(): + if parent not in classes: + roots.append(parent) + return walktree(roots, children, None) + +# ------------------------------------------------ argument list extraction +# These constants are from Python's compile.h. +CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 1, 2, 4, 8 + +def getargs(co): + """Get information about the arguments accepted by a code object. + + Three things are returned: (args, varargs, varkw), where 'args' is + a list of argument names (possibly containing nested lists), and + 'varargs' and 'varkw' are the names of the * and ** arguments or None.""" + if not iscode(co): raise TypeError, 'arg is not a code object' + + nargs = co.co_argcount + names = co.co_varnames + args = list(names[:nargs]) + step = 0 + + # The following acrobatics are for anonymous (tuple) arguments. + if not sys.platform.startswith('java'):#Jython doesn't have co_code + code = co.co_code + import dis + for i in range(nargs): + if args[i][:1] in ['', '.']: + stack, remain, count = [], [], [] + while step < len(code): + op = ord(code[step]) + step = step + 1 + if op >= dis.HAVE_ARGUMENT: + opname = dis.opname[op] + value = ord(code[step]) + ord(code[step + 1]) * 256 + step = step + 2 + if opname in ['UNPACK_TUPLE', 'UNPACK_SEQUENCE']: + remain.append(value) + count.append(value) + elif opname == 'STORE_FAST': + stack.append(names[value]) + remain[-1] = remain[-1] - 1 + while remain[-1] == 0: + remain.pop() + size = count.pop() + stack[-size:] = [stack[-size:]] + if not remain: break + remain[-1] = remain[-1] - 1 + if not remain: break + args[i] = stack[0] + + varargs = None + if co.co_flags & CO_VARARGS: + varargs = co.co_varnames[nargs] + nargs = nargs + 1 + varkw = None + if co.co_flags & CO_VARKEYWORDS: + varkw = co.co_varnames[nargs] + return args, varargs, varkw + +def getargspec(func): + """Get the names and default values of a function's arguments. + + A tuple of four things is returned: (args, varargs, varkw, defaults). + 'args' is a list of the argument names (it may contain nested lists). + 'varargs' and 'varkw' are the names of the * and ** arguments or None. + 'defaults' is an n-tuple of the default values of the last n arguments.""" + if ismethod(func): + func = func.im_func + if not isfunction(func): raise TypeError, 'arg is not a Python function' + args, varargs, varkw = getargs(func.func_code) + return args, varargs, varkw, func.func_defaults + +def getargvalues(frame): + """Get information about arguments passed into a particular frame. + + A tuple of four things is returned: (args, varargs, varkw, locals). + 'args' is a list of the argument names (it may contain nested lists). + 'varargs' and 'varkw' are the names of the * and ** arguments or None. 
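For a quick sense of the result shape, using the functions defined above (this is the pre-namedtuple version of inspect, so plain tuples come back):

def f(a, b=1, *args, **kwargs):
    pass

getargspec(f)         # -> (['a', 'b'], 'args', 'kwargs', (1,))
getargs(f.func_code)  # -> (['a', 'b'], 'args', 'kwargs')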
+ 'locals' is the locals dictionary of the given frame.""" + args, varargs, varkw = getargs(frame.f_code) + return args, varargs, varkw, frame.f_locals + +def joinseq(seq): + if len(seq) == 1: + return '(' + seq[0] + ',)' + else: + return '(' + string.join(seq, ', ') + ')' + +def strseq(object, convert, join=joinseq): + """Recursively walk a sequence, stringifying each element.""" + if type(object) in [types.ListType, types.TupleType]: + return join(map(lambda o, c=convert, j=join: strseq(o, c, j), object)) + else: + return convert(object) + +def formatargspec(args, varargs=None, varkw=None, defaults=None, + formatarg=str, + formatvarargs=lambda name: '*' + name, + formatvarkw=lambda name: '**' + name, + formatvalue=lambda value: '=' + repr(value), + join=joinseq): + """Format an argument spec from the 4 values returned by getargspec. + + The first four arguments are (args, varargs, varkw, defaults). The + other four arguments are the corresponding optional formatting functions + that are called to turn names and values into strings. The ninth + argument is an optional function to format the sequence of arguments.""" + specs = [] + if defaults: + firstdefault = len(args) - len(defaults) + for i in range(len(args)): + spec = strseq(args[i], formatarg, join) + if defaults and i >= firstdefault: + spec = spec + formatvalue(defaults[i - firstdefault]) + specs.append(spec) + if varargs: + specs.append(formatvarargs(varargs)) + if varkw: + specs.append(formatvarkw(varkw)) + return '(' + string.join(specs, ', ') + ')' + +def formatargvalues(args, varargs, varkw, locals, + formatarg=str, + formatvarargs=lambda name: '*' + name, + formatvarkw=lambda name: '**' + name, + formatvalue=lambda value: '=' + repr(value), + join=joinseq): + """Format an argument spec from the 4 values returned by getargvalues. + + The first four arguments are (args, varargs, varkw, locals). The + next four arguments are the corresponding optional formatting functions + that are called to turn names and values into strings. The ninth + argument is an optional function to format the sequence of arguments.""" + def convert(name, locals=locals, + formatarg=formatarg, formatvalue=formatvalue): + return formatarg(name) + formatvalue(locals[name]) + specs = [] + for i in range(len(args)): + specs.append(strseq(args[i], convert, join)) + if varargs: + specs.append(formatvarargs(varargs) + formatvalue(locals[varargs])) + if varkw: + specs.append(formatvarkw(varkw) + formatvalue(locals[varkw])) + return '(' + string.join(specs, ', ') + ')' + +# -------------------------------------------------- stack frame extraction +def getframeinfo(frame, context=1): + """Get information about a frame or traceback object. + + A tuple of five things is returned: the filename, the line number of + the current line, the function name, a list of lines of context from + the source code, and the index of the current line within that list. 
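The two format helpers turn those tuples back into a readable signature; a small sketch with the functions defined above (CPython assumed, for sys._getframe):

import sys

def traced(x, y=2):
    frame = sys._getframe()                      # the frame of this very call
    return formatargvalues(*getargvalues(frame))

formatargspec(*getargspec(traced))   # -> '(x, y=2)'
traced(1)                            # -> '(x=1, y=2)'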
+ The optional second argument specifies the number of lines of context + to return, which are centered around the current line.""" + raise NotImplementedError +# if istraceback(frame): +# frame = frame.tb_frame +# if not isframe(frame): +# raise TypeError, 'arg is not a frame or traceback object' +# +# filename = getsourcefile(frame) +# lineno = getlineno(frame) +# if context > 0: +# start = lineno - 1 - context//2 +# try: +# lines, lnum = findsource(frame) +# except IOError: +# lines = index = None +# else: +# start = max(start, 1) +# start = min(start, len(lines) - context) +# lines = lines[start:start+context] +# index = lineno - 1 - start +# else: +# lines = index = None +# +# return (filename, lineno, frame.f_code.co_name, lines, index) + +def getlineno(frame): + """Get the line number from a frame object, allowing for optimization.""" + # Written by Marc-Andr Lemburg; revised by Jim Hugunin and Fredrik Lundh. + lineno = frame.f_lineno + code = frame.f_code + if hasattr(code, 'co_lnotab'): + table = code.co_lnotab + lineno = code.co_firstlineno + addr = 0 + for i in range(0, len(table), 2): + addr = addr + ord(table[i]) + if addr > frame.f_lasti: break + lineno = lineno + ord(table[i + 1]) + return lineno + +def getouterframes(frame, context=1): + """Get a list of records for a frame and all higher (calling) frames. + + Each record contains a frame object, filename, line number, function + name, a list of lines of context, and index within the context.""" + framelist = [] + while frame: + framelist.append((frame,) + getframeinfo(frame, context)) + frame = frame.f_back + return framelist + +def getinnerframes(tb, context=1): + """Get a list of records for a traceback's frame and all lower frames. + + Each record contains a frame object, filename, line number, function + name, a list of lines of context, and index within the context.""" + framelist = [] + while tb: + framelist.append((tb.tb_frame,) + getframeinfo(tb, context)) + tb = tb.tb_next + return framelist + +def currentframe(): + """Return the frame object for the caller's stack frame.""" + try: + raise 'catch me' + except: + return sys.exc_traceback.tb_frame.f_back #@UndefinedVariable + +if hasattr(sys, '_getframe'): currentframe = sys._getframe + +def stack(context=1): + """Return a list of records for the stack above the caller's frame.""" + return getouterframes(currentframe().f_back, context) + +def trace(context=1): + """Return a list of records for the stack below the current exception.""" + return getinnerframes(sys.exc_traceback, context) #@UndefinedVariable diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_pkgutil_old.py b/ptvsd/pydevd/_pydev_imps/_pydev_pkgutil_old.py new file mode 100644 index 00000000..ce072ec9 --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_pkgutil_old.py @@ -0,0 +1,591 @@ +"""Utilities to support packages.""" + +# NOTE: This module must remain compatible with Python 2.3, as it is shared +# by setuptools for distribution with Python 2.3 and up. 
+ +import os +import sys +import imp +import os.path +from types import ModuleType + +__all__ = [ + 'get_importer', 'iter_importers', 'get_loader', 'find_loader', + 'walk_packages', 'iter_modules', 'get_data', + 'ImpImporter', 'ImpLoader', 'read_code', 'extend_path', +] + +def read_code(stream): + # This helper is needed in order for the PEP 302 emulation to + # correctly handle compiled files + import marshal + + magic = stream.read(4) + if magic != imp.get_magic(): + return None + + stream.read(4) # Skip timestamp + return marshal.load(stream) + + +def simplegeneric(func): + """Make a trivial single-dispatch generic function""" + registry = {} + def wrapper(*args, **kw): + ob = args[0] + try: + cls = ob.__class__ + except AttributeError: + cls = type(ob) + try: + mro = cls.__mro__ + except AttributeError: + try: + class cls(cls, object): + pass + mro = cls.__mro__[1:] + except TypeError: + mro = object, # must be an ExtensionClass or some such :( + for t in mro: + if t in registry: + return registry[t](*args, **kw) + else: + return func(*args, **kw) + try: + wrapper.__name__ = func.__name__ + except (TypeError, AttributeError): + pass # Python 2.3 doesn't allow functions to be renamed + + def register(typ, func=None): + if func is None: + return lambda f: register(typ, f) + registry[typ] = func + return func + + wrapper.__dict__ = func.__dict__ + wrapper.__doc__ = func.__doc__ + wrapper.register = register + return wrapper + + +def walk_packages(path=None, prefix='', onerror=None): + """Yields (module_loader, name, ispkg) for all modules recursively + on path, or, if path is None, all accessible modules. + + 'path' should be either None or a list of paths to look for + modules in. + + 'prefix' is a string to output on the front of every module name + on output. + + Note that this function must import all *packages* (NOT all + modules!) on the given path, in order to access the __path__ + attribute to find submodules. + + 'onerror' is a function which gets called with one argument (the + name of the package which was being imported) if any exception + occurs while trying to import a package. If no onerror function is + supplied, ImportErrors are caught and ignored, while all other + exceptions are propagated, terminating the search. + + Examples: + + # list all modules python can access + walk_packages() + + # list all submodules of ctypes + walk_packages(ctypes.__path__, ctypes.__name__+'.') + """ + + def seen(p, m={}): + if p in m: + return True + m[p] = True + + for importer, name, ispkg in iter_modules(path, prefix): + yield importer, name, ispkg + + if ispkg: + try: + __import__(name) + except ImportError: + if onerror is not None: + onerror(name) + except Exception: + if onerror is not None: + onerror(name) + else: + raise + else: + path = getattr(sys.modules[name], '__path__', None) or [] + + # don't traverse path items we've seen before + path = [p for p in path if not seen(p)] + + for item in walk_packages(path, name+'.', onerror): + yield item + + +def iter_modules(path=None, prefix=''): + """Yields (module_loader, name, ispkg) for all submodules on path, + or, if path is None, all top-level modules on sys.path. + + 'path' should be either None or a list of paths to look for + modules in. + + 'prefix' is a string to output on the front of every module name + on output. 
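iter_modules() (and walk_packages(), which builds on it) are typically pointed at a package's __path__; for example, with the stock pkgutil that this vendored copy is derived from:

import logging, pkgutil   # stock module; this file provides the same functions

names = [name for _, name, ispkg in
         pkgutil.iter_modules(logging.__path__, 'logging.')]
# names now includes e.g. 'logging.config' and 'logging.handlers',
# each reported with ispkg == False.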
+ """ + + if path is None: + importers = iter_importers() + else: + importers = map(get_importer, path) + + yielded = {} + for i in importers: + for name, ispkg in iter_importer_modules(i, prefix): + if name not in yielded: + yielded[name] = 1 + yield i, name, ispkg + + +#@simplegeneric +def iter_importer_modules(importer, prefix=''): + if not hasattr(importer, 'iter_modules'): + return [] + return importer.iter_modules(prefix) + +iter_importer_modules = simplegeneric(iter_importer_modules) + + +class ImpImporter: + """PEP 302 Importer that wraps Python's "classic" import algorithm + + ImpImporter(dirname) produces a PEP 302 importer that searches that + directory. ImpImporter(None) produces a PEP 302 importer that searches + the current sys.path, plus any modules that are frozen or built-in. + + Note that ImpImporter does not currently support being used by placement + on sys.meta_path. + """ + + def __init__(self, path=None): + self.path = path + + def find_module(self, fullname, path=None): + # Note: we ignore 'path' argument since it is only used via meta_path + subname = fullname.split(".")[-1] + if subname != fullname and self.path is None: + return None + if self.path is None: + path = None + else: + path = [os.path.realpath(self.path)] + try: + file, filename, etc = imp.find_module(subname, path) + except ImportError: + return None + return ImpLoader(fullname, file, filename, etc) + + def iter_modules(self, prefix=''): + if self.path is None or not os.path.isdir(self.path): + return + + yielded = {} + import inspect + try: + filenames = os.listdir(self.path) + except OSError: + # ignore unreadable directories like import does + filenames = [] + filenames.sort() # handle packages before same-named modules + + for fn in filenames: + modname = inspect.getmodulename(fn) + if modname=='__init__' or modname in yielded: + continue + + path = os.path.join(self.path, fn) + ispkg = False + + if not modname and os.path.isdir(path) and '.' not in fn: + modname = fn + try: + dircontents = os.listdir(path) + except OSError: + # ignore unreadable directories like import does + dircontents = [] + for fn in dircontents: + subname = inspect.getmodulename(fn) + if subname=='__init__': + ispkg = True + break + else: + continue # not a package + + if modname and '.' not in modname: + yielded[modname] = 1 + yield prefix + modname, ispkg + + +class ImpLoader: + """PEP 302 Loader that wraps Python's "classic" import algorithm + """ + code = source = None + + def __init__(self, fullname, file, filename, etc): + self.file = file + self.filename = filename + self.fullname = fullname + self.etc = etc + + def load_module(self, fullname): + self._reopen() + try: + mod = imp.load_module(fullname, self.file, self.filename, self.etc) + finally: + if self.file: + self.file.close() + # Note: we don't set __loader__ because we want the module to look + # normal; i.e. 
this is just a wrapper for standard import machinery + return mod + + def get_data(self, pathname): + return open(pathname, "rb").read() + + def _reopen(self): + if self.file and self.file.closed: + mod_type = self.etc[2] + if mod_type==imp.PY_SOURCE: + self.file = open(self.filename, 'rU') + elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION): + self.file = open(self.filename, 'rb') + + def _fix_name(self, fullname): + if fullname is None: + fullname = self.fullname + elif fullname != self.fullname: + raise ImportError("Loader for module %s cannot handle " + "module %s" % (self.fullname, fullname)) + return fullname + + def is_package(self, fullname): + fullname = self._fix_name(fullname) + return self.etc[2]==imp.PKG_DIRECTORY + + def get_code(self, fullname=None): + fullname = self._fix_name(fullname) + if self.code is None: + mod_type = self.etc[2] + if mod_type==imp.PY_SOURCE: + source = self.get_source(fullname) + self.code = compile(source, self.filename, 'exec') + elif mod_type==imp.PY_COMPILED: + self._reopen() + try: + self.code = read_code(self.file) + finally: + self.file.close() + elif mod_type==imp.PKG_DIRECTORY: + self.code = self._get_delegate().get_code() + return self.code + + def get_source(self, fullname=None): + fullname = self._fix_name(fullname) + if self.source is None: + mod_type = self.etc[2] + if mod_type==imp.PY_SOURCE: + self._reopen() + try: + self.source = self.file.read() + finally: + self.file.close() + elif mod_type==imp.PY_COMPILED: + if os.path.exists(self.filename[:-1]): + f = open(self.filename[:-1], 'rU') + self.source = f.read() + f.close() + elif mod_type==imp.PKG_DIRECTORY: + self.source = self._get_delegate().get_source() + return self.source + + + def _get_delegate(self): + return ImpImporter(self.filename).find_module('__init__') + + def get_filename(self, fullname=None): + fullname = self._fix_name(fullname) + mod_type = self.etc[2] + if self.etc[2]==imp.PKG_DIRECTORY: + return self._get_delegate().get_filename() + elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION): + return self.filename + return None + + +try: + import zipimport + from zipimport import zipimporter + + def iter_zipimport_modules(importer, prefix=''): + dirlist = zipimport._zip_directory_cache[importer.archive].keys() + dirlist.sort() + _prefix = importer.prefix + plen = len(_prefix) + yielded = {} + import inspect + for fn in dirlist: + if not fn.startswith(_prefix): + continue + + fn = fn[plen:].split(os.sep) + + if len(fn)==2 and fn[1].startswith('__init__.py'): + if fn[0] not in yielded: + yielded[fn[0]] = 1 + yield fn[0], True + + if len(fn)!=1: + continue + + modname = inspect.getmodulename(fn[0]) + if modname=='__init__': + continue + + if modname and '.' not in modname and modname not in yielded: + yielded[modname] = 1 + yield prefix + modname, False + + iter_importer_modules.register(zipimporter, iter_zipimport_modules) + +except ImportError: + pass + + +def get_importer(path_item): + """Retrieve a PEP 302 importer for the given path item + + The returned importer is cached in sys.path_importer_cache + if it was newly created by a path hook. + + If there is no importer, a wrapper around the basic import + machinery is returned. This wrapper is never inserted into + the importer cache (None is inserted instead). + + The cache (or part of it) can be cleared manually if a + rescan of sys.path_hooks is necessary. 
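As a hedged illustration of the ImpImporter/ImpLoader pair defined above: the same classes ship in stdlib pkgutil on Python 2 (and, deprecated, on early Python 3), so a directory can be probed for a module and its source read back as sketched below; the paths and module name are only examples.

    import os
    import pkgutil

    stdlib_dir = os.path.dirname(os.__file__)
    importer = pkgutil.ImpImporter(stdlib_dir)
    loader = importer.find_module('genericpath')     # a plain source module
    print(loader.get_filename())                     # .../genericpath.py
    print(loader.is_package('genericpath'))          # False
    source = loader.get_source()                     # module source as a string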
+ """ + try: + importer = sys.path_importer_cache[path_item] + except KeyError: + for path_hook in sys.path_hooks: + try: + importer = path_hook(path_item) + break + except ImportError: + pass + else: + importer = None + sys.path_importer_cache.setdefault(path_item, importer) + + if importer is None: + try: + importer = ImpImporter(path_item) + except ImportError: + importer = None + return importer + + +def iter_importers(fullname=""): + """Yield PEP 302 importers for the given module name + + If fullname contains a '.', the importers will be for the package + containing fullname, otherwise they will be importers for sys.meta_path, + sys.path, and Python's "classic" import machinery, in that order. If + the named module is in a package, that package is imported as a side + effect of invoking this function. + + Non PEP 302 mechanisms (e.g. the Windows registry) used by the + standard import machinery to find files in alternative locations + are partially supported, but are searched AFTER sys.path. Normally, + these locations are searched BEFORE sys.path, preventing sys.path + entries from shadowing them. + + For this to cause a visible difference in behaviour, there must + be a module or package name that is accessible via both sys.path + and one of the non PEP 302 file system mechanisms. In this case, + the emulation will find the former version, while the builtin + import mechanism will find the latter. + + Items of the following types can be affected by this discrepancy: + imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY + """ + if fullname.startswith('.'): + raise ImportError("Relative module names not supported") + if '.' in fullname: + # Get the containing package's __path__ + pkg = '.'.join(fullname.split('.')[:-1]) + if pkg not in sys.modules: + __import__(pkg) + path = getattr(sys.modules[pkg], '__path__', None) or [] + else: + for importer in sys.meta_path: + yield importer + path = sys.path + for item in path: + yield get_importer(item) + if '.' not in fullname: + yield ImpImporter() + +def get_loader(module_or_name): + """Get a PEP 302 "loader" object for module_or_name + + If the module or package is accessible via the normal import + mechanism, a wrapper around the relevant part of that machinery + is returned. Returns None if the module cannot be found or imported. + If the named module is not already imported, its containing package + (if any) is imported, in order to establish the package __path__. + + This function uses iter_importers(), and is thus subject to the same + limitations regarding platform-specific special import locations such + as the Windows registry. + """ + if module_or_name in sys.modules: + module_or_name = sys.modules[module_or_name] + if isinstance(module_or_name, ModuleType): + module = module_or_name + loader = getattr(module, '__loader__', None) + if loader is not None: + return loader + fullname = module.__name__ + else: + fullname = module_or_name + return find_loader(fullname) + +def find_loader(fullname): + """Find a PEP 302 "loader" object for fullname + + If fullname contains dots, path must be the containing package's __path__. + Returns None if the module cannot be found or imported. This function uses + iter_importers(), and is thus subject to the same limitations regarding + platform-specific special import locations such as the Windows registry. 
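A minimal sketch of the three lookup helpers above, run against their stdlib pkgutil counterparts; the module names are only examples.

    import os
    import pkgutil

    importer = pkgutil.get_importer(os.path.dirname(os.__file__))
    print(importer)                                 # cached in sys.path_importer_cache

    loader = pkgutil.get_loader('json.decoder')     # imports the json package as a side effect
    print(loader is not None)                       # True

    print(pkgutil.find_loader('no_such_module_'))   # None when nothing can be found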
+ """ + for importer in iter_importers(fullname): + loader = importer.find_module(fullname) + if loader is not None: + return loader + + return None + + +def extend_path(path, name): + """Extend a package's path. + + Intended use is to place the following code in a package's __init__.py: + + from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) + + This will add to the package's __path__ all subdirectories of + directories on sys.path named after the package. This is useful + if one wants to distribute different parts of a single logical + package as multiple directories. + + It also looks for *.pkg files beginning where * matches the name + argument. This feature is similar to *.pth files (see site.py), + except that it doesn't special-case lines starting with 'import'. + A *.pkg file is trusted at face value: apart from checking for + duplicates, all entries found in a *.pkg file are added to the + path, regardless of whether they are exist the filesystem. (This + is a feature.) + + If the input path is not a list (as is the case for frozen + packages) it is returned unchanged. The input path is not + modified; an extended copy is returned. Items are only appended + to the copy at the end. + + It is assumed that sys.path is a sequence. Items of sys.path that + are not (unicode or 8-bit) strings referring to existing + directories are ignored. Unicode items of sys.path that cause + errors when used as filenames may cause this function to raise an + exception (in line with os.path.isdir() behavior). + """ + + if not isinstance(path, list): + # This could happen e.g. when this is called from inside a + # frozen package. Return the path unchanged in that case. + return path + + pname = os.path.join(*name.split('.')) # Reconstitute as relative path + # Just in case os.extsep != '.' + sname = os.extsep.join(name.split('.')) + sname_pkg = sname + os.extsep + "pkg" + init_py = "__init__" + os.extsep + "py" + + path = path[:] # Start with a copy of the existing path + + for dir in sys.path: + if not isinstance(dir, basestring) or not os.path.isdir(dir): + continue + subdir = os.path.join(dir, pname) + # XXX This may still add duplicate entries to path on + # case-insensitive filesystems + initfile = os.path.join(subdir, init_py) + if subdir not in path and os.path.isfile(initfile): + path.append(subdir) + # XXX Is this the right thing for subpackages like zope.app? + # It looks for a file named "zope.app.pkg" + pkgfile = os.path.join(dir, sname_pkg) + if os.path.isfile(pkgfile): + try: + f = open(pkgfile) + except IOError, msg: + sys.stderr.write("Can't open %s: %s\n" % + (pkgfile, msg)) + else: + for line in f: + line = line.rstrip('\n') + if not line or line.startswith('#'): + continue + path.append(line) # Don't check for existence! + f.close() + + return path + +def get_data(package, resource): + """Get a resource from a package. + + This is a wrapper round the PEP 302 loader get_data API. The package + argument should be the name of a package, in standard module format + (foo.bar). The resource argument should be in the form of a relative + filename, using '/' as the path separator. The parent directory name '..' + is not allowed, and nor is a rooted name (starting with a '/'). + + The function returns a binary string, which is the contents of the + specified resource. 
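The documented idiom for extend_path(), and for get_data() which continues just below, is short enough to restate as a sketch; namespace_pkg is an illustrative package name, not part of this change.

    # namespace_pkg/__init__.py -- merge same-named directories from sys.path
    from pkgutil import extend_path
    __path__ = extend_path(__path__, __name__)

    # Reading a file that ships inside an importable package:
    import pkgutil
    data = pkgutil.get_data('ctypes', '__init__.py')   # bytes of ctypes/__init__.py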
+ + For packages located in the filesystem, which have already been imported, + this is the rough equivalent of + + d = os.path.dirname(sys.modules[package].__file__) + data = open(os.path.join(d, resource), 'rb').read() + + If the package cannot be located or loaded, or it uses a PEP 302 loader + which does not support get_data(), then None is returned. + """ + + loader = get_loader(package) + if loader is None or not hasattr(loader, 'get_data'): + return None + mod = sys.modules.get(package) or loader.load_module(package) + if mod is None or not hasattr(mod, '__file__'): + return None + + # Modify the resource name to be compatible with the loader.get_data + # signature - an os.path format "filename" starting with the dirname of + # the package's __file__ + parts = resource.split('/') + parts.insert(0, os.path.dirname(mod.__file__)) + resource_name = os.path.join(*parts) + return loader.get_data(resource_name) diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_saved_modules.py b/ptvsd/pydevd/_pydev_imps/_pydev_saved_modules.py new file mode 100644 index 00000000..6ff3939d --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_saved_modules.py @@ -0,0 +1,23 @@ +import sys +IS_PY2 = sys.version_info < (3,) + +import threading + +import time + +import socket + +import select + +if IS_PY2: + import thread + import Queue as _queue + import xmlrpclib + import SimpleXMLRPCServer as _pydev_SimpleXMLRPCServer + import BaseHTTPServer +else: + import _thread as thread + import queue as _queue + import xmlrpc.client as xmlrpclib + import xmlrpc.server as _pydev_SimpleXMLRPCServer + import http.server as BaseHTTPServer \ No newline at end of file diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_sys_patch.py b/ptvsd/pydevd/_pydev_imps/_pydev_sys_patch.py new file mode 100644 index 00000000..0220ad0d --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_sys_patch.py @@ -0,0 +1,75 @@ + +import sys + + +def patch_sys_module(): + def patched_exc_info(fun): + def pydev_debugger_exc_info(): + type, value, traceback = fun() + if type == ImportError: + #we should not show frame added by plugin_import call + if traceback and hasattr(traceback, "tb_next"): + return type, value, traceback.tb_next + return type, value, traceback + return pydev_debugger_exc_info + + system_exc_info = sys.exc_info + sys.exc_info = patched_exc_info(system_exc_info) + if not hasattr(sys, "system_exc_info"): + sys.system_exc_info = system_exc_info + + +def patched_reload(orig_reload): + def pydev_debugger_reload(module): + orig_reload(module) + if module.__name__ == "sys": + # if sys module was reloaded we should patch it again + patch_sys_module() + return pydev_debugger_reload + + +def patch_reload(): + if sys.version_info[0] >= 3: + import builtins # Py3 + else: + import __builtin__ as builtins + + if hasattr(builtins, "reload"): + sys.builtin_orig_reload = builtins.reload + builtins.reload = patched_reload(sys.builtin_orig_reload) # @UndefinedVariable + try: + import imp + sys.imp_orig_reload = imp.reload + imp.reload = patched_reload(sys.imp_orig_reload) # @UndefinedVariable + except: + pass + else: + try: + import importlib + sys.importlib_orig_reload = importlib.reload # @UndefinedVariable + importlib.reload = patched_reload(sys.importlib_orig_reload) # @UndefinedVariable + except: + pass + + del builtins + + +def cancel_patches_in_sys_module(): + sys.exc_info = sys.system_exc_info # @UndefinedVariable + if sys.version_info[0] >= 3: + import builtins # Py3 + else: + import __builtin__ as builtins + + if hasattr(sys, "builtin_orig_reload"): + 
builtins.reload = sys.builtin_orig_reload + + if hasattr(sys, "imp_orig_reload"): + import imp + imp.reload = sys.imp_orig_reload + + if hasattr(sys, "importlib_orig_reload"): + import importlib + importlib.reload = sys.importlib_orig_reload + + del builtins diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_uuid_old.py b/ptvsd/pydevd/_pydev_imps/_pydev_uuid_old.py new file mode 100644 index 00000000..20bc43b7 --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_uuid_old.py @@ -0,0 +1,541 @@ +r"""UUID objects (universally unique identifiers) according to RFC 4122. + +This module provides immutable UUID objects (class UUID) and the functions +uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5 +UUIDs as specified in RFC 4122. + +If all you want is a unique ID, you should probably call uuid1() or uuid4(). +Note that uuid1() may compromise privacy since it creates a UUID containing +the computer's network address. uuid4() creates a random UUID. + +Typical usage: + + >>> import uuid + + # make a UUID based on the host ID and current time + >>> uuid.uuid1() + UUID('a8098c1a-f86e-11da-bd1a-00112444be1e') + + # make a UUID using an MD5 hash of a namespace UUID and a name + >>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org') + UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e') + + # make a random UUID + >>> uuid.uuid4() + UUID('16fd2706-8baf-433b-82eb-8c7fada847da') + + # make a UUID using a SHA-1 hash of a namespace UUID and a name + >>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org') + UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d') + + # make a UUID from a string of hex digits (braces and hyphens ignored) + >>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}') + + # convert a UUID to a string of hex digits in standard form + >>> str(x) + '00010203-0405-0607-0809-0a0b0c0d0e0f' + + # get the raw 16 bytes of the UUID + >>> x.bytes + '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' + + # make a UUID from a 16-byte string + >>> uuid.UUID(bytes=x.bytes) + UUID('00010203-0405-0607-0809-0a0b0c0d0e0f') +""" + +__author__ = 'Ka-Ping Yee ' + +RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [ + 'reserved for NCS compatibility', 'specified in RFC 4122', + 'reserved for Microsoft compatibility', 'reserved for future definition'] + +class UUID(object): + """Instances of the UUID class represent UUIDs as specified in RFC 4122. + UUID objects are immutable, hashable, and usable as dictionary keys. + Converting a UUID to a string with str() yields something in the form + '12345678-1234-1234-1234-123456789abc'. The UUID constructor accepts + five possible forms: a similar string of hexadecimal digits, or a tuple + of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and + 48-bit values respectively) as an argument named 'fields', or a string + of 16 bytes (with all the integer fields in big-endian order) as an + argument named 'bytes', or a string of 16 bytes (with the first three + fields in little-endian order) as an argument named 'bytes_le', or a + single 128-bit integer as an argument named 'int'. 
+ + UUIDs have these read-only attributes: + + bytes the UUID as a 16-byte string (containing the six + integer fields in big-endian byte order) + + bytes_le the UUID as a 16-byte string (with time_low, time_mid, + and time_hi_version in little-endian byte order) + + fields a tuple of the six integer fields of the UUID, + which are also available as six individual attributes + and two derived attributes: + + time_low the first 32 bits of the UUID + time_mid the next 16 bits of the UUID + time_hi_version the next 16 bits of the UUID + clock_seq_hi_variant the next 8 bits of the UUID + clock_seq_low the next 8 bits of the UUID + node the last 48 bits of the UUID + + time the 60-bit timestamp + clock_seq the 14-bit sequence number + + hex the UUID as a 32-character hexadecimal string + + int the UUID as a 128-bit integer + + urn the UUID as a URN as specified in RFC 4122 + + variant the UUID variant (one of the constants RESERVED_NCS, + RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE) + + version the UUID version number (1 through 5, meaningful only + when the variant is RFC_4122) + """ + + def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None, + int=None, version=None): + r"""Create a UUID from either a string of 32 hexadecimal digits, + a string of 16 bytes as the 'bytes' argument, a string of 16 bytes + in little-endian order as the 'bytes_le' argument, a tuple of six + integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version, + 8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as + the 'fields' argument, or a single 128-bit integer as the 'int' + argument. When a string of hex digits is given, curly braces, + hyphens, and a URN prefix are all optional. For example, these + expressions all yield the same UUID: + + UUID('{12345678-1234-5678-1234-567812345678}') + UUID('12345678123456781234567812345678') + UUID('urn:uuid:12345678-1234-5678-1234-567812345678') + UUID(bytes='\x12\x34\x56\x78'*4) + UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' + + '\x12\x34\x56\x78\x12\x34\x56\x78') + UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678)) + UUID(int=0x12345678123456781234567812345678) + + Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must + be given. The 'version' argument is optional; if given, the resulting + UUID will have its variant and version set according to RFC 4122, + overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'. 
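The equivalent constructor forms listed in the docstring above can be checked directly. This sketch uses the stdlib uuid module, which this backported copy tracks; byte strings are written as bytes literals so it also runs on Python 3.

    import uuid

    u1 = uuid.UUID('{12345678-1234-5678-1234-567812345678}')
    u2 = uuid.UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
    u3 = uuid.UUID(bytes=b'\x12\x34\x56\x78' * 4)
    u4 = uuid.UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
    u5 = uuid.UUID(int=0x12345678123456781234567812345678)
    assert u1 == u2 == u3 == u4 == u5

    print(u1.hex)       # '12345678123456781234567812345678'
    print(u1.urn)       # 'urn:uuid:12345678-1234-5678-1234-567812345678'
    print(u1.variant)   # which of the RESERVED_*/RFC_4122 constants applies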
+ """ + + if [hex, bytes, bytes_le, fields, int].count(None) != 4: + raise TypeError('need one of hex, bytes, bytes_le, fields, or int') + if hex is not None: + hex = hex.replace('urn:', '').replace('uuid:', '') + hex = hex.strip('{}').replace('-', '') + if len(hex) != 32: + raise ValueError('badly formed hexadecimal UUID string') + int = long(hex, 16) + if bytes_le is not None: + if len(bytes_le) != 16: + raise ValueError('bytes_le is not a 16-char string') + bytes = (bytes_le[3] + bytes_le[2] + bytes_le[1] + bytes_le[0] + + bytes_le[5] + bytes_le[4] + bytes_le[7] + bytes_le[6] + + bytes_le[8:]) + if bytes is not None: + if len(bytes) != 16: + raise ValueError('bytes is not a 16-char string') + int = long(('%02x'*16) % tuple(map(ord, bytes)), 16) + if fields is not None: + if len(fields) != 6: + raise ValueError('fields is not a 6-tuple') + (time_low, time_mid, time_hi_version, + clock_seq_hi_variant, clock_seq_low, node) = fields + if not 0 <= time_low < 1<<32L: + raise ValueError('field 1 out of range (need a 32-bit value)') + if not 0 <= time_mid < 1<<16L: + raise ValueError('field 2 out of range (need a 16-bit value)') + if not 0 <= time_hi_version < 1<<16L: + raise ValueError('field 3 out of range (need a 16-bit value)') + if not 0 <= clock_seq_hi_variant < 1<<8L: + raise ValueError('field 4 out of range (need an 8-bit value)') + if not 0 <= clock_seq_low < 1<<8L: + raise ValueError('field 5 out of range (need an 8-bit value)') + if not 0 <= node < 1<<48L: + raise ValueError('field 6 out of range (need a 48-bit value)') + clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low + int = ((time_low << 96L) | (time_mid << 80L) | + (time_hi_version << 64L) | (clock_seq << 48L) | node) + if int is not None: + if not 0 <= int < 1<<128L: + raise ValueError('int is out of range (need a 128-bit value)') + if version is not None: + if not 1 <= version <= 5: + raise ValueError('illegal version number') + # Set the variant to RFC 4122. + int &= ~(0xc000 << 48L) + int |= 0x8000 << 48L + # Set the version number. 
+ int &= ~(0xf000 << 64L) + int |= version << 76L + self.__dict__['int'] = int + + def __cmp__(self, other): + if isinstance(other, UUID): + return cmp(self.int, other.int) + return NotImplemented + + def __hash__(self): + return hash(self.int) + + def __int__(self): + return self.int + + def __repr__(self): + return 'UUID(%r)' % str(self) + + def __setattr__(self, name, value): + raise TypeError('UUID objects are immutable') + + def __str__(self): + hex = '%032x' % self.int + return '%s-%s-%s-%s-%s' % ( + hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:]) + + def get_bytes(self): + bytes = '' + for shift in range(0, 128, 8): + bytes = chr((self.int >> shift) & 0xff) + bytes + return bytes + + bytes = property(get_bytes) + + def get_bytes_le(self): + bytes = self.bytes + return (bytes[3] + bytes[2] + bytes[1] + bytes[0] + + bytes[5] + bytes[4] + bytes[7] + bytes[6] + bytes[8:]) + + bytes_le = property(get_bytes_le) + + def get_fields(self): + return (self.time_low, self.time_mid, self.time_hi_version, + self.clock_seq_hi_variant, self.clock_seq_low, self.node) + + fields = property(get_fields) + + def get_time_low(self): + return self.int >> 96L + + time_low = property(get_time_low) + + def get_time_mid(self): + return (self.int >> 80L) & 0xffff + + time_mid = property(get_time_mid) + + def get_time_hi_version(self): + return (self.int >> 64L) & 0xffff + + time_hi_version = property(get_time_hi_version) + + def get_clock_seq_hi_variant(self): + return (self.int >> 56L) & 0xff + + clock_seq_hi_variant = property(get_clock_seq_hi_variant) + + def get_clock_seq_low(self): + return (self.int >> 48L) & 0xff + + clock_seq_low = property(get_clock_seq_low) + + def get_time(self): + return (((self.time_hi_version & 0x0fffL) << 48L) | + (self.time_mid << 32L) | self.time_low) + + time = property(get_time) + + def get_clock_seq(self): + return (((self.clock_seq_hi_variant & 0x3fL) << 8L) | + self.clock_seq_low) + + clock_seq = property(get_clock_seq) + + def get_node(self): + return self.int & 0xffffffffffff + + node = property(get_node) + + def get_hex(self): + return '%032x' % self.int + + hex = property(get_hex) + + def get_urn(self): + return 'urn:uuid:' + str(self) + + urn = property(get_urn) + + def get_variant(self): + if not self.int & (0x8000 << 48L): + return RESERVED_NCS + elif not self.int & (0x4000 << 48L): + return RFC_4122 + elif not self.int & (0x2000 << 48L): + return RESERVED_MICROSOFT + else: + return RESERVED_FUTURE + + variant = property(get_variant) + + def get_version(self): + # The version bits are only meaningful for RFC 4122 UUIDs. + if self.variant == RFC_4122: + return int((self.int >> 76L) & 0xf) + + version = property(get_version) + +def _find_mac(command, args, hw_identifiers, get_index): + import os + for dir in ['', '/sbin/', '/usr/sbin']: + executable = os.path.join(dir, command) + if not os.path.exists(executable): + continue + + try: + # LC_ALL to get English output, 2>/dev/null to + # prevent output on stderr + cmd = 'LC_ALL=C %s %s 2>/dev/null' % (executable, args) + pipe = os.popen(cmd) + except IOError: + continue + + for line in pipe: + words = line.lower().split() + for i in range(len(words)): + if words[i] in hw_identifiers: + return int(words[get_index(i)].replace(':', ''), 16) + return None + +def _ifconfig_getnode(): + """Get the hardware address on Unix by running ifconfig.""" + + # This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes. 
+ for args in ('', '-a', '-av'): + mac = _find_mac('ifconfig', args, ['hwaddr', 'ether'], lambda i: i+1) + if mac: + return mac + + import socket + ip_addr = socket.gethostbyname(socket.gethostname()) + + # Try getting the MAC addr from arp based on our IP address (Solaris). + mac = _find_mac('arp', '-an', [ip_addr], lambda i: -1) + if mac: + return mac + + # This might work on HP-UX. + mac = _find_mac('lanscan', '-ai', ['lan0'], lambda i: 0) + if mac: + return mac + + return None + +def _ipconfig_getnode(): + """Get the hardware address on Windows by running ipconfig.exe.""" + import os, re + dirs = ['', r'c:\windows\system32', r'c:\winnt\system32'] + try: + import ctypes + buffer = ctypes.create_string_buffer(300) + ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300) # @UndefinedVariable + dirs.insert(0, buffer.value.decode('mbcs')) + except: + pass + for dir in dirs: + try: + pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all') + except IOError: + continue + for line in pipe: + value = line.split(':')[-1].strip().lower() + if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value): + return int(value.replace('-', ''), 16) + +def _netbios_getnode(): + """Get the hardware address on Windows using NetBIOS calls. + See http://support.microsoft.com/kb/118623 for details.""" + import win32wnet, netbios + ncb = netbios.NCB() + ncb.Command = netbios.NCBENUM + ncb.Buffer = adapters = netbios.LANA_ENUM() + adapters._pack() + if win32wnet.Netbios(ncb) != 0: + return + adapters._unpack() + for i in range(adapters.length): + ncb.Reset() + ncb.Command = netbios.NCBRESET + ncb.Lana_num = ord(adapters.lana[i]) + if win32wnet.Netbios(ncb) != 0: + continue + ncb.Reset() + ncb.Command = netbios.NCBASTAT + ncb.Lana_num = ord(adapters.lana[i]) + ncb.Callname = '*'.ljust(16) + ncb.Buffer = status = netbios.ADAPTER_STATUS() + if win32wnet.Netbios(ncb) != 0: + continue + status._unpack() + bytes = map(ord, status.adapter_address) + return ((bytes[0]<<40L) + (bytes[1]<<32L) + (bytes[2]<<24L) + + (bytes[3]<<16L) + (bytes[4]<<8L) + bytes[5]) + +# Thanks to Thomas Heller for ctypes and for his help with its use here. + +# If ctypes is available, use it to find system routines for UUID generation. +_uuid_generate_random = _uuid_generate_time = _UuidCreate = None +try: + import ctypes, ctypes.util + _buffer = ctypes.create_string_buffer(16) + + # The uuid_generate_* routines are provided by libuuid on at least + # Linux and FreeBSD, and provided by libc on Mac OS X. + for libname in ['uuid', 'c']: + try: + lib = ctypes.CDLL(ctypes.util.find_library(libname)) + except: + continue + if hasattr(lib, 'uuid_generate_random'): + _uuid_generate_random = lib.uuid_generate_random + if hasattr(lib, 'uuid_generate_time'): + _uuid_generate_time = lib.uuid_generate_time + + # On Windows prior to 2000, UuidCreate gives a UUID containing the + # hardware address. On Windows 2000 and later, UuidCreate makes a + # random UUID and UuidCreateSequential gives a UUID containing the + # hardware address. These routines are provided by the RPC runtime. + # NOTE: at least on Tim's WinXP Pro SP2 desktop box, while the last + # 6 bytes returned by UuidCreateSequential are fixed, they don't appear + # to bear any relationship to the MAC address of any network device + # on the box. 
+ try: + lib = ctypes.windll.rpcrt4 + except: + lib = None + _UuidCreate = getattr(lib, 'UuidCreateSequential', + getattr(lib, 'UuidCreate', None)) +except: + pass + +def _unixdll_getnode(): + """Get the hardware address on Unix using ctypes.""" + _uuid_generate_time(_buffer) + return UUID(bytes=_buffer.raw).node + +def _windll_getnode(): + """Get the hardware address on Windows using ctypes.""" + if _UuidCreate(_buffer) == 0: + return UUID(bytes=_buffer.raw).node + +def _random_getnode(): + """Get a random node ID, with eighth bit set as suggested by RFC 4122.""" + import random + return random.randrange(0, 1<<48L) | 0x010000000000L + +_node = None + +def getnode(): + """Get the hardware address as a 48-bit positive integer. + + The first time this runs, it may launch a separate program, which could + be quite slow. If all attempts to obtain the hardware address fail, we + choose a random 48-bit number with its eighth bit set to 1 as recommended + in RFC 4122. + """ + + global _node + if _node is not None: + return _node + + import sys + if sys.platform == 'win32': + getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode] + else: + getters = [_unixdll_getnode, _ifconfig_getnode] + + for getter in getters + [_random_getnode]: + try: + _node = getter() + except: + continue + if _node is not None: + return _node + +_last_timestamp = None + +def uuid1(node=None, clock_seq=None): + """Generate a UUID from a host ID, sequence number, and the current time. + If 'node' is not given, getnode() is used to obtain the hardware + address. If 'clock_seq' is given, it is used as the sequence number; + otherwise a random 14-bit sequence number is chosen.""" + + # When the system provides a version-1 UUID generator, use it (but don't + # use UuidCreate here because its UUIDs don't conform to RFC 4122). + if _uuid_generate_time and node is clock_seq is None: + _uuid_generate_time(_buffer) + return UUID(bytes=_buffer.raw) + + global _last_timestamp + import time + nanoseconds = int(time.time() * 1e9) + # 0x01b21dd213814000 is the number of 100-ns intervals between the + # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00. + timestamp = int(nanoseconds/100) + 0x01b21dd213814000L + if timestamp <= _last_timestamp: + timestamp = _last_timestamp + 1 + _last_timestamp = timestamp + if clock_seq is None: + import random + clock_seq = random.randrange(1<<14L) # instead of stable storage + time_low = timestamp & 0xffffffffL + time_mid = (timestamp >> 32L) & 0xffffL + time_hi_version = (timestamp >> 48L) & 0x0fffL + clock_seq_low = clock_seq & 0xffL + clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL + if node is None: + node = getnode() + return UUID(fields=(time_low, time_mid, time_hi_version, + clock_seq_hi_variant, clock_seq_low, node), version=1) + +def uuid3(namespace, name): + """Generate a UUID from the MD5 hash of a namespace UUID and a name.""" + import md5 + hash = md5.md5(namespace.bytes + name).digest() + return UUID(bytes=hash[:16], version=3) + +def uuid4(): + """Generate a random UUID.""" + + # When the system provides a version-4 UUID generator, use it. + if _uuid_generate_random: + _uuid_generate_random(_buffer) + return UUID(bytes=_buffer.raw) + + # Otherwise, get randomness from urandom or the 'random' module. 
+ try: + import os + return UUID(bytes=os.urandom(16), version=4) + except: + import random + bytes = [chr(random.randrange(256)) for i in range(16)] + return UUID(bytes=bytes, version=4) + +def uuid5(namespace, name): + """Generate a UUID from the SHA-1 hash of a namespace UUID and a name.""" + import sha + hash = sha.sha(namespace.bytes + name).digest() + return UUID(bytes=hash[:16], version=5) + +# The following standard UUIDs are for use with uuid3() or uuid5(). + +NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8') +NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8') +NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8') +NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8') diff --git a/ptvsd/pydevd/_pydev_imps/_pydev_xmlrpclib.py b/ptvsd/pydevd/_pydev_imps/_pydev_xmlrpclib.py new file mode 100644 index 00000000..5f6e2b7f --- /dev/null +++ b/ptvsd/pydevd/_pydev_imps/_pydev_xmlrpclib.py @@ -0,0 +1,1493 @@ +#Just a copy of the version in python 2.5 to be used if it's not available in jython 2.1 +import sys + +# +# XML-RPC CLIENT LIBRARY +# +# an XML-RPC client interface for Python. +# +# the marshalling and response parser code can also be used to +# implement XML-RPC servers. +# +# Notes: +# this version is designed to work with Python 2.1 or newer. +# +# History: +# 1999-01-14 fl Created +# 1999-01-15 fl Changed dateTime to use localtime +# 1999-01-16 fl Added Binary/base64 element, default to RPC2 service +# 1999-01-19 fl Fixed array data element (from Skip Montanaro) +# 1999-01-21 fl Fixed dateTime constructor, etc. +# 1999-02-02 fl Added fault handling, handle empty sequences, etc. +# 1999-02-10 fl Fixed problem with empty responses (from Skip Montanaro) +# 1999-06-20 fl Speed improvements, pluggable parsers/transports (0.9.8) +# 2000-11-28 fl Changed boolean to check the truth value of its argument +# 2001-02-24 fl Added encoding/Unicode/SafeTransport patches +# 2001-02-26 fl Added compare support to wrappers (0.9.9/1.0b1) +# 2001-03-28 fl Make sure response tuple is a singleton +# 2001-03-29 fl Don't require empty params element (from Nicholas Riley) +# 2001-06-10 fl Folded in _xmlrpclib accelerator support (1.0b2) +# 2001-08-20 fl Base xmlrpclib.Error on built-in Exception (from Paul Prescod) +# 2001-09-03 fl Allow Transport subclass to override getparser +# 2001-09-10 fl Lazy import of urllib, cgi, xmllib (20x import speedup) +# 2001-10-01 fl Remove containers from memo cache when done with them +# 2001-10-01 fl Use faster escape method (80% dumps speedup) +# 2001-10-02 fl More dumps microtuning +# 2001-10-04 fl Make sure import expat gets a parser (from Guido van Rossum) +# 2001-10-10 sm Allow long ints to be passed as ints if they don't overflow +# 2001-10-17 sm Test for int and long overflow (allows use on 64-bit systems) +# 2001-11-12 fl Use repr() to marshal doubles (from Paul Felix) +# 2002-03-17 fl Avoid buffered read when possible (from James Rucker) +# 2002-04-07 fl Added pythondoc comments +# 2002-04-16 fl Added __str__ methods to datetime/binary wrappers +# 2002-05-15 fl Added error constants (from Andrew Kuchling) +# 2002-06-27 fl Merged with Python CVS version +# 2002-10-22 fl Added basic authentication (based on code from Phillip Eby) +# 2003-01-22 sm Add support for the bool type +# 2003-02-27 gvr Remove apply calls +# 2003-04-24 sm Use cStringIO if available +# 2003-04-25 ak Add support for nil +# 2003-06-15 gn Add support for time.struct_time +# 2003-07-12 gp Correct marshalling of Faults +# 2003-10-31 mvl Add multicall support 
+# 2004-08-20 mvl Bump minimum supported Python version to 2.1 +# +# Copyright (c) 1999-2002 by Secret Labs AB. +# Copyright (c) 1999-2002 by Fredrik Lundh. +# +# info@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The XML-RPC client interface is +# +# Copyright (c) 1999-2002 by Secret Labs AB +# Copyright (c) 1999-2002 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + +# +# things to look into some day: + +# TODO: sort out True/False/boolean issues for Python 2.3 + +""" +An XML-RPC client interface for Python. + +The marshalling and response parser code can also be used to +implement XML-RPC servers. + +Exported exceptions: + + Error Base class for client errors + ProtocolError Indicates an HTTP protocol error + ResponseError Indicates a broken response package + Fault Indicates an XML-RPC fault package + +Exported classes: + + ServerProxy Represents a logical connection to an XML-RPC server + + MultiCall Executor of boxcared xmlrpc requests + Boolean boolean wrapper to generate a "boolean" XML-RPC value + DateTime dateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate a "dateTime.iso8601" + XML-RPC value + Binary binary data wrapper + + SlowParser Slow but safe standard parser (based on xmllib) + Marshaller Generate an XML-RPC params chunk from a Python data structure + Unmarshaller Unmarshal an XML-RPC response from incoming XML event message + Transport Handles an HTTP transaction to an XML-RPC server + SafeTransport Handles an HTTPS transaction to an XML-RPC server + +Exported constants: + + True + False + +Exported functions: + + boolean Convert any Python value to an XML-RPC boolean + getparser Create instance of the fastest available parser & attach + to an unmarshalling object + dumps Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + loads Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). 
+""" + +import re, string, time, operator + +from types import * + +# -------------------------------------------------------------------- +# Internal stuff + +try: + unicode +except NameError: + unicode = None # unicode support not available + +try: + import datetime +except ImportError: + datetime = None + +try: + _bool_is_builtin = False.__class__.__name__ == "bool" +except (NameError, AttributeError): + _bool_is_builtin = 0 + +def _decode(data, encoding, is8bit=re.compile("[\x80-\xff]").search): + # decode non-ascii string (if possible) + if unicode and encoding and is8bit(data): + data = unicode(data, encoding) + return data + +def escape(s, replace=string.replace): + s = replace(s, "&", "&") + s = replace(s, "<", "<") + return replace(s, ">", ">",) + +if unicode: + def _stringify(string): + # convert to 7-bit ascii if possible + try: + return string.encode("ascii") + except UnicodeError: + return string +else: + def _stringify(string): + return string + +__version__ = "1.0.1" + +# xmlrpc integer limits +try: + long +except NameError: + long = int +MAXINT = long(2) ** 31 - 1 +MININT = long(-2) ** 31 + +# -------------------------------------------------------------------- +# Error constants (from Dan Libby's specification at +# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php) + +# Ranges of errors +PARSE_ERROR = -32700 +SERVER_ERROR = -32600 +APPLICATION_ERROR = -32500 +SYSTEM_ERROR = -32400 +TRANSPORT_ERROR = -32300 + +# Specific errors +NOT_WELLFORMED_ERROR = -32700 +UNSUPPORTED_ENCODING = -32701 +INVALID_ENCODING_CHAR = -32702 +INVALID_XMLRPC = -32600 +METHOD_NOT_FOUND = -32601 +INVALID_METHOD_PARAMS = -32602 +INTERNAL_ERROR = -32603 + +# -------------------------------------------------------------------- +# Exceptions + +## +# Base class for all kinds of client-side errors. + +class Error(Exception): + """Base class for client errors.""" + def __str__(self): + return repr(self) + +## +# Indicates an HTTP-level protocol error. This is raised by the HTTP +# transport layer, if the server returns an error code other than 200 +# (OK). +# +# @param url The target URL. +# @param errcode The HTTP error code. +# @param errmsg The HTTP error message. +# @param headers The HTTP header dictionary. + +class ProtocolError(Error): + """Indicates an HTTP protocol error.""" + def __init__(self, url, errcode, errmsg, headers): + Error.__init__(self) + self.url = url + self.errcode = errcode + self.errmsg = errmsg + self.headers = headers + def __repr__(self): + return ( + "" % + (self.url, self.errcode, self.errmsg) + ) + +## +# Indicates a broken XML-RPC response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response is +# malformed. + +class ResponseError(Error): + """Indicates a broken response package.""" + pass + +## +# Indicates an XML-RPC fault response package. This exception is +# raised by the unmarshalling layer, if the XML-RPC response contains +# a fault string. This exception can also used as a class, to +# generate a fault XML-RPC message. +# +# @param faultCode The XML-RPC fault code. +# @param faultString The XML-RPC fault string. 
+ +class Fault(Error): + """Indicates an XML-RPC fault package.""" + def __init__(self, faultCode, faultString, **extra): + Error.__init__(self) + self.faultCode = faultCode + self.faultString = faultString + def __repr__(self): + return ( + "" % + (self.faultCode, repr(self.faultString)) + ) + +# -------------------------------------------------------------------- +# Special values + +## +# Wrapper for XML-RPC boolean values. Use the xmlrpclib.True and +# xmlrpclib.False constants, or the xmlrpclib.boolean() function, to +# generate boolean XML-RPC values. +# +# @param value A boolean value. Any true value is interpreted as True, +# all other values are interpreted as False. + +if _bool_is_builtin: + boolean = Boolean = bool #@UndefinedVariable + # to avoid breaking code which references xmlrpclib.{True,False} + True, False = True, False +else: + class Boolean: + """Boolean-value wrapper. + + Use True or False to generate a "boolean" XML-RPC value. + """ + + def __init__(self, value=0): + self.value = operator.truth(value) + + def encode(self, out): + out.write("%d\n" % self.value) + + def __cmp__(self, other): + if isinstance(other, Boolean): + other = other.value + return cmp(self.value, other) + + def __repr__(self): + if self.value: + return "" % id(self) + else: + return "" % id(self) + + def __int__(self): + return self.value + + def __nonzero__(self): + return self.value + + True, False = Boolean(1), Boolean(0) + + ## + # Map true or false value to XML-RPC boolean values. + # + # @def boolean(value) + # @param value A boolean value. Any true value is mapped to True, + # all other values are mapped to False. + # @return xmlrpclib.True or xmlrpclib.False. + # @see Boolean + # @see True + # @see False + + def boolean(value, _truefalse=(False, True)): + """Convert any Python value to XML-RPC 'boolean'.""" + return _truefalse[operator.truth(value)] + +## +# Wrapper for XML-RPC DateTime values. This converts a time value to +# the format used by XML-RPC. +#

+# The value can be given as a string in the format +# "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by +# time.localtime()), or an integer value (as returned by time.time()). +# The wrapper uses time.localtime() to convert an integer to a time +# tuple. +# +# @param value The time, given as an ISO 8601 string, a time +# tuple, or a integer time value. + +class DateTime: + """DateTime wrapper for an ISO 8601 string or time tuple or + localtime integer value to generate 'dateTime.iso8601' XML-RPC + value. + """ + + def __init__(self, value=0): + if not isinstance(value, StringType): + if datetime and isinstance(value, datetime.datetime): + self.value = value.strftime("%Y%m%dT%H:%M:%S") + return + if datetime and isinstance(value, datetime.date): + self.value = value.strftime("%Y%m%dT%H:%M:%S") + return + if datetime and isinstance(value, datetime.time): + today = datetime.datetime.now().strftime("%Y%m%d") + self.value = value.strftime(today + "T%H:%M:%S") + return + if not isinstance(value, (TupleType, time.struct_time)): #@UndefinedVariable + if value == 0: + value = time.time() + value = time.localtime(value) + value = time.strftime("%Y%m%dT%H:%M:%S", value) + self.value = value + + def __cmp__(self, other): + if isinstance(other, DateTime): + other = other.value + return cmp(self.value, other) + + ## + # Get date/time value. + # + # @return Date/time value, as an ISO 8601 string. + + def __str__(self): + return self.value + + def __repr__(self): + return "" % (repr(self.value), id(self)) + + def decode(self, data): + data = str(data) + self.value = string.strip(data) + + def encode(self, out): + out.write("") + out.write(self.value) + out.write("\n") + +def _datetime(data): + # decode xml element contents into a DateTime structure. + value = DateTime() + value.decode(data) + return value + +def _datetime_type(data): + t = time.strptime(data, "%Y%m%dT%H:%M:%S") #@UndefinedVariable + return datetime.datetime(*tuple(t)[:6]) + +## +# Wrapper for binary data. This can be used to transport any kind +# of binary data over XML-RPC, using BASE64 encoding. +# +# @param data An 8-bit string containing arbitrary data. + +import base64 +try: + import cStringIO as StringIO +except ImportError: + import StringIO + +class Binary: + """Wrapper for binary data.""" + + def __init__(self, data=None): + self.data = data + + ## + # Get buffer contents. + # + # @return Buffer contents, as an 8-bit string. 
+ + def __str__(self): + return self.data or "" + + def __cmp__(self, other): + if isinstance(other, Binary): + other = other.data + return cmp(self.data, other) + + def decode(self, data): + self.data = base64.decodestring(data) + + def encode(self, out): + out.write("\n") + base64.encode(StringIO.StringIO(self.data), out) + out.write("\n") + +def _binary(data): + # decode xml element contents into a Binary structure + value = Binary() + value.decode(data) + return value + +WRAPPERS = (DateTime, Binary) +if not _bool_is_builtin: + WRAPPERS = WRAPPERS + (Boolean,) + +# -------------------------------------------------------------------- +# XML parsers + +try: + # optional xmlrpclib accelerator + import _xmlrpclib #@UnresolvedImport + FastParser = _xmlrpclib.Parser + FastUnmarshaller = _xmlrpclib.Unmarshaller +except (AttributeError, ImportError): + FastParser = FastUnmarshaller = None + +try: + import _xmlrpclib #@UnresolvedImport + FastMarshaller = _xmlrpclib.Marshaller +except (AttributeError, ImportError): + FastMarshaller = None + +# +# the SGMLOP parser is about 15x faster than Python's builtin +# XML parser. SGMLOP sources can be downloaded from: +# +# http://www.pythonware.com/products/xml/sgmlop.htm +# + +try: + import sgmlop + if not hasattr(sgmlop, "XMLParser"): + raise ImportError() +except ImportError: + SgmlopParser = None # sgmlop accelerator not available +else: + class SgmlopParser: + def __init__(self, target): + + # setup callbacks + self.finish_starttag = target.start + self.finish_endtag = target.end + self.handle_data = target.data + self.handle_xml = target.xml + + # activate parser + self.parser = sgmlop.XMLParser() + self.parser.register(self) + self.feed = self.parser.feed + self.entity = { + "amp": "&", "gt": ">", "lt": "<", + "apos": "'", "quot": '"' + } + + def close(self): + try: + self.parser.close() + finally: + self.parser = self.feed = None # nuke circular reference + + def handle_proc(self, tag, attr): + m = re.search("encoding\s*=\s*['\"]([^\"']+)[\"']", attr) #@UndefinedVariable + if m: + self.handle_xml(m.group(1), 1) + + def handle_entityref(self, entity): + # entity + try: + self.handle_data(self.entity[entity]) + except KeyError: + self.handle_data("&%s;" % entity) + +try: + from xml.parsers import expat + if not hasattr(expat, "ParserCreate"): + raise ImportError() +except ImportError: + ExpatParser = None # expat not available +else: + class ExpatParser: + # fast expat parser for Python 2.0 and later. this is about + # 50% slower than sgmlop, on roundtrip testing + def __init__(self, target): + self._parser = parser = expat.ParserCreate(None, None) + self._target = target + parser.StartElementHandler = target.start + parser.EndElementHandler = target.end + parser.CharacterDataHandler = target.data + encoding = None + if not parser.returns_unicode: + encoding = "utf-8" + target.xml(encoding, None) + + def feed(self, data): + self._parser.Parse(data, 0) + + def close(self): + self._parser.Parse("", 1) # end of data + del self._target, self._parser # get rid of circular references + +class SlowParser: + """Default XML parser (based on xmllib.XMLParser).""" + # this is about 10 times slower than sgmlop, on roundtrip + # testing. + def __init__(self, target): + import xmllib # lazy subclassing (!) 
+ if xmllib.XMLParser not in SlowParser.__bases__: + SlowParser.__bases__ = (xmllib.XMLParser,) + self.handle_xml = target.xml + self.unknown_starttag = target.start + self.handle_data = target.data + self.handle_cdata = target.data + self.unknown_endtag = target.end + try: + xmllib.XMLParser.__init__(self, accept_utf8=1) + except TypeError: + xmllib.XMLParser.__init__(self) # pre-2.0 + +# -------------------------------------------------------------------- +# XML-RPC marshalling and unmarshalling code + +## +# XML-RPC marshaller. +# +# @param encoding Default encoding for 8-bit strings. The default +# value is None (interpreted as UTF-8). +# @see dumps + +class Marshaller: + """Generate an XML-RPC params chunk from a Python data structure. + + Create a Marshaller instance for each set of parameters, and use + the "dumps" method to convert your data (represented as a tuple) + to an XML-RPC params chunk. To write a fault response, pass a + Fault instance instead. You may prefer to use the "dumps" module + function for this purpose. + """ + + # by the way, if you don't understand what's going on in here, + # that's perfectly ok. + + def __init__(self, encoding=None, allow_none=0): + self.memo = {} + self.data = None + self.encoding = encoding + self.allow_none = allow_none + + dispatch = {} + + def dumps(self, values): + out = [] + write = out.append + dump = self.__dump + if isinstance(values, Fault): + # fault instance + write("\n") + dump({'faultCode': values.faultCode, + 'faultString': values.faultString}, + write) + write("\n") + else: + # parameter block + # FIXME: the xml-rpc specification allows us to leave out + # the entire block if there are no parameters. + # however, changing this may break older code (including + # old versions of xmlrpclib.py), so this is better left as + # is for now. See @XMLRPC3 for more information. 
/F + write("\n") + for v in values: + write("\n") + dump(v, write) + write("\n") + write("\n") + result = string.join(out, "") + return result + + def __dump(self, value, write): + try: + f = self.dispatch[type(value)] + except KeyError: + raise TypeError("cannot marshal %s objects" % type(value)) + else: + f(self, value, write) + + def dump_nil (self, value, write): + if not self.allow_none: + raise TypeError("cannot marshal None unless allow_none is enabled") + write("") + dispatch[NoneType] = dump_nil + + def dump_int(self, value, write): + # in case ints are > 32 bits + if value > MAXINT or value < MININT: + raise OverflowError("int exceeds XML-RPC limits") + write("") + write(str(value)) + write("\n") + dispatch[IntType] = dump_int + + if _bool_is_builtin: + def dump_bool(self, value, write): + write("") + write(value and "1" or "0") + write("\n") + dispatch[bool] = dump_bool #@UndefinedVariable + + def dump_long(self, value, write): + if value > MAXINT or value < MININT: + raise OverflowError("long int exceeds XML-RPC limits") + write("") + write(str(int(value))) + write("\n") + dispatch[LongType] = dump_long + + def dump_double(self, value, write): + write("") + write(repr(value)) + write("\n") + dispatch[FloatType] = dump_double + + def dump_string(self, value, write, escape=escape): + write("") + write(escape(value)) + write("\n") + dispatch[StringType] = dump_string + + if unicode: + def dump_unicode(self, value, write, escape=escape): + value = value.encode(self.encoding) + write("") + write(escape(value)) + write("\n") + dispatch[UnicodeType] = dump_unicode + + def dump_array(self, value, write): + i = id(value) + if self.memo.has_key(i): + raise TypeError("cannot marshal recursive sequences") + self.memo[i] = None + dump = self.__dump + write("\n") + for v in value: + dump(v, write) + write("\n") + del self.memo[i] + dispatch[TupleType] = dump_array + dispatch[ListType] = dump_array + + def dump_struct(self, value, write, escape=escape): + i = id(value) + if self.memo.has_key(i): + raise TypeError("cannot marshal recursive dictionaries") + self.memo[i] = None + dump = self.__dump + write("\n") + for k, v in value.items(): + write("\n") + if type(k) is not StringType: + if unicode and type(k) is UnicodeType: + k = k.encode(self.encoding) + else: + raise TypeError("dictionary key must be string") + write("%s\n" % escape(k)) + dump(v, write) + write("\n") + write("\n") + del self.memo[i] + dispatch[DictType] = dump_struct + + if datetime: + def dump_datetime(self, value, write): + write("") + write(value.strftime("%Y%m%dT%H:%M:%S")) + write("\n") + dispatch[datetime.datetime] = dump_datetime + + def dump_date(self, value, write): + write("") + write(value.strftime("%Y%m%dT00:00:00")) + write("\n") + dispatch[datetime.date] = dump_date + + def dump_time(self, value, write): + write("") + write(datetime.datetime.now().date().strftime("%Y%m%dT")) + write(value.strftime("%H:%M:%S")) + write("\n") + dispatch[datetime.time] = dump_time + + def dump_instance(self, value, write): + # check for special wrappers + if value.__class__ in WRAPPERS: + self.write = write + value.encode(self) + del self.write + else: + # store instance attributes as a struct (really?) + self.dump_struct(value.__dict__, write) + dispatch[InstanceType] = dump_instance + +## +# XML-RPC unmarshaller. +# +# @see loads + +class Unmarshaller: + """Unmarshal an XML-RPC response, based on incoming XML event + messages (start, data, end). Call close() to get the resulting + data structure. 
+ + Note that this reader is fairly tolerant, and gladly accepts bogus + XML-RPC data without complaining (but not bogus XML). + """ + + # and again, if you don't understand what's going on in here, + # that's perfectly ok. + + def __init__(self, use_datetime=0): + self._type = None + self._stack = [] + self._marks = [] + self._data = [] + self._methodname = None + self._encoding = "utf-8" + self.append = self._stack.append + self._use_datetime = use_datetime + if use_datetime and not datetime: + raise ValueError("the datetime module is not available") + + def close(self): + # return response tuple and target method + if self._type is None or self._marks: + raise ResponseError() + if self._type == "fault": + raise Fault(**self._stack[0]) + return tuple(self._stack) + + def getmethodname(self): + return self._methodname + + # + # event handlers + + def xml(self, encoding, standalone): + self._encoding = encoding + # FIXME: assert standalone == 1 ??? + + def start(self, tag, attrs): + # prepare to handle this element + if tag == "array" or tag == "struct": + self._marks.append(len(self._stack)) + self._data = [] + self._value = (tag == "value") + + def data(self, text): + self._data.append(text) + + def end(self, tag, join=string.join): + # call the appropriate end tag handler + try: + f = self.dispatch[tag] + except KeyError: + pass # unknown tag ? + else: + return f(self, join(self._data, "")) + + # + # accelerator support + + def end_dispatch(self, tag, data): + # dispatch data + try: + f = self.dispatch[tag] + except KeyError: + pass # unknown tag ? + else: + return f(self, data) + + # + # element decoders + + dispatch = {} + + def end_nil (self, data): + self.append(None) + self._value = 0 + dispatch["nil"] = end_nil + + def end_boolean(self, data): + if data == "0": + self.append(False) + elif data == "1": + self.append(True) + else: + raise TypeError("bad boolean value") + self._value = 0 + dispatch["boolean"] = end_boolean + + def end_int(self, data): + self.append(int(data)) + self._value = 0 + dispatch["i4"] = end_int + dispatch["int"] = end_int + + def end_double(self, data): + self.append(float(data)) + self._value = 0 + dispatch["double"] = end_double + + def end_string(self, data): + if self._encoding: + data = _decode(data, self._encoding) + self.append(_stringify(data)) + self._value = 0 + dispatch["string"] = end_string + dispatch["name"] = end_string # struct keys are always strings + + def end_array(self, data): + mark = self._marks.pop() + # map arrays to Python lists + self._stack[mark:] = [self._stack[mark:]] + self._value = 0 + dispatch["array"] = end_array + + def end_struct(self, data): + mark = self._marks.pop() + # map structs to Python dictionaries + dict = {} + items = self._stack[mark:] + for i in range(0, len(items), 2): + dict[_stringify(items[i])] = items[i + 1] + self._stack[mark:] = [dict] + self._value = 0 + dispatch["struct"] = end_struct + + def end_base64(self, data): + value = Binary() + value.decode(data) + self.append(value) + self._value = 0 + dispatch["base64"] = end_base64 + + def end_dateTime(self, data): + value = DateTime() + value.decode(data) + if self._use_datetime: + value = _datetime_type(data) + self.append(value) + dispatch["dateTime.iso8601"] = end_dateTime + + def end_value(self, data): + # if we stumble upon a value element with no internal + # elements, treat it as a string element + if self._value: + self.end_string(data) + dispatch["value"] = end_value + + def end_params(self, data): + self._type = "params" + dispatch["params"] = 
end_params + + def end_fault(self, data): + self._type = "fault" + dispatch["fault"] = end_fault + + def end_methodName(self, data): + if self._encoding: + data = _decode(data, self._encoding) + self._methodname = data + self._type = "methodName" # no params + dispatch["methodName"] = end_methodName + +## Multicall support +# + +class _MultiCallMethod: + # some lesser magic to store calls made to a MultiCall object + # for batch execution + def __init__(self, call_list, name): + self.__call_list = call_list + self.__name = name + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + self.__call_list.append((self.__name, args)) + +class MultiCallIterator: + """Iterates over the results of a multicall. Exceptions are + thrown in response to xmlrpc faults.""" + + def __init__(self, results): + self.results = results + + def __getitem__(self, i): + item = self.results[i] + if type(item) == type({}): + raise Fault(item['faultCode'], item['faultString']) + elif type(item) == type([]): + return item[0] + else: + raise ValueError("unexpected type in multicall result") + +class MultiCall: + """server -> a object used to boxcar method calls + + server should be a ServerProxy object. + + Methods can be added to the MultiCall using normal + method call syntax e.g.: + + multicall = MultiCall(server_proxy) + multicall.add(2,3) + multicall.get_address("Guido") + + To execute the multicall, call the MultiCall object e.g.: + + add_result, address = multicall() + """ + + def __init__(self, server): + self.__server = server + self.__call_list = [] + + def __repr__(self): + return "" % id(self) + + __str__ = __repr__ + + def __getattr__(self, name): + return _MultiCallMethod(self.__call_list, name) + + def __call__(self): + marshalled_list = [] + for name, args in self.__call_list: + marshalled_list.append({'methodName' : name, 'params' : args}) + + return MultiCallIterator(self.__server.system.multicall(marshalled_list)) + +# -------------------------------------------------------------------- +# convenience functions + +## +# Create a parser object, and connect it to an unmarshalling instance. +# This function picks the fastest available XML parser. +# +# return A (parser, unmarshaller) tuple. + +def getparser(use_datetime=0): + """getparser() -> parser, unmarshaller + + Create an instance of the fastest available parser, and attach it + to an unmarshalling object. Return both objects. + """ + if use_datetime and not datetime: + raise ValueError("the datetime module is not available") + if FastParser and FastUnmarshaller: + if use_datetime: + mkdatetime = _datetime_type + else: + mkdatetime = _datetime + target = FastUnmarshaller(True, False, _binary, mkdatetime, Fault) + parser = FastParser(target) + else: + target = Unmarshaller(use_datetime=use_datetime) + if FastParser: + parser = FastParser(target) + elif SgmlopParser: + parser = SgmlopParser(target) + elif ExpatParser: + parser = ExpatParser(target) + else: + parser = SlowParser(target) + return parser, target + +## +# Convert a Python tuple or a Fault instance to an XML-RPC packet. +# +# @def dumps(params, **options) +# @param params A tuple or Fault instance. +# @keyparam methodname If given, create a methodCall request for +# this method name. +# @keyparam methodresponse If given, create a methodResponse packet. +# If used with a tuple, the tuple must be a singleton (that is, +# it must contain exactly one element). +# @keyparam encoding The packet encoding. 
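+# @keyparam allow_none If true, None values are marshalled as nil
+#     elements (a common XML-RPC extension); otherwise marshalling
+#     None raises TypeError.
+# Illustrative round trip: loads(dumps((1, 2), methodname="add"))
+#     returns ((1, 2), "add").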
+# @return A string containing marshalled data. + +def dumps(params, methodname=None, methodresponse=None, encoding=None, + allow_none=0): + """data [,options] -> marshalled data + + Convert an argument tuple or a Fault instance to an XML-RPC + request (or response, if the methodresponse option is used). + + In addition to the data object, the following options can be given + as keyword arguments: + + methodname: the method name for a methodCall packet + + methodresponse: true to create a methodResponse packet. + If this option is used with a tuple, the tuple must be + a singleton (i.e. it can contain only one element). + + encoding: the packet encoding (default is UTF-8) + + All 8-bit strings in the data structure are assumed to use the + packet encoding. Unicode strings are automatically converted, + where necessary. + """ + + assert isinstance(params, TupleType) or isinstance(params, Fault), \ + "argument must be tuple or Fault instance" + + if isinstance(params, Fault): + methodresponse = 1 + elif methodresponse and isinstance(params, TupleType): + assert len(params) == 1, "response tuple must be a singleton" + + if not encoding: + encoding = "utf-8" + + if FastMarshaller: + m = FastMarshaller(encoding) + else: + m = Marshaller(encoding, allow_none) + + data = m.dumps(params) + + if encoding != "utf-8": + xmlheader = "\n" % str(encoding) + else: + xmlheader = "\n" # utf-8 is default + + # standard XML-RPC wrappings + if methodname: + # a method call + if not isinstance(methodname, StringType): + methodname = methodname.encode(encoding) + data = ( + xmlheader, + "\n" + "", methodname, "\n", + data, + "\n" + ) + elif methodresponse: + # a method response, or a fault structure + data = ( + xmlheader, + "\n", + data, + "\n" + ) + else: + return data # return as is + return string.join(data, "") + +## +# Convert an XML-RPC packet to a Python object. If the XML-RPC packet +# represents a fault condition, this function raises a Fault exception. +# +# @param data An XML-RPC packet, given as an 8-bit string. +# @return A tuple containing the unpacked data, and the method name +# (None if not present). +# @see Fault + +def loads(data, use_datetime=0): + """data -> unmarshalled data, method name + + Convert an XML-RPC packet to unmarshalled data plus a method + name (None if not present). + + If the XML-RPC packet represents a fault condition, this function + raises a Fault exception. + """ + p, u = getparser(use_datetime=use_datetime) + p.feed(data) + p.close() + return u.close(), u.getmethodname() + + +# -------------------------------------------------------------------- +# request dispatcher + +class _Method: + # some magic to bind an XML-RPC method to an RPC server. + # supports "nested" methods (e.g. examples.getStateName) + def __init__(self, send, name): + self.__send = send + self.__name = name + def __getattr__(self, name): + return _Method(self.__send, "%s.%s" % (self.__name, name)) + def __call__(self, *args): + return self.__send(self.__name, args) + +## +# Standard transport class for XML-RPC over HTTP. +#

+# You can create custom transports by subclassing this method, and +# overriding selected methods. + +class Transport: + """Handles an HTTP transaction to an XML-RPC server.""" + + # client identifier (may be overridden) + user_agent = "xmlrpclib.py/%s (by www.pythonware.com)" % __version__ + + def __init__(self, use_datetime=0): + self._use_datetime = use_datetime + + ## + # Send a complete request, and parse the response. + # + # @param host Target host. + # @param handler Target PRC handler. + # @param request_body XML-RPC request body. + # @param verbose Debugging flag. + # @return Parsed response. + + def request(self, host, handler, request_body, verbose=0): + # issue XML-RPC request + + h = self.make_connection(host) + if verbose: + h.set_debuglevel(1) + + self.send_request(h, handler, request_body) + self.send_host(h, host) + self.send_user_agent(h) + self.send_content(h, request_body) + + errcode, errmsg, headers = h.getreply() + + if errcode != 200: + raise ProtocolError( + host + handler, + errcode, errmsg, + headers + ) + + self.verbose = verbose + + try: + sock = h._conn.sock + except AttributeError: + sock = None + + return self._parse_response(h.getfile(), sock) + + ## + # Create parser. + # + # @return A 2-tuple containing a parser and a unmarshaller. + + def getparser(self): + # get parser and unmarshaller + return getparser(use_datetime=self._use_datetime) + + ## + # Get authorization info from host parameter + # Host may be a string, or a (host, x509-dict) tuple; if a string, + # it is checked for a "user:pw@host" format, and a "Basic + # Authentication" header is added if appropriate. + # + # @param host Host descriptor (URL or (URL, x509 info) tuple). + # @return A 3-tuple containing (actual host, extra headers, + # x509 info). The header and x509 fields may be None. + + def get_host_info(self, host): + + x509 = {} + if isinstance(host, TupleType): + host, x509 = host + + import urllib + auth, host = urllib.splituser(host) + + if auth: + import base64 + auth = base64.encodestring(urllib.unquote(auth)) + auth = string.join(string.split(auth), "") # get rid of whitespace + extra_headers = [ + ("Authorization", "Basic " + auth) + ] + else: + extra_headers = None + + return host, extra_headers, x509 + + ## + # Connect to server. + # + # @param host Target host. + # @return A connection handle. + + def make_connection(self, host): + # create a HTTP connection object from a host descriptor + import httplib + host, extra_headers, x509 = self.get_host_info(host) + return httplib.HTTP(host) + + ## + # Send request header. + # + # @param connection Connection handle. + # @param handler Target RPC handler. + # @param request_body XML-RPC body. + + def send_request(self, connection, handler, request_body): + connection.putrequest("POST", handler) + + ## + # Send host name. + # + # @param connection Connection handle. + # @param host Host name. + + def send_host(self, connection, host): + host, extra_headers, x509 = self.get_host_info(host) + connection.putheader("Host", host) + if extra_headers: + if isinstance(extra_headers, DictType): + extra_headers = extra_headers.items() + for key, value in extra_headers: + connection.putheader(key, value) + + ## + # Send user-agent identifier. + # + # @param connection Connection handle. + + def send_user_agent(self, connection): + connection.putheader("User-Agent", self.user_agent) + + ## + # Send request body. + # + # @param connection Connection handle. + # @param request_body XML-RPC request body. 
+ + def send_content(self, connection, request_body): + connection.putheader("Content-Type", "text/xml") + connection.putheader("Content-Length", str(len(request_body))) + connection.endheaders() + if request_body: + connection.send(request_body) + + ## + # Parse response. + # + # @param file Stream. + # @return Response tuple and target method. + + def parse_response(self, file): + # compatibility interface + return self._parse_response(file, None) + + ## + # Parse response (alternate interface). This is similar to the + # parse_response method, but also provides direct access to the + # underlying socket object (where available). + # + # @param file Stream. + # @param sock Socket handle (or None, if the socket object + # could not be accessed). + # @return Response tuple and target method. + + def _parse_response(self, file, sock): + # read response from input file/socket, and parse it + + p, u = self.getparser() + + while 1: + if sock: + response = sock.recv(1024) + else: + response = file.read(1024) + if not response: + break + if self.verbose: + sys.stdout.write("body: %s\n" % repr(response)) + p.feed(response) + + file.close() + p.close() + + return u.close() + +## +# Standard transport class for XML-RPC over HTTPS. + +class SafeTransport(Transport): + """Handles an HTTPS transaction to an XML-RPC server.""" + + # FIXME: mostly untested + + def make_connection(self, host): + # create a HTTPS connection object from a host descriptor + # host may be a string, or a (host, x509-dict) tuple + import httplib + host, extra_headers, x509 = self.get_host_info(host) + try: + HTTPS = httplib.HTTPS + except AttributeError: + raise NotImplementedError( + "your version of httplib doesn't support HTTPS" + ) + else: + return HTTPS(host, None, **(x509 or {})) + +## +# Standard server proxy. This class establishes a virtual connection +# to an XML-RPC server. +#

+# This class is available as ServerProxy and Server. New code should +# use ServerProxy, to avoid confusion. +# +# @def ServerProxy(uri, **options) +# @param uri The connection point on the server. +# @keyparam transport A transport factory, compatible with the +# standard transport class. +# @keyparam encoding The default encoding used for 8-bit strings +# (default is UTF-8). +# @keyparam verbose Use a true value to enable debugging output. +# (printed to standard output). +# @see Transport + +class ServerProxy: + """uri [,options] -> a logical connection to an XML-RPC server + + uri is the connection point on the server, given as + scheme://host/target. + + The standard implementation always supports the "http" scheme. If + SSL socket support is available (Python 2.0), it also supports + "https". + + If the target part and the slash preceding it are both omitted, + "/RPC2" is assumed. + + The following options can be given as keyword arguments: + + transport: a transport factory + encoding: the request encoding (default is UTF-8) + + All 8-bit strings passed to the server proxy are assumed to use + the given encoding. + """ + + def __init__(self, uri, transport=None, encoding=None, verbose=0, + allow_none=0, use_datetime=0): + # establish a "logical" server connection + + # get the url + import urllib + type, uri = urllib.splittype(uri) + if type not in ("http", "https"): + raise IOError("unsupported XML-RPC protocol") + self.__host, self.__handler = urllib.splithost(uri) + if not self.__handler: + self.__handler = "/RPC2" + + if transport is None: + if type == "https": + transport = SafeTransport(use_datetime=use_datetime) + else: + transport = Transport(use_datetime=use_datetime) + self.__transport = transport + + self.__encoding = encoding + self.__verbose = verbose + self.__allow_none = allow_none + + def __request(self, methodname, params): + # call a method on the remote server + + request = dumps(params, methodname, encoding=self.__encoding, + allow_none=self.__allow_none) + + response = self.__transport.request( + self.__host, + self.__handler, + request, + verbose=self.__verbose + ) + + if len(response) == 1: + response = response[0] + + return response + + def __repr__(self): + return ( + "" % + (self.__host, self.__handler) + ) + + __str__ = __repr__ + + def __getattr__(self, name): + # magic method dispatcher + return _Method(self.__request, name) + + # note: to call a remote object with an non-standard name, use + # result getattr(server, "strange-python-name")(args) + +# compatibility + +Server = ServerProxy + +# -------------------------------------------------------------------- +# test code + +if __name__ == "__main__": + + # simple test program (from the XML-RPC specification) + + # server = ServerProxy("http://localhost:8000") # local server + server = ServerProxy("http://time.xmlrpc.com/RPC2") + + sys.stdout.write('%s\n' % server) + + try: + sys.stdout.write('%s\n' % (server.currentTime.getCurrentTime(),)) + except Error: + import traceback;traceback.print_exc() + + multi = MultiCall(server) + multi.currentTime.getCurrentTime() + multi.currentTime.getCurrentTime() + try: + for response in multi(): + sys.stdout.write('%s\n' % (response,)) + except Error: + import traceback;traceback.print_exc() diff --git a/ptvsd/pydevd/_pydev_runfiles/__init__.py b/ptvsd/pydevd/_pydev_runfiles/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles.py new file mode 100644 index 
00000000..66e646d3 --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles.py @@ -0,0 +1,874 @@ +from __future__ import nested_scopes + +import fnmatch +import os.path +from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support +from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport +import re +import time + + +#======================================================================================================================= +# Configuration +#======================================================================================================================= +class Configuration: + + def __init__( + self, + files_or_dirs='', + verbosity=2, + include_tests=None, + tests=None, + port=None, + files_to_tests=None, + jobs=1, + split_jobs='tests', + coverage_output_dir=None, + coverage_include=None, + coverage_output_file=None, + exclude_files=None, + exclude_tests=None, + include_files=None, + django=False, + ): + self.files_or_dirs = files_or_dirs + self.verbosity = verbosity + self.include_tests = include_tests + self.tests = tests + self.port = port + self.files_to_tests = files_to_tests + self.jobs = jobs + self.split_jobs = split_jobs + self.django = django + + if include_tests: + assert isinstance(include_tests, (list, tuple)) + + if exclude_files: + assert isinstance(exclude_files, (list, tuple)) + + if exclude_tests: + assert isinstance(exclude_tests, (list, tuple)) + + self.exclude_files = exclude_files + self.include_files = include_files + self.exclude_tests = exclude_tests + + self.coverage_output_dir = coverage_output_dir + self.coverage_include = coverage_include + self.coverage_output_file = coverage_output_file + + def __str__(self): + return '''Configuration + - files_or_dirs: %s + - verbosity: %s + - tests: %s + - port: %s + - files_to_tests: %s + - jobs: %s + - split_jobs: %s + + - include_files: %s + - include_tests: %s + + - exclude_files: %s + - exclude_tests: %s + + - coverage_output_dir: %s + - coverage_include_dir: %s + - coverage_output_file: %s + + - django: %s +''' % ( + self.files_or_dirs, + self.verbosity, + self.tests, + self.port, + self.files_to_tests, + self.jobs, + self.split_jobs, + + self.include_files, + self.include_tests, + + self.exclude_files, + self.exclude_tests, + + self.coverage_output_dir, + self.coverage_include, + self.coverage_output_file, + + self.django, + ) + + +#======================================================================================================================= +# parse_cmdline +#======================================================================================================================= +def parse_cmdline(argv=None): + """ + Parses command line and returns test directories, verbosity, test filter and test suites + + usage: + runfiles.py -v|--verbosity -t|--tests dirs|files + + Multiprocessing options: + jobs=number (with the number of jobs to be used to run the tests) + split_jobs='module'|'tests' + if == module, a given job will always receive all the tests from a module + if == tests, the tests will be split independently of their originating module (default) + + --exclude_files = comma-separated list of patterns with files to exclude (fnmatch style) + --include_files = comma-separated list of patterns with files to include (fnmatch style) + --exclude_tests = comma-separated list of patterns with test names to exclude (fnmatch style) + + Note: if --tests is given, --exclude_files, --include_files and --exclude_tests are ignored! 
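+    Illustrative invocation:
+        runfiles.py --verbosity=4 --jobs=2 --split_jobs=tests --include_files=test_*.py /path/to/tests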
+ """ + if argv is None: + argv = sys.argv + + verbosity = 2 + include_tests = None + tests = None + port = None + jobs = 1 + split_jobs = 'tests' + files_to_tests = {} + coverage_output_dir = None + coverage_include = None + exclude_files = None + exclude_tests = None + include_files = None + django = False + + from _pydev_bundle._pydev_getopt import gnu_getopt + optlist, dirs = gnu_getopt( + argv[1:], "", + [ + "verbosity=", + "tests=", + + "port=", + "config_file=", + + "jobs=", + "split_jobs=", + + "include_tests=", + "include_files=", + + "exclude_files=", + "exclude_tests=", + + "coverage_output_dir=", + "coverage_include=", + + "django=" + ] + ) + + for opt, value in optlist: + if opt in ("-v", "--verbosity"): + verbosity = value + + elif opt in ("-p", "--port"): + port = int(value) + + elif opt in ("-j", "--jobs"): + jobs = int(value) + + elif opt in ("-s", "--split_jobs"): + split_jobs = value + if split_jobs not in ('module', 'tests'): + raise AssertionError('Expected split to be either "module" or "tests". Was :%s' % (split_jobs,)) + + elif opt in ("-d", "--coverage_output_dir",): + coverage_output_dir = value.strip() + + elif opt in ("-i", "--coverage_include",): + coverage_include = value.strip() + + elif opt in ("-I", "--include_tests"): + include_tests = value.split(',') + + elif opt in ("-E", "--exclude_files"): + exclude_files = value.split(',') + + elif opt in ("-F", "--include_files"): + include_files = value.split(',') + + elif opt in ("-e", "--exclude_tests"): + exclude_tests = value.split(',') + + elif opt in ("-t", "--tests"): + tests = value.split(',') + + elif opt in ("--django",): + django = value.strip() in ['true', 'True', '1'] + + elif opt in ("-c", "--config_file"): + config_file = value.strip() + if os.path.exists(config_file): + f = open(config_file, 'rU') + try: + config_file_contents = f.read() + finally: + f.close() + + if config_file_contents: + config_file_contents = config_file_contents.strip() + + if config_file_contents: + for line in config_file_contents.splitlines(): + file_and_test = line.split('|') + if len(file_and_test) == 2: + file, test = file_and_test + if file in files_to_tests: + files_to_tests[file].append(test) + else: + files_to_tests[file] = [test] + + else: + sys.stderr.write('Could not find config file: %s\n' % (config_file,)) + + if type([]) != type(dirs): + dirs = [dirs] + + ret_dirs = [] + for d in dirs: + if '|' in d: + #paths may come from the ide separated by | + ret_dirs.extend(d.split('|')) + else: + ret_dirs.append(d) + + verbosity = int(verbosity) + + if tests: + if verbosity > 4: + sys.stdout.write('--tests provided. 
Ignoring --exclude_files, --exclude_tests and --include_files\n') + exclude_files = exclude_tests = include_files = None + + config = Configuration( + ret_dirs, + verbosity, + include_tests, + tests, + port, + files_to_tests, + jobs, + split_jobs, + coverage_output_dir, + coverage_include, + exclude_files=exclude_files, + exclude_tests=exclude_tests, + include_files=include_files, + django=django, + ) + + if verbosity > 5: + sys.stdout.write(str(config) + '\n') + return config + + +#======================================================================================================================= +# PydevTestRunner +#======================================================================================================================= +class PydevTestRunner(object): + """ finds and runs a file or directory of files as a unit test """ + + __py_extensions = ["*.py", "*.pyw"] + __exclude_files = ["__init__.*"] + + #Just to check that only this attributes will be written to this file + __slots__ = [ + 'verbosity', #Always used + + 'files_to_tests', #If this one is given, the ones below are not used + + 'files_or_dirs', #Files or directories received in the command line + 'include_tests', #The filter used to collect the tests + 'tests', #Strings with the tests to be run + + 'jobs', #Integer with the number of jobs that should be used to run the test cases + 'split_jobs', #String with 'tests' or 'module' (how should the jobs be split) + + 'configuration', + 'coverage', + ] + + def __init__(self, configuration): + self.verbosity = configuration.verbosity + + self.jobs = configuration.jobs + self.split_jobs = configuration.split_jobs + + files_to_tests = configuration.files_to_tests + if files_to_tests: + self.files_to_tests = files_to_tests + self.files_or_dirs = list(files_to_tests.keys()) + self.tests = None + else: + self.files_to_tests = {} + self.files_or_dirs = configuration.files_or_dirs + self.tests = configuration.tests + + self.configuration = configuration + self.__adjust_path() + + + def __adjust_path(self): + """ add the current file or directory to the python path """ + path_to_append = None + for n in xrange(len(self.files_or_dirs)): + dir_name = self.__unixify(self.files_or_dirs[n]) + if os.path.isdir(dir_name): + if not dir_name.endswith("/"): + self.files_or_dirs[n] = dir_name + "/" + path_to_append = os.path.normpath(dir_name) + elif os.path.isfile(dir_name): + path_to_append = os.path.dirname(dir_name) + else: + if not os.path.exists(dir_name): + block_line = '*' * 120 + sys.stderr.write('\n%s\n* PyDev test runner error: %s does not exist.\n%s\n' % (block_line, dir_name, block_line)) + return + msg = ("unknown type. \n%s\nshould be file or a directory.\n" % (dir_name)) + raise RuntimeError(msg) + if path_to_append is not None: + #Add it as the last one (so, first things are resolved against the default dirs and + #if none resolves, then we try a relative import). + sys.path.append(path_to_append) + + def __is_valid_py_file(self, fname): + """ tests that a particular file contains the proper file extension + and is not in the list of files to exclude """ + is_valid_fname = 0 + for invalid_fname in self.__class__.__exclude_files: + is_valid_fname += int(not fnmatch.fnmatch(fname, invalid_fname)) + if_valid_ext = 0 + for ext in self.__class__.__py_extensions: + if_valid_ext += int(fnmatch.fnmatch(fname, ext)) + return is_valid_fname > 0 and if_valid_ext > 0 + + def __unixify(self, s): + """ stupid windows. 
converts the backslash to forwardslash for consistency """ + return os.path.normpath(s).replace(os.sep, "/") + + def __importify(self, s, dir=False): + """ turns directory separators into dots and removes the ".py*" extension + so the string can be used as import statement """ + if not dir: + dirname, fname = os.path.split(s) + + if fname.count('.') > 1: + #if there's a file named xxx.xx.py, it is not a valid module, so, let's not load it... + return + + imp_stmt_pieces = [dirname.replace("\\", "/").replace("/", "."), os.path.splitext(fname)[0]] + + if len(imp_stmt_pieces[0]) == 0: + imp_stmt_pieces = imp_stmt_pieces[1:] + + return ".".join(imp_stmt_pieces) + + else: #handle dir + return s.replace("\\", "/").replace("/", ".") + + def __add_files(self, pyfiles, root, files): + """ if files match, appends them to pyfiles. used by os.path.walk fcn """ + for fname in files: + if self.__is_valid_py_file(fname): + name_without_base_dir = self.__unixify(os.path.join(root, fname)) + pyfiles.append(name_without_base_dir) + + + def find_import_files(self): + """ return a list of files to import """ + if self.files_to_tests: + pyfiles = self.files_to_tests.keys() + else: + pyfiles = [] + + for base_dir in self.files_or_dirs: + if os.path.isdir(base_dir): + if hasattr(os, 'walk'): + for root, dirs, files in os.walk(base_dir): + + #Note: handling directories that should be excluded from the search because + #they don't have __init__.py + exclude = {} + for d in dirs: + for init in ['__init__.py', '__init__.pyo', '__init__.pyc', '__init__.pyw', '__init__$py.class']: + if os.path.exists(os.path.join(root, d, init).replace('\\', '/')): + break + else: + exclude[d] = 1 + + if exclude: + new = [] + for d in dirs: + if d not in exclude: + new.append(d) + + dirs[:] = new + + self.__add_files(pyfiles, root, files) + else: + # jython2.1 is too old for os.walk! + os.path.walk(base_dir, self.__add_files, pyfiles) + + elif os.path.isfile(base_dir): + pyfiles.append(base_dir) + + if self.configuration.exclude_files or self.configuration.include_files: + ret = [] + for f in pyfiles: + add = True + basename = os.path.basename(f) + if self.configuration.include_files: + add = False + + for pat in self.configuration.include_files: + if fnmatch.fnmatchcase(basename, pat): + add = True + break + + if not add: + if self.verbosity > 3: + sys.stdout.write('Skipped file: %s (did not match any include_files pattern: %s)\n' % (f, self.configuration.include_files)) + + elif self.configuration.exclude_files: + for pat in self.configuration.exclude_files: + if fnmatch.fnmatchcase(basename, pat): + if self.verbosity > 3: + sys.stdout.write('Skipped file: %s (matched exclude_files pattern: %s)\n' % (f, pat)) + + elif self.verbosity > 2: + sys.stdout.write('Skipped file: %s\n' % (f,)) + + add = False + break + + if add: + if self.verbosity > 3: + sys.stdout.write('Adding file: %s for test discovery.\n' % (f,)) + ret.append(f) + + pyfiles = ret + + + return pyfiles + + def __get_module_from_str(self, modname, print_exception, pyfile): + """ Import the module in the given import path. 
+ * Returns the "final" module, so importing "coilib40.subject.visu" + returns the "visu" module, not the "coilib40" as returned by __import__ """ + try: + mod = __import__(modname) + for part in modname.split('.')[1:]: + mod = getattr(mod, part) + return mod + except: + if print_exception: + from _pydev_runfiles import pydev_runfiles_xml_rpc + from _pydevd_bundle import pydevd_io + buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr') + buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout') + try: + import traceback;traceback.print_exc() + sys.stderr.write('ERROR: Module: %s could not be imported (file: %s).\n' % (modname, pyfile)) + finally: + pydevd_io.end_redirect('stderr') + pydevd_io.end_redirect('stdout') + + pydev_runfiles_xml_rpc.notifyTest( + 'error', buf_out.getvalue(), buf_err.getvalue(), pyfile, modname, 0) + + return None + + def remove_duplicates_keeping_order(self, seq): + seen = set() + seen_add = seen.add + return [x for x in seq if not (x in seen or seen_add(x))] + + def find_modules_from_files(self, pyfiles): + """ returns a list of modules given a list of files """ + #let's make sure that the paths we want are in the pythonpath... + imports = [(s, self.__importify(s)) for s in pyfiles] + + sys_path = [os.path.normpath(path) for path in sys.path] + sys_path = self.remove_duplicates_keeping_order(sys_path) + + system_paths = [] + for s in sys_path: + system_paths.append(self.__importify(s, True)) + + ret = [] + for pyfile, imp in imports: + if imp is None: + continue #can happen if a file is not a valid module + choices = [] + for s in system_paths: + if imp.startswith(s): + add = imp[len(s) + 1:] + if add: + choices.append(add) + #sys.stdout.write(' ' + add + ' ') + + if not choices: + sys.stdout.write('PYTHONPATH not found for file: %s\n' % imp) + else: + for i, import_str in enumerate(choices): + print_exception = i == len(choices) - 1 + mod = self.__get_module_from_str(import_str, print_exception, pyfile) + if mod is not None: + ret.append((pyfile, mod, import_str)) + break + + + return ret + + #=================================================================================================================== + # GetTestCaseNames + #=================================================================================================================== + class GetTestCaseNames: + """Yes, we need a class for that (cannot use outer context on jython 2.1)""" + + def __init__(self, accepted_classes, accepted_methods): + self.accepted_classes = accepted_classes + self.accepted_methods = accepted_methods + + def __call__(self, testCaseClass): + """Return a sorted sequence of method names found within testCaseClass""" + testFnNames = [] + className = testCaseClass.__name__ + + if className in self.accepted_classes: + for attrname in dir(testCaseClass): + #If a class is chosen, we select all the 'test' methods' + if attrname.startswith('test') and hasattr(getattr(testCaseClass, attrname), '__call__'): + testFnNames.append(attrname) + + else: + for attrname in dir(testCaseClass): + #If we have the class+method name, we must do a full check and have an exact match. + if className + '.' 
+ attrname in self.accepted_methods: + if hasattr(getattr(testCaseClass, attrname), '__call__'): + testFnNames.append(attrname) + + #sorted() is not available in jython 2.1 + testFnNames.sort() + return testFnNames + + + def _decorate_test_suite(self, suite, pyfile, module_name): + import unittest + if isinstance(suite, unittest.TestSuite): + add = False + suite.__pydev_pyfile__ = pyfile + suite.__pydev_module_name__ = module_name + + for t in suite._tests: + t.__pydev_pyfile__ = pyfile + t.__pydev_module_name__ = module_name + if self._decorate_test_suite(t, pyfile, module_name): + add = True + + return add + + elif isinstance(suite, unittest.TestCase): + return True + + else: + return False + + + + def find_tests_from_modules(self, file_and_modules_and_module_name): + """ returns the unittests given a list of modules """ + #Use our own suite! + from _pydev_runfiles import pydev_runfiles_unittest + import unittest + unittest.TestLoader.suiteClass = pydev_runfiles_unittest.PydevTestSuite + loader = unittest.TestLoader() + + ret = [] + if self.files_to_tests: + for pyfile, m, module_name in file_and_modules_and_module_name: + accepted_classes = {} + accepted_methods = {} + tests = self.files_to_tests[pyfile] + for t in tests: + accepted_methods[t] = t + + loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods) + + suite = loader.loadTestsFromModule(m) + if self._decorate_test_suite(suite, pyfile, module_name): + ret.append(suite) + return ret + + + if self.tests: + accepted_classes = {} + accepted_methods = {} + + for t in self.tests: + splitted = t.split('.') + if len(splitted) == 1: + accepted_classes[t] = t + + elif len(splitted) == 2: + accepted_methods[t] = t + + loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods) + + + for pyfile, m, module_name in file_and_modules_and_module_name: + suite = loader.loadTestsFromModule(m) + if self._decorate_test_suite(suite, pyfile, module_name): + ret.append(suite) + + return ret + + + def filter_tests(self, test_objs, internal_call=False): + """ based on a filter name, only return those tests that have + the test case names that match """ + import unittest + if not internal_call: + if not self.configuration.include_tests and not self.tests and not self.configuration.exclude_tests: + #No need to filter if we have nothing to filter! + return test_objs + + if self.verbosity > 1: + if self.configuration.include_tests: + sys.stdout.write('Tests to include: %s\n' % (self.configuration.include_tests,)) + + if self.tests: + sys.stdout.write('Tests to run: %s\n' % (self.tests,)) + + if self.configuration.exclude_tests: + sys.stdout.write('Tests to exclude: %s\n' % (self.configuration.exclude_tests,)) + + test_suite = [] + for test_obj in test_objs: + + if isinstance(test_obj, unittest.TestSuite): + #Note: keep the suites as they are and just 'fix' the tests (so, don't use the iter_tests). + if test_obj._tests: + test_obj._tests = self.filter_tests(test_obj._tests, True) + if test_obj._tests: #Only add the suite if we still have tests there. 
+ test_suite.append(test_obj) + + elif isinstance(test_obj, unittest.TestCase): + try: + testMethodName = test_obj._TestCase__testMethodName + except AttributeError: + #changed in python 2.5 + testMethodName = test_obj._testMethodName + + add = True + if self.configuration.exclude_tests: + for pat in self.configuration.exclude_tests: + if fnmatch.fnmatchcase(testMethodName, pat): + if self.verbosity > 3: + sys.stdout.write('Skipped test: %s (matched exclude_tests pattern: %s)\n' % (testMethodName, pat)) + + elif self.verbosity > 2: + sys.stdout.write('Skipped test: %s\n' % (testMethodName,)) + + add = False + break + + if add: + if self.__match_tests(self.tests, test_obj, testMethodName): + include = True + if self.configuration.include_tests: + include = False + for pat in self.configuration.include_tests: + if fnmatch.fnmatchcase(testMethodName, pat): + include = True + break + if include: + test_suite.append(test_obj) + else: + if self.verbosity > 3: + sys.stdout.write('Skipped test: %s (did not match any include_tests pattern %s)\n' % ( + testMethodName, self.configuration.include_tests,)) + return test_suite + + + def iter_tests(self, test_objs): + #Note: not using yield because of Jython 2.1. + import unittest + tests = [] + for test_obj in test_objs: + if isinstance(test_obj, unittest.TestSuite): + tests.extend(self.iter_tests(test_obj._tests)) + + elif isinstance(test_obj, unittest.TestCase): + tests.append(test_obj) + return tests + + + def list_test_names(self, test_objs): + names = [] + for tc in self.iter_tests(test_objs): + try: + testMethodName = tc._TestCase__testMethodName + except AttributeError: + #changed in python 2.5 + testMethodName = tc._testMethodName + names.append(testMethodName) + return names + + + def __match_tests(self, tests, test_case, test_method_name): + if not tests: + return 1 + + for t in tests: + class_and_method = t.split('.') + if len(class_and_method) == 1: + #only class name + if class_and_method[0] == test_case.__class__.__name__: + return 1 + + elif len(class_and_method) == 2: + if class_and_method[0] == test_case.__class__.__name__ and class_and_method[1] == test_method_name: + return 1 + + return 0 + + + def __match(self, filter_list, name): + """ returns whether a test name matches the test filter """ + if filter_list is None: + return 1 + for f in filter_list: + if re.match(f, name): + return 1 + return 0 + + + def run_tests(self, handle_coverage=True): + """ runs all tests """ + sys.stdout.write("Finding files... ") + files = self.find_import_files() + if self.verbosity > 3: + sys.stdout.write('%s ... done.\n' % (self.files_or_dirs)) + else: + sys.stdout.write('done.\n') + sys.stdout.write("Importing test modules ... 
") + + + if handle_coverage: + coverage_files, coverage = start_coverage_support(self.configuration) + + file_and_modules_and_module_name = self.find_modules_from_files(files) + sys.stdout.write("done.\n") + + all_tests = self.find_tests_from_modules(file_and_modules_and_module_name) + all_tests = self.filter_tests(all_tests) + + from _pydev_runfiles import pydev_runfiles_unittest + test_suite = pydev_runfiles_unittest.PydevTestSuite(all_tests) + from _pydev_runfiles import pydev_runfiles_xml_rpc + pydev_runfiles_xml_rpc.notifyTestsCollected(test_suite.countTestCases()) + + start_time = time.time() + + def run_tests(): + executed_in_parallel = False + if self.jobs > 1: + from _pydev_runfiles import pydev_runfiles_parallel + + #What may happen is that the number of jobs needed is lower than the number of jobs requested + #(e.g.: 2 jobs were requested for running 1 test) -- in which case execute_tests_in_parallel will + #return False and won't run any tests. + executed_in_parallel = pydev_runfiles_parallel.execute_tests_in_parallel( + all_tests, self.jobs, self.split_jobs, self.verbosity, coverage_files, self.configuration.coverage_include) + + if not executed_in_parallel: + #If in coverage, we don't need to pass anything here (coverage is already enabled for this execution). + runner = pydev_runfiles_unittest.PydevTextTestRunner(stream=sys.stdout, descriptions=1, verbosity=self.verbosity) + sys.stdout.write('\n') + runner.run(test_suite) + + if self.configuration.django: + get_django_test_suite_runner()(run_tests).run_tests([]) + else: + run_tests() + + if handle_coverage: + coverage.stop() + coverage.save() + + total_time = 'Finished in: %.2f secs.' % (time.time() - start_time,) + pydev_runfiles_xml_rpc.notifyTestRunFinished(total_time) + + +DJANGO_TEST_SUITE_RUNNER = None + +def get_django_test_suite_runner(): + global DJANGO_TEST_SUITE_RUNNER + if DJANGO_TEST_SUITE_RUNNER: + return DJANGO_TEST_SUITE_RUNNER + try: + # django >= 1.8 + import django + from django.test.runner import DiscoverRunner + + class MyDjangoTestSuiteRunner(DiscoverRunner): + + def __init__(self, on_run_suite): + django.setup() + DiscoverRunner.__init__(self) + self.on_run_suite = on_run_suite + + def build_suite(self, *args, **kwargs): + pass + + def suite_result(self, *args, **kwargs): + pass + + def run_suite(self, *args, **kwargs): + self.on_run_suite() + except: + # django < 1.8 + try: + from django.test.simple import DjangoTestSuiteRunner + except: + class DjangoTestSuiteRunner: + def __init__(self): + pass + + def run_tests(self, *args, **kwargs): + raise AssertionError("Unable to run suite with django.test.runner.DiscoverRunner nor django.test.simple.DjangoTestSuiteRunner because it couldn't be imported.") + + class MyDjangoTestSuiteRunner(DjangoTestSuiteRunner): + + def __init__(self, on_run_suite): + DjangoTestSuiteRunner.__init__(self) + self.on_run_suite = on_run_suite + + def build_suite(self, *args, **kwargs): + pass + + def suite_result(self, *args, **kwargs): + pass + + def run_suite(self, *args, **kwargs): + self.on_run_suite() + + DJANGO_TEST_SUITE_RUNNER = MyDjangoTestSuiteRunner + return DJANGO_TEST_SUITE_RUNNER + + +#======================================================================================================================= +# main +#======================================================================================================================= +def main(configuration): + PydevTestRunner(configuration).run_tests() diff --git 
a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py new file mode 100644 index 00000000..a8359250 --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py @@ -0,0 +1,76 @@ +import os.path +import sys +from _pydevd_bundle.pydevd_constants import Null + + +#======================================================================================================================= +# get_coverage_files +#======================================================================================================================= +def get_coverage_files(coverage_output_dir, number_of_files): + base_dir = coverage_output_dir + ret = [] + i = 0 + while len(ret) < number_of_files: + while True: + f = os.path.join(base_dir, '.coverage.%s' % i) + i += 1 + if not os.path.exists(f): + ret.append(f) + break #Break only inner for. + return ret + + +#======================================================================================================================= +# start_coverage_support +#======================================================================================================================= +def start_coverage_support(configuration): + return start_coverage_support_from_params( + configuration.coverage_output_dir, + configuration.coverage_output_file, + configuration.jobs, + configuration.coverage_include, + ) + + +#======================================================================================================================= +# start_coverage_support_from_params +#======================================================================================================================= +def start_coverage_support_from_params(coverage_output_dir, coverage_output_file, jobs, coverage_include): + coverage_files = [] + coverage_instance = Null() + if coverage_output_dir or coverage_output_file: + try: + import coverage #@UnresolvedImport + except: + sys.stderr.write('Error: coverage module could not be imported\n') + sys.stderr.write('Please make sure that the coverage module (http://nedbatchelder.com/code/coverage/)\n') + sys.stderr.write('is properly installed in your interpreter: %s\n' % (sys.executable,)) + + import traceback;traceback.print_exc() + else: + if coverage_output_dir: + if not os.path.exists(coverage_output_dir): + sys.stderr.write('Error: directory for coverage output (%s) does not exist.\n' % (coverage_output_dir,)) + + elif not os.path.isdir(coverage_output_dir): + sys.stderr.write('Error: expected (%s) to be a directory.\n' % (coverage_output_dir,)) + + else: + n = jobs + if n <= 0: + n += 1 + n += 1 #Add 1 more for the current process (which will do the initial import). + coverage_files = get_coverage_files(coverage_output_dir, n) + os.environ['COVERAGE_FILE'] = coverage_files.pop(0) + + coverage_instance = coverage.coverage(source=[coverage_include]) + coverage_instance.start() + + elif coverage_output_file: + #Client of parallel run. 
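+            # Coverage file for a client taking part in a parallel run: reuse the
+            # name handed down by the parent process instead of allocating a new one.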
+ os.environ['COVERAGE_FILE'] = coverage_output_file + coverage_instance = coverage.coverage(source=[coverage_include]) + coverage_instance.start() + + return coverage_files, coverage_instance + diff --git a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_nose.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_nose.py new file mode 100644 index 00000000..1cee0ff1 --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_nose.py @@ -0,0 +1,182 @@ +from nose.plugins.multiprocess import MultiProcessTestRunner # @UnresolvedImport +from nose.plugins.base import Plugin # @UnresolvedImport +import sys +from _pydev_runfiles import pydev_runfiles_xml_rpc +import time +from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support + +#======================================================================================================================= +# PydevPlugin +#======================================================================================================================= +class PydevPlugin(Plugin): + + def __init__(self, configuration): + self.configuration = configuration + Plugin.__init__(self) + + + def begin(self): + # Called before any test is run (it's always called, with multiprocess or not) + self.start_time = time.time() + self.coverage_files, self.coverage = start_coverage_support(self.configuration) + + + def finalize(self, result): + # Called after all tests are run (it's always called, with multiprocess or not) + self.coverage.stop() + self.coverage.save() + + pydev_runfiles_xml_rpc.notifyTestRunFinished('Finished in: %.2f secs.' % (time.time() - self.start_time,)) + + + + #=================================================================================================================== + # Methods below are not called with multiprocess (so, we monkey-patch MultiProcessTestRunner.consolidate + # so that they're called, but unfortunately we loose some info -- i.e.: the time for each test in this + # process). + #=================================================================================================================== + + + def report_cond(self, cond, test, captured_output, error=''): + ''' + @param cond: fail, error, ok + ''' + + # test.address() is something as: + # ('D:\\workspaces\\temp\\test_workspace\\pytesting1\\src\\mod1\\hello.py', 'mod1.hello', 'TestCase.testMet1') + # + # and we must pass: location, test + # E.g.: ['D:\\src\\mod1\\hello.py', 'TestCase.testMet1'] + try: + if hasattr(test, 'address'): + address = test.address() + address = address[0], address[2] + else: + # multiprocess + try: + address = test[0], test[1] + except TypeError: + # It may be an error at setup, in which case it's not really a test, but a Context object. + f = test.context.__file__ + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + address = f, '?' + except: + sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n") + import traceback;traceback.print_exc() + sys.stderr.write("\n\n\n") + address = '?', '?' + + error_contents = self.get_io_from_error(error) + try: + time_str = '%.2f' % (time.time() - test._pydev_start_time) + except: + time_str = '?' 
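+        # Relay the outcome to the IDE over XML-RPC: status ('ok', 'fail' or
+        # 'error'), captured output, error text, file, test id and elapsed time.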
+ + pydev_runfiles_xml_rpc.notifyTest(cond, captured_output, error_contents, address[0], address[1], time_str) + + + def startTest(self, test): + test._pydev_start_time = time.time() + if hasattr(test, 'address'): + address = test.address() + file, test = address[0], address[2] + else: + # multiprocess + file, test = test + pydev_runfiles_xml_rpc.notifyStartTest(file, test) + + + def get_io_from_error(self, err): + if type(err) == type(()): + if len(err) != 3: + if len(err) == 2: + return err[1] # multiprocess + try: + from StringIO import StringIO + except: + from io import StringIO + s = StringIO() + etype, value, tb = err + import traceback;traceback.print_exception(etype, value, tb, file=s) + return s.getvalue() + return err + + + def get_captured_output(self, test): + if hasattr(test, 'capturedOutput') and test.capturedOutput: + return test.capturedOutput + return '' + + + def addError(self, test, err): + self.report_cond( + 'error', + test, + self.get_captured_output(test), + err, + ) + + + def addFailure(self, test, err): + self.report_cond( + 'fail', + test, + self.get_captured_output(test), + err, + ) + + + def addSuccess(self, test): + self.report_cond( + 'ok', + test, + self.get_captured_output(test), + '', + ) + + +PYDEV_NOSE_PLUGIN_SINGLETON = None +def start_pydev_nose_plugin_singleton(configuration): + global PYDEV_NOSE_PLUGIN_SINGLETON + PYDEV_NOSE_PLUGIN_SINGLETON = PydevPlugin(configuration) + return PYDEV_NOSE_PLUGIN_SINGLETON + + + + +original = MultiProcessTestRunner.consolidate +#======================================================================================================================= +# new_consolidate +#======================================================================================================================= +def new_consolidate(self, result, batch_result): + ''' + Used so that it can work with the multiprocess plugin. + Monkeypatched because nose seems a bit unsupported at this time (ideally + the plugin would have this support by default). 
+ ''' + ret = original(self, result, batch_result) + + parent_frame = sys._getframe().f_back + # addr is something as D:\pytesting1\src\mod1\hello.py:TestCase.testMet4 + # so, convert it to what report_cond expects + addr = parent_frame.f_locals['addr'] + i = addr.rindex(':') + addr = [addr[:i], addr[i + 1:]] + + output, testsRun, failures, errors, errorClasses = batch_result + if failures or errors: + for failure in failures: + PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('fail', addr, output, failure) + + for error in errors: + PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('error', addr, output, error) + else: + PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('ok', addr, output) + + + return ret + +MultiProcessTestRunner.consolidate = new_consolidate diff --git a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py new file mode 100644 index 00000000..c6f15a2e --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py @@ -0,0 +1,295 @@ +import unittest +from _pydev_imps._pydev_saved_modules import thread +try: + import Queue +except: + import queue as Queue #@UnresolvedImport +from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport +from _pydev_runfiles import pydev_runfiles_xml_rpc +import time +import os + +#======================================================================================================================= +# flatten_test_suite +#======================================================================================================================= +def flatten_test_suite(test_suite, ret): + if isinstance(test_suite, unittest.TestSuite): + for t in test_suite._tests: + flatten_test_suite(t, ret) + + elif isinstance(test_suite, unittest.TestCase): + ret.append(test_suite) + + +#======================================================================================================================= +# execute_tests_in_parallel +#======================================================================================================================= +def execute_tests_in_parallel(tests, jobs, split, verbosity, coverage_files, coverage_include): + ''' + @param tests: list(PydevTestSuite) + A list with the suites to be run + + @param split: str + Either 'module' or the number of tests that should be run in each batch + + @param coverage_files: list(file) + A list with the files that should be used for giving coverage information (if empty, coverage information + should not be gathered). + + @param coverage_include: str + The pattern that should be included in the coverage. + + @return: bool + Returns True if the tests were actually executed in parallel. If the tests were not executed because only 1 + should be used (e.g.: 2 jobs were requested for running 1 test), False will be returned and no tests will be + run. + + It may also return False if in debug mode (in which case, multi-processes are not accepted) + ''' + try: + from _pydevd_bundle.pydevd_comm import get_global_debugger + if get_global_debugger() is not None: + return False + except: + pass #Ignore any error here. + + #This queue will receive the tests to be run. Each entry in a queue is a list with the tests to be run together When + #split == 'tests', each list will have a single element, when split == 'module', each list will have all the tests + #from a given module. 
+ tests_queue = [] + + queue_elements = [] + if split == 'module': + module_to_tests = {} + for test in tests: + lst = [] + flatten_test_suite(test, lst) + for test in lst: + key = (test.__pydev_pyfile__, test.__pydev_module_name__) + module_to_tests.setdefault(key, []).append(test) + + for key, tests in module_to_tests.items(): + queue_elements.append(tests) + + if len(queue_elements) < jobs: + #Don't create jobs we will never use. + jobs = len(queue_elements) + + elif split == 'tests': + for test in tests: + lst = [] + flatten_test_suite(test, lst) + for test in lst: + queue_elements.append([test]) + + if len(queue_elements) < jobs: + #Don't create jobs we will never use. + jobs = len(queue_elements) + + else: + raise AssertionError('Do not know how to handle: %s' % (split,)) + + for test_cases in queue_elements: + test_queue_elements = [] + for test_case in test_cases: + try: + test_name = test_case.__class__.__name__+"."+test_case._testMethodName + except AttributeError: + #Support for jython 2.1 (__testMethodName is pseudo-private in the test case) + test_name = test_case.__class__.__name__+"."+test_case._TestCase__testMethodName + + test_queue_elements.append(test_case.__pydev_pyfile__+'|'+test_name) + + tests_queue.append(test_queue_elements) + + if jobs < 2: + return False + + sys.stdout.write('Running tests in parallel with: %s jobs.\n' %(jobs,)) + + + queue = Queue.Queue() + for item in tests_queue: + queue.put(item, block=False) + + + providers = [] + clients = [] + for i in xrange(jobs): + test_cases_provider = CommunicationThread(queue) + providers.append(test_cases_provider) + + test_cases_provider.start() + port = test_cases_provider.port + + if coverage_files: + clients.append(ClientThread(i, port, verbosity, coverage_files.pop(0), coverage_include)) + else: + clients.append(ClientThread(i, port, verbosity)) + + for client in clients: + client.start() + + client_alive = True + while client_alive: + client_alive = False + for client in clients: + #Wait for all the clients to exit. + if not client.finished: + client_alive = True + time.sleep(.2) + break + + for provider in providers: + provider.shutdown() + + return True + + + +#======================================================================================================================= +# CommunicationThread +#======================================================================================================================= +class CommunicationThread(threading.Thread): + + def __init__(self, tests_queue): + threading.Thread.__init__(self) + self.setDaemon(True) + self.queue = tests_queue + self.finished = False + from _pydev_bundle.pydev_imports import SimpleXMLRPCServer + + + # This is a hack to patch slow socket.getfqdn calls that + # BaseHTTPServer (and its subclasses) make. + # See: http://bugs.python.org/issue6085 + # See: http://www.answermysearches.com/xmlrpc-server-slow-in-python-how-to-fix/2140/ + try: + import BaseHTTPServer + def _bare_address_string(self): + host, port = self.client_address[:2] + return '%s' % host + BaseHTTPServer.BaseHTTPRequestHandler.address_string = _bare_address_string + + except: + pass + # End hack. 
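+        # The XML-RPC server below binds to an ephemeral localhost port (port 0);
+        # the port actually assigned is read back from the socket so that each
+        # ClientThread subprocess can be told where to connect.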
+ + + # Create server + + from _pydev_bundle import pydev_localhost + server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), 0), logRequests=False) + server.register_function(self.GetTestsToRun) + server.register_function(self.notifyStartTest) + server.register_function(self.notifyTest) + server.register_function(self.notifyCommands) + self.port = server.socket.getsockname()[1] + self.server = server + + + def GetTestsToRun(self, job_id): + ''' + @param job_id: + + @return: list(str) + Each entry is a string in the format: filename|Test.testName + ''' + try: + ret = self.queue.get(block=False) + return ret + except: #Any exception getting from the queue (empty or not) means we finished our work on providing the tests. + self.finished = True + return [] + + + def notifyCommands(self, job_id, commands): + #Batch notification. + for command in commands: + getattr(self, command[0])(job_id, *command[1], **command[2]) + + return True + + def notifyStartTest(self, job_id, *args, **kwargs): + pydev_runfiles_xml_rpc.notifyStartTest(*args, **kwargs) + return True + + + def notifyTest(self, job_id, *args, **kwargs): + pydev_runfiles_xml_rpc.notifyTest(*args, **kwargs) + return True + + def shutdown(self): + if hasattr(self.server, 'shutdown'): + self.server.shutdown() + else: + self._shutdown = True + + def run(self): + if hasattr(self.server, 'shutdown'): + self.server.serve_forever() + else: + self._shutdown = False + while not self._shutdown: + self.server.handle_request() + + + +#======================================================================================================================= +# Client +#======================================================================================================================= +class ClientThread(threading.Thread): + + def __init__(self, job_id, port, verbosity, coverage_output_file=None, coverage_include=None): + threading.Thread.__init__(self) + self.setDaemon(True) + self.port = port + self.job_id = job_id + self.verbosity = verbosity + self.finished = False + self.coverage_output_file = coverage_output_file + self.coverage_include = coverage_include + + + def _reader_thread(self, pipe, target): + while True: + target.write(pipe.read(1)) + + + def run(self): + try: + from _pydev_runfiles import pydev_runfiles_parallel_client + #TODO: Support Jython: + # + #For jython, instead of using sys.executable, we should use: + #r'D:\bin\jdk_1_5_09\bin\java.exe', + #'-classpath', + #'D:/bin/jython-2.2.1/jython.jar', + #'org.python.util.jython', + + args = [ + sys.executable, + pydev_runfiles_parallel_client.__file__, + str(self.job_id), + str(self.port), + str(self.verbosity), + ] + + if self.coverage_output_file and self.coverage_include: + args.append(self.coverage_output_file) + args.append(self.coverage_include) + + import subprocess + if False: + proc = subprocess.Popen(args, env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + thread.start_new_thread(self._reader_thread,(proc.stdout, sys.stdout)) + + thread.start_new_thread(target=self._reader_thread,args=(proc.stderr, sys.stderr)) + else: + proc = subprocess.Popen(args, env=os.environ, shell=False) + proc.wait() + + finally: + self.finished = True + diff --git a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py new file mode 100644 index 00000000..d7dcf04a --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py @@ -0,0 +1,214 @@ +from 
_pydevd_bundle.pydevd_constants import * #@UnusedWildImport +from _pydev_bundle.pydev_imports import xmlrpclib, _queue +Queue = _queue.Queue +import traceback +from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support_from_params + + + +#======================================================================================================================= +# ParallelNotification +#======================================================================================================================= +class ParallelNotification(object): + + def __init__(self, method, args, kwargs): + self.method = method + self.args = args + self.kwargs = kwargs + + def to_tuple(self): + return self.method, self.args, self.kwargs + + +#======================================================================================================================= +# KillServer +#======================================================================================================================= +class KillServer(object): + pass + + + +#======================================================================================================================= +# ServerComm +#======================================================================================================================= +class ServerComm(threading.Thread): + + + + def __init__(self, job_id, server): + self.notifications_queue = Queue() + threading.Thread.__init__(self) + self.setDaemon(False) #Wait for all the notifications to be passed before exiting! + assert job_id is not None + assert port is not None + self.job_id = job_id + + self.finished = False + self.server = server + + + def run(self): + while True: + kill_found = False + commands = [] + command = self.notifications_queue.get(block=True) + if isinstance(command, KillServer): + kill_found = True + else: + assert isinstance(command, ParallelNotification) + commands.append(command.to_tuple()) + + try: + while True: + command = self.notifications_queue.get(block=False) #No block to create a batch. + if isinstance(command, KillServer): + kill_found = True + else: + assert isinstance(command, ParallelNotification) + commands.append(command.to_tuple()) + except: + pass #That's OK, we're getting it until it becomes empty so that we notify multiple at once. + + + if commands: + try: + #Batch notification. 
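The loop above blocks for the first notification and then drains the queue without blocking, so that everything already queued is forwarded to the server in a single notifyCommands call. A self-contained sketch of that batching pattern (names here are illustrative, not pydevd's):

try:
    from queue import Queue, Empty  # Python 3
except ImportError:
    from Queue import Queue, Empty  # Python 2

def drain_batch(notifications_queue):
    # Block for the first item, then grab whatever else is already queued.
    batch = [notifications_queue.get(block=True)]
    try:
        while True:
            batch.append(notifications_queue.get(block=False))
    except Empty:
        pass  # Queue drained; the whole batch goes out in one XML-RPC call.
    return batch

q = Queue()
for i in range(3):
    q.put(('notifyTest', (i,), {}))
assert len(drain_batch(q)) == 3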
+ self.server.lock.acquire() + try: + self.server.notifyCommands(self.job_id, commands) + finally: + self.server.lock.release() + except: + traceback.print_exc() + + if kill_found: + self.finished = True + return + + + +#======================================================================================================================= +# ServerFacade +#======================================================================================================================= +class ServerFacade(object): + + + def __init__(self, notifications_queue): + self.notifications_queue = notifications_queue + + + def notifyTestsCollected(self, *args, **kwargs): + pass #This notification won't be passed + + + def notifyTestRunFinished(self, *args, **kwargs): + pass #This notification won't be passed + + + def notifyStartTest(self, *args, **kwargs): + self.notifications_queue.put_nowait(ParallelNotification('notifyStartTest', args, kwargs)) + + + def notifyTest(self, *args, **kwargs): + self.notifications_queue.put_nowait(ParallelNotification('notifyTest', args, kwargs)) + + + +#======================================================================================================================= +# run_client +#======================================================================================================================= +def run_client(job_id, port, verbosity, coverage_output_file, coverage_include): + job_id = int(job_id) + + from _pydev_bundle import pydev_localhost + server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port)) + server.lock = threading.Lock() + + + server_comm = ServerComm(job_id, server) + server_comm.start() + + try: + server_facade = ServerFacade(server_comm.notifications_queue) + from _pydev_runfiles import pydev_runfiles + from _pydev_runfiles import pydev_runfiles_xml_rpc + pydev_runfiles_xml_rpc.set_server(server_facade) + + #Starts None and when the 1st test is gotten, it's started (because a server may be initiated and terminated + #before receiving any test -- which would mean a different process got all the tests to run). + coverage = None + + try: + tests_to_run = [1] + while tests_to_run: + #Investigate: is it dangerous to use the same xmlrpclib server from different threads? + #It seems it should be, as it creates a new connection for each request... + server.lock.acquire() + try: + tests_to_run = server.GetTestsToRun(job_id) + finally: + server.lock.release() + + if not tests_to_run: + break + + if coverage is None: + _coverage_files, coverage = start_coverage_support_from_params( + None, coverage_output_file, 1, coverage_include) + + + files_to_tests = {} + for test in tests_to_run: + filename_and_test = test.split('|') + if len(filename_and_test) == 2: + files_to_tests.setdefault(filename_and_test[0], []).append(filename_and_test[1]) + + configuration = pydev_runfiles.Configuration( + '', + verbosity, + None, + None, + None, + files_to_tests, + 1, #Always single job here + None, + + #The coverage is handled in this loop. 
+ coverage_output_file=None, + coverage_include=None, + ) + test_runner = pydev_runfiles.PydevTestRunner(configuration) + sys.stdout.flush() + test_runner.run_tests(handle_coverage=False) + finally: + if coverage is not None: + coverage.stop() + coverage.save() + + + except: + traceback.print_exc() + server_comm.notifications_queue.put_nowait(KillServer()) + + + +#======================================================================================================================= +# main +#======================================================================================================================= +if __name__ == '__main__': + if len(sys.argv) -1 == 3: + job_id, port, verbosity = sys.argv[1:] + coverage_output_file, coverage_include = None, None + + elif len(sys.argv) -1 == 5: + job_id, port, verbosity, coverage_output_file, coverage_include = sys.argv[1:] + + else: + raise AssertionError('Could not find out how to handle the parameters: '+sys.argv[1:]) + + job_id = int(job_id) + port = int(port) + verbosity = int(verbosity) + run_client(job_id, port, verbosity, coverage_output_file, coverage_include) + + diff --git a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py new file mode 100644 index 00000000..fae814cf --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py @@ -0,0 +1,278 @@ +from _pydev_runfiles import pydev_runfiles_xml_rpc +import pickle +import zlib +import base64 +import os +import py +from pydevd_file_utils import _NormFile +import pytest +import sys +import time + + +#========================================================================= +# Load filters with tests we should skip +#========================================================================= +py_test_accept_filter = None + + +def _load_filters(): + global py_test_accept_filter + if py_test_accept_filter is None: + py_test_accept_filter = os.environ.get('PYDEV_PYTEST_SKIP') + if py_test_accept_filter: + py_test_accept_filter = pickle.loads( + zlib.decompress(base64.b64decode(py_test_accept_filter))) + else: + py_test_accept_filter = {} + + +def is_in_xdist_node(): + main_pid = os.environ.get('PYDEV_MAIN_PID') + if main_pid and main_pid != str(os.getpid()): + return True + return False + + +connected = False +def connect_to_server_for_communication_to_xml_rpc_on_xdist(): + global connected + if connected: + return + connected = True + if is_in_xdist_node(): + port = os.environ.get('PYDEV_PYTEST_SERVER') + if not port: + sys.stderr.write( + 'Error: no PYDEV_PYTEST_SERVER environment variable defined.\n') + else: + pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True) + + +PY2 = sys.version_info[0] <= 2 +PY3 = not PY2 + + +class State: + start_time = time.time() + buf_err = None + buf_out = None + + +def start_redirect(): + if State.buf_out is not None: + return + from _pydevd_bundle import pydevd_io + State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr') + State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout') + + +def get_curr_output(): + return State.buf_out.getvalue(), State.buf_err.getvalue() + + +def pytest_unconfigure(): + if is_in_xdist_node(): + return + # Only report that it finished when on the main node (we don't want to report + # the finish on each separate node). + pydev_runfiles_xml_rpc.notifyTestRunFinished( + 'Finished in: %.2f secs.' 
% (time.time() - State.start_time,)) + + +def pytest_collection_modifyitems(session, config, items): + # A note: in xdist, this is not called on the main process, only in the + # secondary nodes, so, we'll actually make the filter and report it multiple + # times. + connect_to_server_for_communication_to_xml_rpc_on_xdist() + + _load_filters() + if not py_test_accept_filter: + pydev_runfiles_xml_rpc.notifyTestsCollected(len(items)) + return # Keep on going (nothing to filter) + + new_items = [] + for item in items: + f = _NormFile(str(item.parent.fspath)) + name = item.name + + if f not in py_test_accept_filter: + # print('Skip file: %s' % (f,)) + continue # Skip the file + + accept_tests = py_test_accept_filter[f] + + if item.cls is not None: + class_name = item.cls.__name__ + else: + class_name = None + for test in accept_tests: + # This happens when parameterizing pytest tests. + i = name.find('[') + if i > 0: + name = name[:i] + if test == name: + # Direct match of the test (just go on with the default + # loading) + new_items.append(item) + break + + if class_name is not None: + if test == class_name + '.' + name: + new_items.append(item) + break + + if class_name == test: + new_items.append(item) + break + else: + pass + # print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests)) + + # Modify the original list + items[:] = new_items + pydev_runfiles_xml_rpc.notifyTestsCollected(len(items)) + + +from py.io import TerminalWriter + +def _get_error_contents_from_report(report): + if report.longrepr is not None: + tw = TerminalWriter(stringio=True) + tw.hasmarkup = False + report.toterminal(tw) + exc = tw.stringio.getvalue() + s = exc.strip() + if s: + return s + + return '' + +def pytest_collectreport(report): + error_contents = _get_error_contents_from_report(report) + if error_contents: + report_test('fail', '', '', '', error_contents, 0.0) + +def append_strings(s1, s2): + if s1.__class__ == s2.__class__: + return s1 + s2 + + if sys.version_info[0] == 2: + if not isinstance(s1, basestring): + s1 = str(s1) + + if not isinstance(s2, basestring): + s2 = str(s2) + + # Prefer bytes + if isinstance(s1, unicode): + s1 = s1.encode('utf-8') + + if isinstance(s2, unicode): + s2 = s2.encode('utf-8') + + return s1 + s2 + else: + # Prefer str + if isinstance(s1, bytes): + s1 = s1.decode('utf-8', 'replace') + + if isinstance(s2, bytes): + s2 = s2.decode('utf-8', 'replace') + + return s1 + s2 + + + +def pytest_runtest_logreport(report): + if is_in_xdist_node(): + # When running with xdist, we don't want the report to be called from the node, only + # from the main process. + return + report_duration = report.duration + report_when = report.when + report_outcome = report.outcome + + if hasattr(report, 'wasxfail'): + if report_outcome != 'skipped': + report_outcome = 'passed' + + if report_outcome == 'passed': + # passed on setup/teardown: no need to report if in setup or teardown + # (only on the actual test if it passed). + if report_when in ('setup', 'teardown'): + return + + status = 'ok' + + elif report_outcome == 'skipped': + status = 'skip' + + else: + # It has only passed, skipped and failed (no error), so, let's consider + # error if not on call. + if report_when in ('setup', 'teardown'): + status = 'error' + + else: + # any error in the call (not in setup or teardown) is considered a + # regular failure. + status = 'fail' + + # This will work if pytest is not capturing it, if it is, nothing will + # come from here... 
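For reference, the PYDEV_PYTEST_SKIP value consumed by _load_filters() (and applied in pytest_collection_modifyitems above) is a base64-encoded, zlib-compressed pickle of a dict mapping normalized test file paths to accepted test names. A minimal sketch of producing and reading such a value (the IDE side normally builds it; the path below is made up):

import base64, pickle, zlib

accept = {'/tmp/test_sample.py': ['test_one', 'MyCase.test_two']}
encoded = base64.b64encode(zlib.compress(pickle.dumps(accept)))

# _load_filters() reverses the transformation:
decoded = pickle.loads(zlib.decompress(base64.b64decode(encoded)))
assert decoded == accept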
+ captured_output, error_contents = getattr(report, 'pydev_captured_output', ''), getattr(report, 'pydev_error_contents', '') + for type_section, value in report.sections: + if value: + if type_section in ('err', 'stderr', 'Captured stderr call'): + error_contents = append_strings(error_contents, value) + else: + captured_output = append_strings(error_contents, value) + + filename = getattr(report, 'pydev_fspath_strpath', '') + test = report.location[2] + + if report_outcome != 'skipped': + # On skipped, we'll have a traceback for the skip, which is not what we + # want. + exc = _get_error_contents_from_report(report) + if exc: + if error_contents: + error_contents = append_strings(error_contents, '----------------------------- Exceptions -----------------------------\n') + error_contents = append_strings(error_contents, exc) + + report_test(status, filename, test, captured_output, error_contents, report_duration) + + +def report_test(status, filename, test, captured_output, error_contents, duration): + ''' + @param filename: 'D:\\src\\mod1\\hello.py' + @param test: 'TestCase.testMet1' + @param status: fail, error, ok + ''' + time_str = '%.2f' % (duration,) + pydev_runfiles_xml_rpc.notifyTest( + status, captured_output, error_contents, filename, test, time_str) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item, call): + outcome = yield + report = outcome.get_result() + report.pydev_fspath_strpath = item.fspath.strpath + report.pydev_captured_output, report.pydev_error_contents = get_curr_output() + + +@pytest.mark.tryfirst +def pytest_runtest_setup(item): + ''' + Note: with xdist will be on a secondary process. + ''' + # We have our own redirection: if xdist does its redirection, we'll have + # nothing in our contents (which is OK), but if it does, we'll get nothing + # from pytest but will get our own here. + start_redirect() + filename = item.fspath.strpath + test = item.location[2] + + pydev_runfiles_xml_rpc.notifyStartTest(filename, test) diff --git a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py new file mode 100644 index 00000000..f0ad9efd --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py @@ -0,0 +1,185 @@ +try: + import unittest2 as python_unittest # @UnresolvedImport +except: + import unittest as python_unittest + +from _pydev_runfiles import pydev_runfiles_xml_rpc +import time +from _pydevd_bundle import pydevd_io +import traceback +from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport + + +#======================================================================================================================= +# PydevTextTestRunner +#======================================================================================================================= +class PydevTextTestRunner(python_unittest.TextTestRunner): + + def _makeResult(self): + return PydevTestResult(self.stream, self.descriptions, self.verbosity) + + +_PythonTextTestResult = python_unittest.TextTestRunner()._makeResult().__class__ + +#======================================================================================================================= +# PydevTestResult +#======================================================================================================================= +class PydevTestResult(_PythonTextTestResult): + + def addSubTest(self, test, subtest, err): + """Called at the end of a subtest. 
+ 'err' is None if the subtest ended successfully, otherwise it's a + tuple of values as returned by sys.exc_info(). + """ + _PythonTextTestResult.addSubTest(self, test, subtest, err) + if err is not None: + subdesc = subtest._subDescription() + error = (test, self._exc_info_to_string(err, test)) + self._reportErrors([error], [], '', '%s %s' % (self.get_test_name(test), subdesc)) + + + def startTest(self, test): + _PythonTextTestResult.startTest(self, test) + self.buf = pydevd_io.start_redirect(keep_original_redirection=True, std='both') + self.start_time = time.time() + self._current_errors_stack = [] + self._current_failures_stack = [] + + try: + test_name = test.__class__.__name__+"."+test._testMethodName + except AttributeError: + #Support for jython 2.1 (__testMethodName is pseudo-private in the test case) + test_name = test.__class__.__name__+"."+test._TestCase__testMethodName + + pydev_runfiles_xml_rpc.notifyStartTest( + test.__pydev_pyfile__, test_name) + + + + + def get_test_name(self, test): + try: + try: + test_name = test.__class__.__name__ + "." + test._testMethodName + except AttributeError: + #Support for jython 2.1 (__testMethodName is pseudo-private in the test case) + try: + test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName + #Support for class/module exceptions (test is instance of _ErrorHolder) + except: + test_name = test.description.split()[1][1:-1] + ' <' + test.description.split()[0] + '>' + except: + traceback.print_exc() + return '' + return test_name + + + def stopTest(self, test): + end_time = time.time() + pydevd_io.end_redirect(std='both') + + _PythonTextTestResult.stopTest(self, test) + + captured_output = self.buf.getvalue() + del self.buf + error_contents = '' + test_name = self.get_test_name(test) + + + diff_time = '%.2f' % (end_time - self.start_time) + if not self._current_errors_stack and not self._current_failures_stack: + pydev_runfiles_xml_rpc.notifyTest( + 'ok', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + else: + self._reportErrors(self._current_errors_stack, self._current_failures_stack, captured_output, test_name) + + + def _reportErrors(self, errors, failures, captured_output, test_name, diff_time=''): + error_contents = [] + for test, s in errors+failures: + if type(s) == type((1,)): #If it's a tuple (for jython 2.1) + sio = StringIO() + traceback.print_exception(s[0], s[1], s[2], file=sio) + s = sio.getvalue() + error_contents.append(s) + + sep = '\n'+self.separator1 + error_contents = sep.join(error_contents) + + if errors and not failures: + try: + pydev_runfiles_xml_rpc.notifyTest( + 'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + except: + file_start = error_contents.find('File "') + file_end = error_contents.find('", ', file_start) + if file_start != -1 and file_end != -1: + file = error_contents[file_start+6:file_end] + else: + file = '' + pydev_runfiles_xml_rpc.notifyTest( + 'error', captured_output, error_contents, file, test_name, diff_time) + + elif failures and not errors: + pydev_runfiles_xml_rpc.notifyTest( + 'fail', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + + else: #Ok, we got both, errors and failures. Let's mark it as an error in the end. 
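As in the parallel runner, tests are reported as '<ClassName>.<methodName>'. A quick self-contained illustration of where that name comes from on a plain unittest TestCase (the _TestCase__testMethodName fallback used above only matters for very old Jython):

import unittest

class MyCase(unittest.TestCase):
    def test_one(self):
        pass

test = MyCase('test_one')
# Reported by the runner as 'MyCase.test_one':
assert test.__class__.__name__ + '.' + test._testMethodName == 'MyCase.test_one'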
+ pydev_runfiles_xml_rpc.notifyTest( + 'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time) + + + + def addError(self, test, err): + _PythonTextTestResult.addError(self, test, err) + #Support for class/module exceptions (test is instance of _ErrorHolder) + if not hasattr(self, '_current_errors_stack') or test.__class__.__name__ == '_ErrorHolder': + #Not in start...end, so, report error now (i.e.: django pre/post-setup) + self._reportErrors([self.errors[-1]], [], '', self.get_test_name(test)) + else: + self._current_errors_stack.append(self.errors[-1]) + + + def addFailure(self, test, err): + _PythonTextTestResult.addFailure(self, test, err) + if not hasattr(self, '_current_failures_stack'): + #Not in start...end, so, report error now (i.e.: django pre/post-setup) + self._reportErrors([], [self.failures[-1]], '', self.get_test_name(test)) + else: + self._current_failures_stack.append(self.failures[-1]) + + +try: + #Version 2.7 onwards has a different structure... Let's not make any changes in it for now + #(waiting for bug: http://bugs.python.org/issue11798) + try: + from unittest2 import suite + except ImportError: + from unittest import suite + #=================================================================================================================== + # PydevTestSuite + #=================================================================================================================== + class PydevTestSuite(python_unittest.TestSuite): + pass + + +except ImportError: + + #=================================================================================================================== + # PydevTestSuite + #=================================================================================================================== + class PydevTestSuite(python_unittest.TestSuite): + + + def run(self, result): + for index, test in enumerate(self._tests): + if result.shouldStop: + break + test(result) + + # Let the memory be released! + self._tests[index] = None + + return result + + diff --git a/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py new file mode 100644 index 00000000..256ce215 --- /dev/null +++ b/ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py @@ -0,0 +1,281 @@ +import threading +import traceback +import warnings + +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +from _pydev_bundle.pydev_imports import xmlrpclib, _queue +Queue = _queue.Queue +from _pydevd_bundle.pydevd_constants import * + +#This may happen in IronPython (in Python it shouldn't happen as there are +#'fast' replacements that are used in xmlrpclib.py) +warnings.filterwarnings( + 'ignore', 'The xmllib module is obsolete.*', DeprecationWarning) + + +file_system_encoding = getfilesystemencoding() + +#======================================================================================================================= +# _ServerHolder +#======================================================================================================================= +class _ServerHolder: + ''' + Helper so that we don't have to use a global here. 
+ ''' + SERVER = None + + +#======================================================================================================================= +# set_server +#======================================================================================================================= +def set_server(server): + _ServerHolder.SERVER = server + + + +#======================================================================================================================= +# ParallelNotification +#======================================================================================================================= +class ParallelNotification(object): + + def __init__(self, method, args): + self.method = method + self.args = args + + def to_tuple(self): + return self.method, self.args + + + +#======================================================================================================================= +# KillServer +#======================================================================================================================= +class KillServer(object): + pass + + +#======================================================================================================================= +# ServerFacade +#======================================================================================================================= +class ServerFacade(object): + + + def __init__(self, notifications_queue): + self.notifications_queue = notifications_queue + + + def notifyTestsCollected(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyTestsCollected', args)) + + def notifyConnected(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyConnected', args)) + + + def notifyTestRunFinished(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyTestRunFinished', args)) + + + def notifyStartTest(self, *args): + self.notifications_queue.put_nowait(ParallelNotification('notifyStartTest', args)) + + + def notifyTest(self, *args): + new_args = [] + for arg in args: + new_args.append(_encode_if_needed(arg)) + args = tuple(new_args) + self.notifications_queue.put_nowait(ParallelNotification('notifyTest', args)) + + + + + +#======================================================================================================================= +# ServerComm +#======================================================================================================================= +class ServerComm(threading.Thread): + + + + def __init__(self, notifications_queue, port, daemon=False): + threading.Thread.__init__(self) + self.setDaemon(daemon) # If False, wait for all the notifications to be passed before exiting! + self.finished = False + self.notifications_queue = notifications_queue + + from _pydev_bundle import pydev_localhost + + # It is necessary to specify an encoding, that matches + # the encoding of all bytes-strings passed into an + # XMLRPC call: "All 8-bit strings in the data structure are assumed to use the + # packet encoding. Unicode strings are automatically converted, + # where necessary." + # Byte strings most likely come from file names. + encoding = file_system_encoding + if encoding == "mbcs": + # Windos symbolic name for the system encoding CP_ACP. + # We need to convert it into a encoding that is recognized by Java. + # Unfortunately this is not always possible. You could use + # GetCPInfoEx and get a name similar to "windows-1251". Then + # you need a table to translate on a best effort basis. 
Much to complicated. + # ISO-8859-1 is good enough. + encoding = "ISO-8859-1" + + self.server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port), + encoding=encoding) + + + def run(self): + while True: + kill_found = False + commands = [] + command = self.notifications_queue.get(block=True) + if isinstance(command, KillServer): + kill_found = True + else: + assert isinstance(command, ParallelNotification) + commands.append(command.to_tuple()) + + try: + while True: + command = self.notifications_queue.get(block=False) #No block to create a batch. + if isinstance(command, KillServer): + kill_found = True + else: + assert isinstance(command, ParallelNotification) + commands.append(command.to_tuple()) + except: + pass #That's OK, we're getting it until it becomes empty so that we notify multiple at once. + + + if commands: + try: + self.server.notifyCommands(commands) + except: + traceback.print_exc() + + if kill_found: + self.finished = True + return + + + +#======================================================================================================================= +# initialize_server +#======================================================================================================================= +def initialize_server(port, daemon=False): + if _ServerHolder.SERVER is None: + if port is not None: + notifications_queue = Queue() + _ServerHolder.SERVER = ServerFacade(notifications_queue) + _ServerHolder.SERVER_COMM = ServerComm(notifications_queue, port, daemon) + _ServerHolder.SERVER_COMM.start() + else: + #Create a null server, so that we keep the interface even without any connection. + _ServerHolder.SERVER = Null() + _ServerHolder.SERVER_COMM = Null() + + try: + _ServerHolder.SERVER.notifyConnected() + except: + traceback.print_exc() + + + +#======================================================================================================================= +# notifyTest +#======================================================================================================================= +def notifyTestsCollected(tests_count): + assert tests_count is not None + try: + _ServerHolder.SERVER.notifyTestsCollected(tests_count) + except: + traceback.print_exc() + + +#======================================================================================================================= +# notifyStartTest +#======================================================================================================================= +def notifyStartTest(file, test): + ''' + @param file: the tests file (c:/temp/test.py) + @param test: the test ran (i.e.: TestCase.test1) + ''' + assert file is not None + if test is None: + test = '' #Could happen if we have an import error importing module. 
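The ISO-8859-1 fallback chosen above (and used again by _encode_if_needed further below) relies on 'xmlcharrefreplace': characters that do not fit into Latin-1 are turned into XML character references, so the payload stays valid in the declared encoding. A tiny illustration with a Python 3 str (on Python 2 the same applies to unicode objects):

text = u'caf\u00e9 \u03b1\u03b2\u03b3'   # 'café' plus three Greek letters
encoded = text.encode('ISO-8859-1', 'xmlcharrefreplace')
# Latin-1 characters pass through; the Greek letters become character references:
assert encoded == b'caf\xe9 &#945;&#946;&#947;'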
+ + try: + _ServerHolder.SERVER.notifyStartTest(file, test) + except: + traceback.print_exc() + + +def _encode_if_needed(obj): + # In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object)) + if not IS_PY3K: + if isinstance(obj, str): + try: + return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace')) + except: + return xmlrpclib.Binary(obj) + + elif isinstance(obj, unicode): + return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace')) + + else: + if isinstance(obj, str): # Unicode in py3 + return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace')) + + elif isinstance(obj, bytes): + try: + return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace')) + except: + return xmlrpclib.Binary(obj) #bytes already + + return obj + + +#======================================================================================================================= +# notifyTest +#======================================================================================================================= +def notifyTest(cond, captured_output, error_contents, file, test, time): + ''' + @param cond: ok, fail, error + @param captured_output: output captured from stdout + @param captured_output: output captured from stderr + @param file: the tests file (c:/temp/test.py) + @param test: the test ran (i.e.: TestCase.test1) + @param time: float with the number of seconds elapsed + ''' + assert cond is not None + assert captured_output is not None + assert error_contents is not None + assert file is not None + if test is None: + test = '' #Could happen if we have an import error importing module. + assert time is not None + try: + captured_output = _encode_if_needed(captured_output) + error_contents = _encode_if_needed(error_contents) + + _ServerHolder.SERVER.notifyTest(cond, captured_output, error_contents, file, test, time) + except: + traceback.print_exc() + +#======================================================================================================================= +# notifyTestRunFinished +#======================================================================================================================= +def notifyTestRunFinished(total_time): + assert total_time is not None + try: + _ServerHolder.SERVER.notifyTestRunFinished(total_time) + except: + traceback.print_exc() + + +#======================================================================================================================= +# force_server_kill +#======================================================================================================================= +def force_server_kill(): + _ServerHolder.SERVER_COMM.notifications_queue.put_nowait(KillServer()) diff --git a/ptvsd/pydevd/_pydevd_bundle/__init__.py b/ptvsd/pydevd/_pydevd_bundle/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevconsole_code_for_ironpython.py b/ptvsd/pydevd/_pydevd_bundle/pydevconsole_code_for_ironpython.py new file mode 100644 index 00000000..71346ccd --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevconsole_code_for_ironpython.py @@ -0,0 +1,513 @@ +"""Utilities needed to emulate Python's interactive interpreter. + +""" + +# Inspired by similar code by Jeff Epler and Fredrik Lundh. 
+ + +import sys +import traceback + + + + + + + + +#START --------------------------- from codeop import CommandCompiler, compile_command +#START --------------------------- from codeop import CommandCompiler, compile_command +#START --------------------------- from codeop import CommandCompiler, compile_command +#START --------------------------- from codeop import CommandCompiler, compile_command +#START --------------------------- from codeop import CommandCompiler, compile_command +r"""Utilities to compile possibly incomplete Python source code. + +This module provides two interfaces, broadly similar to the builtin +function compile(), which take program text, a filename and a 'mode' +and: + +- Return code object if the command is complete and valid +- Return None if the command is incomplete +- Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + +Approach: + +First, check if the source consists entirely of blank lines and +comments; if so, replace it with 'pass', because the built-in +parser doesn't always do the right thing for these. + +Compile three times: as is, with \n, and with \n\n appended. If it +compiles as is, it's complete. If it compiles with one \n appended, +we expect more. If it doesn't compile either way, we compare the +error we get when compiling with \n or \n\n appended. If the errors +are the same, the code is broken. But if the errors are different, we +expect more. Not intuitive; not even guaranteed to hold in future +releases; but this matches the compiler's behavior from Python 1.4 +through 2.2, at least. + +Caveat: + +It is possible (but not likely) that the parser stops parsing with a +successful outcome before reaching the end of the source; in this +case, trailing symbols may be ignored instead of causing an error. +For example, a backslash followed by two newlines may be followed by +arbitrary garbage. This will be fixed once the API for the parser is +better. + +The two interfaces are: + +compile_command(source, filename, symbol): + + Compiles a single command in the manner described above. + +CommandCompiler(): + + Instances of this class have __call__ methods identical in + signature to compile_command; the difference is that if the + instance compiles program text containing a __future__ statement, + the instance 'remembers' and compiles all subsequent program texts + with the statement in force. + +The module also provides another class: + +Compile(): + + Instances of this class act like the built-in function compile, + but with 'memory' in the sense described above. 
+""" + +import __future__ + +_features = [getattr(__future__, fname) + for fname in __future__.all_feature_names] + +__all__ = ["compile_command", "Compile", "CommandCompiler"] + +PyCF_DONT_IMPLY_DEDENT = 0x200 # Matches pythonrun.h + +def _maybe_compile(compiler, source, filename, symbol): + # Check for source consisting of only blank lines and comments + for line in source.split("\n"): + line = line.strip() + if line and line[0] != '#': + break # Leave it alone + else: + if symbol != "eval": + source = "pass" # Replace it with a 'pass' statement + + err = err1 = err2 = None + code = code1 = code2 = None + + try: + code = compiler(source, filename, symbol) + except SyntaxError, err: + pass + + try: + code1 = compiler(source + "\n", filename, symbol) + except SyntaxError, err1: + pass + + try: + code2 = compiler(source + "\n\n", filename, symbol) + except SyntaxError, err2: + pass + + if code: + return code + if not code1 and repr(err1) == repr(err2): + raise SyntaxError, err1 + +def _compile(source, filename, symbol): + return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT) + +def compile_command(source, filename="", symbol="single"): + r"""Compile a command and determine whether it is incomplete. + + Arguments: + + source -- the source string; may contain \n characters + filename -- optional filename from which source was read; default + "" + symbol -- optional grammar start symbol; "single" (default) or "eval" + + Return value / exceptions raised: + + - Return a code object if the command is complete and valid + - Return None if the command is incomplete + - Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). + """ + return _maybe_compile(_compile, source, filename, symbol) + +class Compile: + """Instances of this class behave much like the built-in compile + function, but if one is used to compile text containing a future + statement, it "remembers" and compiles all subsequent program texts + with the statement in force.""" + def __init__(self): + self.flags = PyCF_DONT_IMPLY_DEDENT + + def __call__(self, source, filename, symbol): + codeob = compile(source, filename, symbol, self.flags, 1) + for feature in _features: + if codeob.co_flags & feature.compiler_flag: + self.flags |= feature.compiler_flag + return codeob + +class CommandCompiler: + """Instances of this class have __call__ methods identical in + signature to compile_command; the difference is that if the + instance compiles program text containing a __future__ statement, + the instance 'remembers' and compiles all subsequent program texts + with the statement in force.""" + + def __init__(self,): + self.compiler = Compile() + + def __call__(self, source, filename="", symbol="single"): + r"""Compile a command and determine whether it is incomplete. + + Arguments: + + source -- the source string; may contain \n characters + filename -- optional filename from which source was read; + default "" + symbol -- optional grammar start symbol; "single" (default) or + "eval" + + Return value / exceptions raised: + + - Return a code object if the command is complete and valid + - Return None if the command is incomplete + - Raise SyntaxError, ValueError or OverflowError if the command is a + syntax error (OverflowError and ValueError can be produced by + malformed literals). 
+ """ + return _maybe_compile(self.compiler, source, filename, symbol) + +#END --------------------------- from codeop import CommandCompiler, compile_command +#END --------------------------- from codeop import CommandCompiler, compile_command +#END --------------------------- from codeop import CommandCompiler, compile_command +#END --------------------------- from codeop import CommandCompiler, compile_command +#END --------------------------- from codeop import CommandCompiler, compile_command + + + + + + + + + + + + + + + + + +__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", + "compile_command"] + +def softspace(file, newvalue): + oldvalue = 0 + try: + oldvalue = file.softspace + except AttributeError: + pass + try: + file.softspace = newvalue + except (AttributeError, TypeError): + # "attribute-less object" or "read-only attributes" + pass + return oldvalue + +class InteractiveInterpreter: + """Base class for InteractiveConsole. + + This class deals with parsing and interpreter state (the user's + namespace); it doesn't deal with input buffering or prompting or + input file naming (the filename is always passed in explicitly). + + """ + + def __init__(self, locals=None): + """Constructor. + + The optional 'locals' argument specifies the dictionary in + which code will be executed; it defaults to a newly created + dictionary with key "__name__" set to "__console__" and key + "__doc__" set to None. + + """ + if locals is None: + locals = {"__name__": "__console__", "__doc__": None} + self.locals = locals + self.compile = CommandCompiler() + + def runsource(self, source, filename="", symbol="single"): + """Compile and run some source in the interpreter. + + Arguments are as for compile_command(). + + One several things can happen: + + 1) The input is incorrect; compile_command() raised an + exception (SyntaxError or OverflowError). A syntax traceback + will be printed by calling the showsyntaxerror() method. + + 2) The input is incomplete, and more input is required; + compile_command() returned None. Nothing happens. + + 3) The input is complete; compile_command() returned a code + object. The code is executed by calling self.runcode() (which + also handles run-time exceptions, except for SystemExit). + + The return value is True in case 2, False in the other cases (unless + an exception is raised). The return value can be used to + decide whether to use sys.ps1 or sys.ps2 to prompt the next + line. + + """ + try: + code = self.compile(source, filename, symbol) + except (OverflowError, SyntaxError, ValueError): + # Case 1 + self.showsyntaxerror(filename) + return False + + if code is None: + # Case 2 + return True + + # Case 3 + self.runcode(code) + return False + + def runcode(self, code): + """Execute a code object. + + When an exception occurs, self.showtraceback() is called to + display a traceback. All exceptions are caught except + SystemExit, which is reraised. + + A note about KeyboardInterrupt: this exception may occur + elsewhere in this code, and may not always be caught. The + caller should be prepared to deal with it. + + """ + try: + exec code in self.locals + except SystemExit: + raise + except: + self.showtraceback() + else: + if softspace(sys.stdout, 0): + sys.stdout.write('\n') + + def showsyntaxerror(self, filename=None): + """Display the syntax error that just occurred. + + This doesn't display a stack trace because there isn't one. 
+ + If a filename is given, it is stuffed in the exception instead + of what was there before (because Python's parser always uses + "" when reading from a string). + + The output is written by self.write(), below. + + """ + type, value, sys.last_traceback = sys.exc_info() + sys.last_type = type + sys.last_value = value + if filename and type is SyntaxError: + # Work hard to stuff the correct filename in the exception + try: + msg, (dummy_filename, lineno, offset, line) = value + except: + # Not the format we expect; leave it alone + pass + else: + # Stuff in the right filename + value = SyntaxError(msg, (filename, lineno, offset, line)) + sys.last_value = value + list = traceback.format_exception_only(type, value) + map(self.write, list) + + def showtraceback(self): + """Display the exception that just occurred. + + We remove the first stack item because it is our own code. + + The output is written by self.write(), below. + + """ + try: + type, value, tb = sys.exc_info() + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + tblist = traceback.extract_tb(tb) + del tblist[:1] + list = traceback.format_list(tblist) + if list: + list.insert(0, "Traceback (most recent call last):\n") + list[len(list):] = traceback.format_exception_only(type, value) + finally: + tblist = tb = None + map(self.write, list) + + def write(self, data): + """Write a string. + + The base implementation writes to sys.stderr; a subclass may + replace this with a different implementation. + + """ + sys.stderr.write(data) + + +class InteractiveConsole(InteractiveInterpreter): + """Closely emulate the behavior of the interactive Python interpreter. + + This class builds on InteractiveInterpreter and adds prompting + using the familiar sys.ps1 and sys.ps2, and input buffering. + + """ + + def __init__(self, locals=None, filename=""): + """Constructor. + + The optional locals argument will be passed to the + InteractiveInterpreter base class. + + The optional filename argument should specify the (file)name + of the input stream; it will show up in tracebacks. + + """ + InteractiveInterpreter.__init__(self, locals) + self.filename = filename + self.resetbuffer() + + def resetbuffer(self): + """Reset the input buffer.""" + self.buffer = [] + + def interact(self, banner=None): + """Closely emulate the interactive Python console. + + The optional banner argument specify the banner to print + before the first interaction; by default it prints a banner + similar to the one printed by the real Python interpreter, + followed by the current class name in parentheses (so as not + to confuse this with the real interpreter -- since it's so + close!). + + """ + try: + sys.ps1 #@UndefinedVariable + except AttributeError: + sys.ps1 = ">>> " + try: + sys.ps2 #@UndefinedVariable + except AttributeError: + sys.ps2 = "... " + cprt = 'Type "help", "copyright", "credits" or "license" for more information.' 
+ if banner is None: + self.write("Python %s on %s\n%s\n(%s)\n" % + (sys.version, sys.platform, cprt, + self.__class__.__name__)) + else: + self.write("%s\n" % str(banner)) + more = 0 + while 1: + try: + if more: + prompt = sys.ps2 #@UndefinedVariable + else: + prompt = sys.ps1 #@UndefinedVariable + try: + line = self.raw_input(prompt) + # Can be None if sys.stdin was redefined + encoding = getattr(sys.stdin, "encoding", None) + if encoding and not isinstance(line, unicode): + line = line.decode(encoding) + except EOFError: + self.write("\n") + break + else: + more = self.push(line) + except KeyboardInterrupt: + self.write("\nKeyboardInterrupt\n") + self.resetbuffer() + more = 0 + + def push(self, line): + """Push a line to the interpreter. + + The line should not have a trailing newline; it may have + internal newlines. The line is appended to a buffer and the + interpreter's runsource() method is called with the + concatenated contents of the buffer as source. If this + indicates that the command was executed or invalid, the buffer + is reset; otherwise, the command is incomplete, and the buffer + is left as it was after the line was appended. The return + value is 1 if more input is required, 0 if the line was dealt + with in some way (this is the same as runsource()). + + """ + self.buffer.append(line) + source = "\n".join(self.buffer) + more = self.runsource(source, self.filename) + if not more: + self.resetbuffer() + return more + + def raw_input(self, prompt=""): + """Write a prompt and read a line. + + The returned line does not include the trailing newline. + When the user enters the EOF key sequence, EOFError is raised. + + The base implementation uses the built-in function + raw_input(); a subclass may replace this with a different + implementation. + + """ + return raw_input(prompt) + + +def interact(banner=None, readfunc=None, local=None): + """Closely emulate the interactive Python interpreter. + + This is a backwards compatible interface to the InteractiveConsole + class. When readfunc is not specified, it attempts to import the + readline module to enable GNU readline if it is available. + + Arguments (all optional, all default to None): + + banner -- passed to InteractiveConsole.interact() + readfunc -- if not None, replaces InteractiveConsole.raw_input() + local -- passed to InteractiveInterpreter.__init__() + + """ + console = InteractiveConsole(local) + if readfunc is not None: + console.raw_input = readfunc + else: + try: + import readline + except ImportError: + pass + console.interact(banner) + + +if __name__ == '__main__': + import pdb + pdb.run("interact()\n") diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py new file mode 100644 index 00000000..1c67795a --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py @@ -0,0 +1,23 @@ +# Defines which version of the PyDBAdditionalThreadInfo we'll use. + +import os +use_cython = os.getenv('PYDEVD_USE_CYTHON', None) + +if use_cython == 'YES': + # We must import the cython version if forcing cython + from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo # @UnusedImport + +elif use_cython == 'NO': + # Use the regular version if not forcing cython + from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo # @UnusedImport @Reimport + +elif use_cython is None: + # Regular: use fallback if not found (message is already given elsewhere). 
+ try: + from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo + except ImportError: + from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo # @UnusedImport +else: + raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (accepted: YES, NO)' % (use_cython,)) + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py new file mode 100644 index 00000000..796e349b --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info_regular.py @@ -0,0 +1,124 @@ +import sys +from _pydevd_bundle.pydevd_constants import STATE_RUN, PYTHON_SUSPEND, IS_JYTHON, IS_IRONPYTHON +# IFDEF CYTHON +# ELSE +from _pydevd_bundle.pydevd_frame import PyDBFrame +# ENDIF + +version = 4 + +if not hasattr(sys, '_current_frames'): + + # Some versions of Jython don't have it (but we can provide a replacement) + if IS_JYTHON: + from java.lang import NoSuchFieldException + from org.python.core import ThreadStateMapping + try: + cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + except NoSuchFieldException: + cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + cachedThreadState.accessible = True + thread_states = cachedThreadState.get(ThreadStateMapping) + + def _current_frames(): + as_array = thread_states.entrySet().toArray() + ret = {} + for thread_to_state in as_array: + thread = thread_to_state.getKey() + if thread is None: + continue + thread_state = thread_to_state.getValue() + if thread_state is None: + continue + + frame = thread_state.frame + if frame is None: + continue + + ret[thread.getId()] = frame + return ret + + elif IS_IRONPYTHON: + _tid_to_last_frame = {} + + # IronPython doesn't have it. Let's use our workaround... 
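On CPython the module simply reuses sys._current_frames(), which maps each live thread ident to its topmost frame; the Jython branch above and the IronPython branch just below emulate that same mapping. A small standalone example of what that dict provides:

import sys
import threading

def dump_other_threads():
    # thread ident -> topmost frame, for every running thread except this one
    current = threading.current_thread().ident
    for ident, frame in sys._current_frames().items():
        if ident != current:
            print('%s at %s:%s' % (ident, frame.f_code.co_filename, frame.f_lineno))

worker = threading.Thread(target=lambda: threading.Event().wait(0.5))
worker.start()
dump_other_threads()
worker.join()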
+ def _current_frames(): + return _tid_to_last_frame + + else: + raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).') +else: + _current_frames = sys._current_frames + +#======================================================================================================================= +# PyDBAdditionalThreadInfo +#======================================================================================================================= +# IFDEF CYTHON +# cdef class PyDBAdditionalThreadInfo: +# ELSE +class PyDBAdditionalThreadInfo(object): +# ENDIF + + # IFDEF CYTHON + # cdef public int pydev_state; + # cdef public object pydev_step_stop; # Actually, it's a frame or None + # cdef public int pydev_step_cmd; + # cdef public bint pydev_notify_kill; + # cdef public object pydev_smart_step_stop; # Actually, it's a frame or None + # cdef public bint pydev_django_resolve_frame; + # cdef public object pydev_call_from_jinja2; + # cdef public object pydev_call_inside_jinja2; + # cdef public bint is_tracing; + # cdef public tuple conditional_breakpoint_exception; + # cdef public str pydev_message; + # cdef public int suspend_type; + # cdef public int pydev_next_line; + # cdef public str pydev_func_name; + # ELSE + __slots__ = [ + 'pydev_state', + 'pydev_step_stop', + 'pydev_step_cmd', + 'pydev_notify_kill', + 'pydev_smart_step_stop', + 'pydev_django_resolve_frame', + 'pydev_call_from_jinja2', + 'pydev_call_inside_jinja2', + 'is_tracing', + 'conditional_breakpoint_exception', + 'pydev_message', + 'suspend_type', + 'pydev_next_line', + 'pydev_func_name', + ] + # ENDIF + + def __init__(self): + self.pydev_state = STATE_RUN + self.pydev_step_stop = None + self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + self.pydev_notify_kill = False + self.pydev_smart_step_stop = None + self.pydev_django_resolve_frame = False + self.pydev_call_from_jinja2 = None + self.pydev_call_inside_jinja2 = None + self.is_tracing = False + self.conditional_breakpoint_exception = None + self.pydev_message = '' + self.suspend_type = PYTHON_SUSPEND + self.pydev_next_line = -1 + self.pydev_func_name = '.invalid.' 
# Must match the type in cython + + + def iter_frames(self, t): + #sys._current_frames(): dictionary with thread id -> topmost frame + current_frames = _current_frames() + v = current_frames.get(t.ident) + if v is not None: + return [v] + return [] + + def __str__(self): + return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_breakpoints.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_breakpoints.py new file mode 100644 index 00000000..2fbdf76b --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_breakpoints.py @@ -0,0 +1,185 @@ +from _pydevd_bundle.pydevd_constants import dict_iter_values, IS_PY24 +import pydevd_tracing +import sys +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_import_class + +_original_excepthook = None +_handle_exceptions = None + + +from _pydev_imps._pydev_saved_modules import threading + +threadingCurrentThread = threading.currentThread + +from _pydevd_bundle.pydevd_comm import get_global_debugger + +class ExceptionBreakpoint: + + def __init__( + self, + qname, + notify_always, + notify_on_terminate, + notify_on_first_raise_only, + ignore_libraries + ): + exctype = _get_class(qname) + self.qname = qname + if exctype is not None: + self.name = exctype.__name__ + else: + self.name = None + + self.notify_on_terminate = notify_on_terminate + self.notify_always = notify_always + self.notify_on_first_raise_only = notify_on_first_raise_only + self.ignore_libraries = ignore_libraries + + self.type = exctype + + + def __str__(self): + return self.qname + + +class LineBreakpoint(object): + def __init__(self, line, condition, func_name, expression, suspend_policy="NONE"): + self.line = line + self.condition = condition + self.func_name = func_name + self.expression = expression + self.suspend_policy = suspend_policy + # need for frame evaluation: list of code objects, which bytecode was modified by this breakpoint + self.code_objects = set() + +def get_exception_full_qname(exctype): + if not exctype: + return None + return str(exctype.__module__) + '.' 
+ exctype.__name__ + +def get_exception_name(exctype): + if not exctype: + return None + return exctype.__name__ + + +def get_exception_breakpoint(exctype, exceptions): + exception_full_qname = get_exception_full_qname(exctype) + + exc = None + if exceptions is not None: + try: + return exceptions[exception_full_qname] + except KeyError: + for exception_breakpoint in dict_iter_values(exceptions): + if exception_breakpoint.type is not None and issubclass(exctype, exception_breakpoint.type): + if exc is None or issubclass(exception_breakpoint.type, exc.type): + exc = exception_breakpoint + return exc + + +def _set_additional_info_if_needed(thread): + try: + additional_info = thread.additional_info + if additional_info is None: + raise AttributeError() + except: + from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo + thread.additional_info = PyDBAdditionalThreadInfo() + + +#======================================================================================================================= +# _excepthook +#======================================================================================================================= +def _excepthook(exctype, value, tb): + global _handle_exceptions + if _handle_exceptions: + exception_breakpoint = get_exception_breakpoint(exctype, _handle_exceptions) + else: + exception_breakpoint = None + + #Always call the original excepthook before going on to call the debugger post mortem to show it. + _original_excepthook(exctype, value, tb) + + if not exception_breakpoint: + return + + if tb is None: #sometimes it can be None, e.g. with GTK + return + + if exctype is KeyboardInterrupt: + return + + frames = [] + debugger = get_global_debugger() + user_frame = None + + while tb: + frame = tb.tb_frame + if exception_breakpoint.ignore_libraries and not debugger.not_in_scope(frame.f_code.co_filename): + user_frame = tb.tb_frame + frames.append(tb.tb_frame) + tb = tb.tb_next + + thread = threadingCurrentThread() + frames_byid = dict([(id(frame),frame) for frame in frames]) + if exception_breakpoint.ignore_libraries and user_frame is not None: + frame = user_frame + else: + frame = frames[-1] + exception = (exctype, value, tb) + _set_additional_info_if_needed(thread) + try: + thread.additional_info.pydev_message = exception_breakpoint.qname + except: + thread.additional_info.pydev_message = exception_breakpoint.qname.encode('utf-8') + + pydevd_tracing.SetTrace(None) #no tracing from here + + pydev_log.debug('Handling post-mortem stop on exception breakpoint %s' % exception_breakpoint.qname) + + debugger.handle_post_mortem_stop(thread, frame, frames_byid, exception) + +#======================================================================================================================= +# _set_pm_excepthook +#======================================================================================================================= +def _set_pm_excepthook(handle_exceptions_dict=None): + ''' + Should be called to register the excepthook to be used. + + It's only useful for uncaught exceptions. I.e.: exceptions that go up to the excepthook. + + @param handle_exceptions: dict(exception -> ExceptionBreakpoint) + The exceptions that should be handled. + ''' + global _handle_exceptions + global _original_excepthook + if sys.excepthook != _excepthook: + #Only keep the original if it's not our own _excepthook (if called many times). 
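get_exception_breakpoint above first looks the raised type up by its fully-qualified name and only then falls back to an issubclass scan, so the lookup keys are strings like 'builtins.ValueError' (Python 3) or 'exceptions.ValueError' (Python 2). A self-contained sketch mirroring get_exception_full_qname:

def _full_qname(exctype):
    # '<module>.<name>', or None when no exception type is given.
    if not exctype:
        return None
    return str(exctype.__module__) + '.' + exctype.__name__

assert _full_qname(None) is None
assert _full_qname(ValueError) == ValueError.__module__ + '.ValueError'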
+ _original_excepthook = sys.excepthook + + _handle_exceptions = handle_exceptions_dict + sys.excepthook = _excepthook + +def _restore_pm_excepthook(): + global _original_excepthook + if _original_excepthook: + sys.excepthook = _original_excepthook + _original_excepthook = None + + +def update_exception_hook(dbg): + if dbg.break_on_uncaught_exceptions: + _set_pm_excepthook(dbg.break_on_uncaught_exceptions) + else: + _restore_pm_excepthook() + +def _get_class( kls ): + if IS_PY24 and "BaseException" == kls: + kls = "Exception" + + try: + return eval(kls) + except: + return pydevd_import_class.import_name(kls) diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_comm.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_comm.py new file mode 100644 index 00000000..3313797a --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_comm.py @@ -0,0 +1,1441 @@ +''' pydevd - a debugging daemon +This is the daemon you launch for python remote debugging. + +Protocol: +each command has a format: + id\tsequence-num\ttext + id: protocol command number + sequence-num: each request has a sequence number. Sequence numbers + originating at the debugger are odd, sequence numbers originating + at the daemon are even. Every response uses the same sequence number + as the request. + payload: it is protocol dependent. When response is a complex structure, it + is returned as XML. Each attribute value is urlencoded, and then the whole + payload is urlencoded again to prevent stray characters corrupting protocol/xml encodings + + Commands: + + NUMBER NAME FROM* ARGUMENTS RESPONSE NOTE +100 series: program execution + 101 RUN JAVA - - + 102 LIST_THREADS JAVA RETURN with XML listing of all threads + 103 THREAD_CREATE PYDB - XML with thread information + 104 THREAD_KILL JAVA id (or * to exit) kills the thread + PYDB id nofies JAVA that thread was killed + 105 THREAD_SUSPEND JAVA XML of the stack, suspends the thread + reason for suspension + PYDB id notifies JAVA that thread was suspended + + 106 CMD_THREAD_RUN JAVA id resume the thread + PYDB id \t reason notifies JAVA that thread was resumed + + 107 STEP_INTO JAVA thread_id + 108 STEP_OVER JAVA thread_id + 109 STEP_RETURN JAVA thread_id + + 110 GET_VARIABLE JAVA thread_id \t frame_id \t GET_VARIABLE with XML of var content + FRAME|GLOBAL \t attributes* + + 111 SET_BREAK JAVA file/line of the breakpoint + 112 REMOVE_BREAK JAVA file/line of the return + 113 CMD_EVALUATE_EXPRESSION JAVA expression result of evaluating the expression + 114 CMD_GET_FRAME JAVA request for frame contents + 115 CMD_EXEC_EXPRESSION JAVA + 116 CMD_WRITE_TO_CONSOLE PYDB + 117 CMD_CHANGE_VARIABLE + 118 CMD_RUN_TO_LINE + 119 CMD_RELOAD_CODE + 120 CMD_GET_COMPLETIONS JAVA + +500 series diagnostics/ok + 501 VERSION either Version string (1.0) Currently just used at startup + 502 RETURN either Depends on caller - + +900 series: errors + 901 ERROR either - This is reserved for unexpected errors. 
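The tab-separated framing described in the table above is small enough to sketch. The snippet below is purely illustrative and is not part of this change: the real encoding is done by the NetCommand class further down in this file, and its quoting rules differ in detail (it quotes selected characters in place rather than using quote_plus).

    # Illustrative sketch of the id \t sequence \t payload framing (an assumption-laden
    # stand-in, not the shipped implementation).
    try:
        from urllib.parse import quote_plus, unquote_plus   # Python 3
    except ImportError:
        from urllib import quote_plus, unquote_plus          # Python 2

    def frame_command(cmd_id, seq, payload):
        # command id, sequence number and url-quoted payload, tab-separated, one command per line
        return '%s\t%s\t%s\n' % (cmd_id, seq, quote_plus(payload))

    def parse_command(line):
        # split on the first two tabs only, so the quoted payload may itself contain tabs
        cmd_id, seq, text = line.rstrip('\n').split('\t', 2)
        return int(cmd_id), int(seq), unquote_plus(text)

    # e.g. a RUN request (101) sent by the IDE with an odd sequence number:
    #   frame_command(101, 1, '') == '101\t1\t\n'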
+ + * JAVA - remote debugger, the java end + * PYDB - pydevd, the python end +''' + +import os + +from _pydev_bundle.pydev_imports import _queue +from _pydev_imps._pydev_saved_modules import time +from _pydev_imps._pydev_saved_modules import thread +from _pydev_imps._pydev_saved_modules import threading +from _pydev_imps._pydev_saved_modules import socket +from socket import socket, AF_INET, SOCK_STREAM, SHUT_RD, SHUT_WR, SOL_SOCKET, SO_REUSEADDR, SHUT_RDWR, timeout +from _pydevd_bundle.pydevd_constants import DebugInfoHolder, get_thread_id, IS_JYTHON, IS_PY2, IS_PY3K, STATE_RUN,\ + dict_keys + +try: + from urllib import quote_plus, unquote, unquote_plus +except: + from urllib.parse import quote_plus, unquote, unquote_plus #@Reimport @UnresolvedImport +import pydevconsole +from _pydevd_bundle import pydevd_vars +import pydevd_tracing +from _pydevd_bundle import pydevd_xml +from _pydevd_bundle import pydevd_vm_type +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER, norm_file_to_client +import sys +import traceback +from _pydevd_bundle.pydevd_utils import quote_smart as quote, compare_object_attrs_key, to_string +from _pydev_bundle import pydev_log +from _pydev_bundle import _pydev_completer + +from pydevd_tracing import get_exception_traceback_str +from _pydevd_bundle import pydevd_console +from _pydev_bundle.pydev_monkey import disable_trace_thread_modules, enable_trace_thread_modules +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive + + +CMD_RUN = 101 +CMD_LIST_THREADS = 102 +CMD_THREAD_CREATE = 103 +CMD_THREAD_KILL = 104 +CMD_THREAD_SUSPEND = 105 +CMD_THREAD_RUN = 106 +CMD_STEP_INTO = 107 +CMD_STEP_OVER = 108 +CMD_STEP_RETURN = 109 +CMD_GET_VARIABLE = 110 +CMD_SET_BREAK = 111 +CMD_REMOVE_BREAK = 112 +CMD_EVALUATE_EXPRESSION = 113 +CMD_GET_FRAME = 114 +CMD_EXEC_EXPRESSION = 115 +CMD_WRITE_TO_CONSOLE = 116 +CMD_CHANGE_VARIABLE = 117 +CMD_RUN_TO_LINE = 118 +CMD_RELOAD_CODE = 119 +CMD_GET_COMPLETIONS = 120 + +# Note: renumbered (conflicted on merge) +CMD_CONSOLE_EXEC = 121 +CMD_ADD_EXCEPTION_BREAK = 122 +CMD_REMOVE_EXCEPTION_BREAK = 123 +CMD_LOAD_SOURCE = 124 +CMD_ADD_DJANGO_EXCEPTION_BREAK = 125 +CMD_REMOVE_DJANGO_EXCEPTION_BREAK = 126 +CMD_SET_NEXT_STATEMENT = 127 +CMD_SMART_STEP_INTO = 128 +CMD_EXIT = 129 +CMD_SIGNATURE_CALL_TRACE = 130 + +CMD_SET_PY_EXCEPTION = 131 +CMD_GET_FILE_CONTENTS = 132 +CMD_SET_PROPERTY_TRACE = 133 +# Pydev debug console commands +CMD_EVALUATE_CONSOLE_EXPRESSION = 134 +CMD_RUN_CUSTOM_OPERATION = 135 +CMD_GET_BREAKPOINT_EXCEPTION = 136 +CMD_STEP_CAUGHT_EXCEPTION = 137 +CMD_SEND_CURR_EXCEPTION_TRACE = 138 +CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED = 139 +CMD_IGNORE_THROWN_EXCEPTION_AT = 140 +CMD_ENABLE_DONT_TRACE = 141 +CMD_SHOW_CONSOLE = 142 + +CMD_GET_ARRAY = 143 +CMD_STEP_INTO_MY_CODE = 144 +CMD_GET_CONCURRENCY_EVENT = 145 +CMD_SHOW_RETURN_VALUES = 146 +CMD_INPUT_REQUESTED = 147 +CMD_GET_DESCRIPTION = 148 + +CMD_PROCESS_CREATED = 149 + +CMD_VERSION = 501 +CMD_RETURN = 502 +CMD_ERROR = 901 + +ID_TO_MEANING = { + '101': 'CMD_RUN', + '102': 'CMD_LIST_THREADS', + '103': 'CMD_THREAD_CREATE', + '104': 'CMD_THREAD_KILL', + '105': 'CMD_THREAD_SUSPEND', + '106': 'CMD_THREAD_RUN', + '107': 'CMD_STEP_INTO', + '108': 'CMD_STEP_OVER', + '109': 'CMD_STEP_RETURN', + '110': 'CMD_GET_VARIABLE', + '111': 'CMD_SET_BREAK', + '112': 'CMD_REMOVE_BREAK', + '113': 'CMD_EVALUATE_EXPRESSION', + '114': 'CMD_GET_FRAME', + '115': 'CMD_EXEC_EXPRESSION', + '116': 'CMD_WRITE_TO_CONSOLE', + '117': 'CMD_CHANGE_VARIABLE', + 
'118': 'CMD_RUN_TO_LINE', + '119': 'CMD_RELOAD_CODE', + '120': 'CMD_GET_COMPLETIONS', + '121': 'CMD_CONSOLE_EXEC', + '122': 'CMD_ADD_EXCEPTION_BREAK', + '123': 'CMD_REMOVE_EXCEPTION_BREAK', + '124': 'CMD_LOAD_SOURCE', + '125': 'CMD_ADD_DJANGO_EXCEPTION_BREAK', + '126': 'CMD_REMOVE_DJANGO_EXCEPTION_BREAK', + '127': 'CMD_SET_NEXT_STATEMENT', + '128': 'CMD_SMART_STEP_INTO', + '129': 'CMD_EXIT', + '130': 'CMD_SIGNATURE_CALL_TRACE', + + '131': 'CMD_SET_PY_EXCEPTION', + '132': 'CMD_GET_FILE_CONTENTS', + '133': 'CMD_SET_PROPERTY_TRACE', + '134': 'CMD_EVALUATE_CONSOLE_EXPRESSION', + '135': 'CMD_RUN_CUSTOM_OPERATION', + '136': 'CMD_GET_BREAKPOINT_EXCEPTION', + '137': 'CMD_STEP_CAUGHT_EXCEPTION', + '138': 'CMD_SEND_CURR_EXCEPTION_TRACE', + '139': 'CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED', + '140': 'CMD_IGNORE_THROWN_EXCEPTION_AT', + '141': 'CMD_ENABLE_DONT_TRACE', + '142': 'CMD_SHOW_CONSOLE', + '143': 'CMD_GET_ARRAY', + '144': 'CMD_STEP_INTO_MY_CODE', + '145': 'CMD_GET_CONCURRENCY_EVENT', + '146': 'CMD_SHOW_RETURN_VALUES', + '147': 'CMD_INPUT_REQUESTED', + '148': 'CMD_GET_DESCRIPTION', + + '149': 'CMD_PROCESS_CREATED', + + '501': 'CMD_VERSION', + '502': 'CMD_RETURN', + '901': 'CMD_ERROR', + } + +MAX_IO_MSG_SIZE = 1000 #if the io is too big, we'll not send all (could make the debugger too non-responsive) +#this number can be changed if there's need to do so + +VERSION_STRING = "@@BUILD_NUMBER@@" + +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +file_system_encoding = getfilesystemencoding() + +#--------------------------------------------------------------------------------------------------- UTILITIES + +#======================================================================================================================= +# pydevd_log +#======================================================================================================================= +def pydevd_log(level, *args): + """ levels are: + 0 most serious warnings/errors + 1 warnings/significant events + 2 informational trace + """ + if level <= DebugInfoHolder.DEBUG_TRACE_LEVEL: + #yes, we can have errors printing if the console of the program has been finished (and we're still trying to print something) + try: + sys.stderr.write('%s\n' % (args,)) + except: + pass + +#======================================================================================================================= +# GlobalDebuggerHolder +#======================================================================================================================= +class GlobalDebuggerHolder: + ''' + Holder for the global debugger. 
+ ''' + global_dbg = None # Note: don't rename (the name is used in our attach to process) + +#======================================================================================================================= +# get_global_debugger +#======================================================================================================================= +def get_global_debugger(): + return GlobalDebuggerHolder.global_dbg + +GetGlobalDebugger = get_global_debugger # Backward-compatibility + +#======================================================================================================================= +# set_global_debugger +#======================================================================================================================= +def set_global_debugger(dbg): + GlobalDebuggerHolder.global_dbg = dbg + + +#------------------------------------------------------------------- ACTUAL COMM + +#======================================================================================================================= +# PyDBDaemonThread +#======================================================================================================================= +class PyDBDaemonThread(threading.Thread): + created_pydb_daemon_threads = {} + + def __init__(self): + threading.Thread.__init__(self) + self.setDaemon(True) + self.killReceived = False + self.pydev_do_not_trace = True + self.is_pydev_daemon_thread = True + + def run(self): + created_pydb_daemon = self.created_pydb_daemon_threads + created_pydb_daemon[self] = 1 + try: + try: + if IS_JYTHON and not isinstance(threading.currentThread(), threading._MainThread): + # we shouldn't update sys.modules for the main thread, cause it leads to the second importing 'threading' + # module, and the new instance of main thread is created + import org.python.core as PyCore #@UnresolvedImport + ss = PyCore.PySystemState() + # Note: Py.setSystemState() affects only the current thread. 
+ PyCore.Py.setSystemState(ss) + + self._on_run() + except: + if sys is not None and traceback is not None: + traceback.print_exc() + finally: + del created_pydb_daemon[self] + + def _on_run(self): + raise NotImplementedError('Should be reimplemented by: %s' % self.__class__) + + def do_kill_pydev_thread(self): + #that was not working very well because jython gave some socket errors + self.killReceived = True + + def _stop_trace(self): + if self.pydev_do_not_trace: + + disable_tracing = True + + if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0: + # don't run untraced threads if we're in jython 2.2.1 or lower + # jython bug: if we start a thread and another thread changes the tracing facility + # it affects other threads (it's not set only for the thread but globally) + # Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867 + disable_tracing = False + + if disable_tracing: + pydevd_tracing.SetTrace(None) # no debugging on this thread + + +#======================================================================================================================= +# ReaderThread +#======================================================================================================================= +class ReaderThread(PyDBDaemonThread): + """ reader thread reads and dispatches commands in an infinite loop """ + + def __init__(self, sock): + PyDBDaemonThread.__init__(self) + self.sock = sock + self.setName("pydevd.Reader") + from _pydevd_bundle.pydevd_process_net_command import process_net_command + self.process_net_command = process_net_command + self.global_debugger_holder = GlobalDebuggerHolder + + + + def do_kill_pydev_thread(self): + #We must close the socket so that it doesn't stay halted there. + self.killReceived = True + try: + self.sock.shutdown(SHUT_RD) #shutdown the socket for read + except: + #just ignore that + pass + + def _on_run(self): + self._stop_trace() + read_buffer = "" + try: + + while not self.killReceived: + try: + r = self.sock.recv(1024) + except: + if not self.killReceived: + traceback.print_exc() + self.handle_except() + return #Finished communication. + + #Note: the java backend is always expected to pass utf-8 encoded strings. We now work with unicode + #internally and thus, we may need to convert to the actual encoding where needed (i.e.: filenames + #on python 2 may need to be converted to the filesystem encoding). 
+ if hasattr(r, 'decode'): + r = r.decode('utf-8') + + read_buffer += r + if DebugInfoHolder.DEBUG_RECORD_SOCKET_READS: + sys.stderr.write('debugger: received >>%s<<\n' % (read_buffer,)) + sys.stderr.flush() + + if len(read_buffer) == 0: + self.handle_except() + break + while read_buffer.find('\n') != -1: + command, read_buffer = read_buffer.split('\n', 1) + + args = command.split('\t', 2) + try: + cmd_id = int(args[0]) + pydev_log.debug('Received command: %s %s\n' % (ID_TO_MEANING.get(str(cmd_id), '???'), command,)) + self.process_command(cmd_id, int(args[1]), args[2]) + except: + traceback.print_exc() + sys.stderr.write("Can't process net command: %s\n" % command) + sys.stderr.flush() + + except: + traceback.print_exc() + self.handle_except() + + + def handle_except(self): + self.global_debugger_holder.global_dbg.finish_debugging_session() + + def process_command(self, cmd_id, seq, text): + self.process_net_command(self.global_debugger_holder.global_dbg, cmd_id, seq, text) + + +#----------------------------------------------------------------------------------- SOCKET UTILITIES - WRITER +#======================================================================================================================= +# WriterThread +#======================================================================================================================= +class WriterThread(PyDBDaemonThread): + """ writer thread writes out the commands in an infinite loop """ + def __init__(self, sock): + PyDBDaemonThread.__init__(self) + self.sock = sock + self.setName("pydevd.Writer") + self.cmdQueue = _queue.Queue() + if pydevd_vm_type.get_vm_type() == 'python': + self.timeout = 0 + else: + self.timeout = 0.1 + + def add_command(self, cmd): + """ cmd is NetCommand """ + if not self.killReceived: #we don't take new data after everybody die + self.cmdQueue.put(cmd) + + def _on_run(self): + """ just loop and write responses """ + + self._stop_trace() + get_has_timeout = sys.hexversion >= 0x02030000 # 2.3 onwards have it. 
+ try: + while True: + try: + try: + if get_has_timeout: + cmd = self.cmdQueue.get(1, 0.1) + else: + time.sleep(.01) + cmd = self.cmdQueue.get(0) + except _queue.Empty: + if self.killReceived: + try: + self.sock.shutdown(SHUT_WR) + self.sock.close() + except: + pass + + return #break if queue is empty and killReceived + else: + continue + except: + #pydevd_log(0, 'Finishing debug communication...(1)') + #when liberating the thread here, we could have errors because we were shutting down + #but the thread was still not liberated + return + out = cmd.outgoing + + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 1: + out_message = 'sending cmd --> ' + out_message += "%20s" % ID_TO_MEANING.get(out[:3], 'UNKNOWN') + out_message += ' ' + out_message += unquote(unquote(out)).replace('\n', ' ') + try: + sys.stderr.write('%s\n' % (out_message,)) + except: + pass + + if IS_PY3K: + out = bytearray(out, 'utf-8') + self.sock.send(out) #TODO: this does not guarantee that all message are sent (and jython does not have a send all) + if cmd.id == CMD_EXIT: + break + if time is None: + break #interpreter shutdown + time.sleep(self.timeout) + except Exception: + GlobalDebuggerHolder.global_dbg.finish_debugging_session() + if DebugInfoHolder.DEBUG_TRACE_LEVEL >= 0: + traceback.print_exc() + + def empty(self): + return self.cmdQueue.empty() + + + +#--------------------------------------------------- CREATING THE SOCKET THREADS + +#======================================================================================================================= +# start_server +#======================================================================================================================= +def start_server(port): + """ binds to a port, waits for the debugger to connect """ + s = socket(AF_INET, SOCK_STREAM) + s.settimeout(None) + + try: + from socket import SO_REUSEPORT + s.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1) + except ImportError: + s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) + + s.bind(('', port)) + pydevd_log(1, "Bound to port ", str(port)) + + try: + s.listen(1) + newSock, _addr = s.accept() + pydevd_log(1, "Connection accepted") + # closing server socket is not necessary but we don't need it + s.shutdown(SHUT_RDWR) + s.close() + return newSock + + except: + sys.stderr.write("Could not bind to port: %s\n" % (port,)) + sys.stderr.flush() + traceback.print_exc() + +#======================================================================================================================= +# start_client +#======================================================================================================================= +def start_client(host, port): + """ connects to a host/port """ + pydevd_log(1, "Connecting to ", host, ":", str(port)) + + s = socket(AF_INET, SOCK_STREAM) + + MAX_TRIES = 100 + i = 0 + while i_=" \t') + self.outgoing = '%s\t%s\t%s\n' % (id, seq, encoded) + +#======================================================================================================================= +# NetCommandFactory +#======================================================================================================================= +class NetCommandFactory: + + def _thread_to_xml(self, thread): + """ thread information as XML """ + name = pydevd_xml.make_valid_xml_value(thread.getName()) + cmdText = '' % (quote(name), get_thread_id(thread)) + return cmdText + + def make_error_message(self, seq, text): + cmd = NetCommand(CMD_ERROR, seq, text) + if DebugInfoHolder.DEBUG_TRACE_LEVEL > 2: + sys.stderr.write("Error: %s" % 
(text,)) + return cmd + + def make_thread_created_message(self, thread): + cmdText = "" + self._thread_to_xml(thread) + "" + return NetCommand(CMD_THREAD_CREATE, 0, cmdText) + + def make_process_created_message(self): + cmdText = '' + return NetCommand(CMD_PROCESS_CREATED, 0, cmdText) + + def make_custom_frame_created_message(self, frameId, frameDescription): + frameDescription = pydevd_xml.make_valid_xml_value(frameDescription) + cmdText = '' % (frameDescription, frameId) + return NetCommand(CMD_THREAD_CREATE, 0, cmdText) + + + def make_list_threads_message(self, seq): + """ returns thread listing as XML """ + try: + t = threading.enumerate() + cmd_text = [""] + append = cmd_text.append + for i in t: + if is_thread_alive(i): + append(self._thread_to_xml(i)) + append("") + return NetCommand(CMD_RETURN, seq, ''.join(cmd_text)) + except: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_variable_changed_message(self, seq, payload): + # notify debugger that value was changed successfully + return NetCommand(CMD_RETURN, seq, payload) + + def make_io_message(self, v, ctx, dbg=None): + ''' + @param v: the message to pass to the debug server + @param ctx: 1 for stdio 2 for stderr + @param dbg: If not none, add to the writer + ''' + + try: + if len(v) > MAX_IO_MSG_SIZE: + v = v[0:MAX_IO_MSG_SIZE] + v += '...' + + v = pydevd_xml.make_valid_xml_value(quote(v, '/>_= \t')) + net = NetCommand(str(CMD_WRITE_TO_CONSOLE), 0, '' % (v, ctx)) + except: + net = self.make_error_message(0, get_exception_traceback_str()) + + if dbg: + dbg.writer.add_command(net) + + return net + + def make_version_message(self, seq): + try: + return NetCommand(CMD_VERSION, seq, VERSION_STRING) + except: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_thread_killed_message(self, id): + try: + return NetCommand(CMD_THREAD_KILL, 0, str(id)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_thread_suspend_str(self, thread_id, frame, stop_reason, message, suspend_type="trace"): + """ + + + + + """ + cmd_text_list = [""] + append = cmd_text_list.append + make_valid_xml_value = pydevd_xml.make_valid_xml_value + + if message: + message = make_valid_xml_value(message) + + append('' % (thread_id, stop_reason, message, suspend_type)) + + curr_frame = frame + try: + while curr_frame: + #print cmdText + my_id = id(curr_frame) + #print "id is ", my_id + + if curr_frame.f_code is None: + break #Iron Python sometimes does not have it! + + my_name = curr_frame.f_code.co_name #method name (if in method) or ? if global + if my_name is None: + break #Iron Python sometimes does not have it! 
+ + #print "name is ", my_name + + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(curr_frame) + + myFile = norm_file_to_client(abs_path_real_path_and_base[0]) + if file_system_encoding.lower() != "utf-8" and hasattr(myFile, "decode"): + # myFile is a byte string encoded using the file system encoding + # convert it to utf8 + myFile = myFile.decode(file_system_encoding).encode("utf-8") + + #print "file is ", myFile + #myFile = inspect.getsourcefile(curr_frame) or inspect.getfile(frame) + + myLine = str(curr_frame.f_lineno) + #print "line is ", myLine + + #the variables are all gotten 'on-demand' + #variables = pydevd_xml.frame_vars_to_xml(curr_frame.f_locals) + + variables = '' + append('' % (quote(myFile, '/>_= \t'), myLine)) + append(variables) + append("") + curr_frame = curr_frame.f_back + except : + traceback.print_exc() + + append("") + return ''.join(cmd_text_list) + + def make_thread_suspend_message(self, thread_id, frame, stop_reason, message, suspend_type): + try: + return NetCommand(CMD_THREAD_SUSPEND, 0, self.make_thread_suspend_str(thread_id, frame, stop_reason, message, suspend_type)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_thread_run_message(self, id, reason): + try: + return NetCommand(CMD_THREAD_RUN, 0, str(id) + "\t" + str(reason)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_get_variable_message(self, seq, payload): + try: + return NetCommand(CMD_GET_VARIABLE, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + + def make_get_array_message(self, seq, payload): + try: + return NetCommand(CMD_GET_ARRAY, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_description_message(self, seq, payload): + try: + return NetCommand(CMD_GET_DESCRIPTION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_frame_message(self, seq, payload): + try: + return NetCommand(CMD_GET_FRAME, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + + def make_evaluate_expression_message(self, seq, payload): + try: + return NetCommand(CMD_EVALUATE_EXPRESSION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_completions_message(self, seq, payload): + try: + return NetCommand(CMD_GET_COMPLETIONS, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_get_file_contents(self, seq, payload): + try: + return NetCommand(CMD_GET_FILE_CONTENTS, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_send_breakpoint_exception_message(self, seq, payload): + try: + return NetCommand(CMD_GET_BREAKPOINT_EXCEPTION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_send_curr_exception_trace_message(self, seq, thread_id, curr_frame_id, exc_type, exc_desc, trace_obj): + try: + while trace_obj.tb_next is not None: + trace_obj = trace_obj.tb_next + + exc_type = pydevd_xml.make_valid_xml_value(str(exc_type)).replace('\t', ' ') or 'exception: type unknown' + exc_desc = pydevd_xml.make_valid_xml_value(str(exc_desc)).replace('\t', ' ') or 'exception: no description' + + payload = str(curr_frame_id) + '\t' + 
exc_type + "\t" + exc_desc + "\t" + \ + self.make_thread_suspend_str(thread_id, trace_obj.tb_frame, CMD_SEND_CURR_EXCEPTION_TRACE, '') + + return NetCommand(CMD_SEND_CURR_EXCEPTION_TRACE, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_send_curr_exception_trace_proceeded_message(self, seq, thread_id): + try: + return NetCommand(CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED, 0, str(thread_id)) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_send_console_message(self, seq, payload): + try: + return NetCommand(CMD_EVALUATE_CONSOLE_EXPRESSION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_custom_operation_message(self, seq, payload): + try: + return NetCommand(CMD_RUN_CUSTOM_OPERATION, seq, payload) + except Exception: + return self.make_error_message(seq, get_exception_traceback_str()) + + def make_load_source_message(self, seq, source, dbg=None): + try: + net = NetCommand(CMD_LOAD_SOURCE, seq, '%s' % source) + + except: + net = self.make_error_message(0, get_exception_traceback_str()) + + if dbg: + dbg.writer.add_command(net) + return net + + def make_show_console_message(self, thread_id, frame): + try: + return NetCommand(CMD_SHOW_CONSOLE, 0, self.make_thread_suspend_str(thread_id, frame, CMD_SHOW_CONSOLE, '')) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + def make_input_requested_message(self, started): + try: + return NetCommand(CMD_INPUT_REQUESTED, 0, started) + except: + return self.make_error_message(0, get_exception_traceback_str()) + + + def make_exit_message(self): + try: + net = NetCommand(CMD_EXIT, 0, '') + + except: + net = self.make_error_message(0, get_exception_traceback_str()) + + return net + +INTERNAL_TERMINATE_THREAD = 1 +INTERNAL_SUSPEND_THREAD = 2 + + +#======================================================================================================================= +# InternalThreadCommand +#======================================================================================================================= +class InternalThreadCommand: + """ internal commands are generated/executed by the debugger. + + The reason for their existence is that some commands have to be executed + on specific threads. These are the InternalThreadCommands that get + get posted to PyDB.cmdQueue. + """ + + def can_be_executed_by(self, thread_id): + '''By default, it must be in the same thread to be executed + ''' + return self.thread_id == thread_id or self.thread_id.endswith('|' + thread_id) + + def do_it(self, dbg): + raise NotImplementedError("you have to override do_it") + + +class ReloadCodeCommand(InternalThreadCommand): + + + def __init__(self, module_name, thread_id): + self.thread_id = thread_id + self.module_name = module_name + self.executed = False + self.lock = thread.allocate_lock() + + + def can_be_executed_by(self, thread_id): + if self.thread_id == '*': + return True #Any thread can execute it! + + return InternalThreadCommand.can_be_executed_by(self, thread_id) + + + def do_it(self, dbg): + self.lock.acquire() + try: + if self.executed: + return + self.executed = True + finally: + self.lock.release() + + module_name = self.module_name + if module_name not in sys.modules: + if '.' 
in module_name: + new_module_name = module_name.split('.')[-1] + if new_module_name in sys.modules: + module_name = new_module_name + + if module_name not in sys.modules: + sys.stderr.write('pydev debugger: Unable to find module to reload: "' + module_name + '".\n') + # Too much info... + # sys.stderr.write('pydev debugger: This usually means you are trying to reload the __main__ module (which cannot be reloaded).\n') + + else: + sys.stderr.write('pydev debugger: Start reloading module: "' + module_name + '" ... \n') + from _pydevd_bundle import pydevd_reload + if pydevd_reload.xreload(sys.modules[module_name]): + sys.stderr.write('pydev debugger: reload finished\n') + else: + sys.stderr.write('pydev debugger: reload finished without applying any change\n') + + +#======================================================================================================================= +# InternalTerminateThread +#======================================================================================================================= +class InternalTerminateThread(InternalThreadCommand): + def __init__(self, thread_id): + self.thread_id = thread_id + + def do_it(self, dbg): + pydevd_log(1, "killing ", str(self.thread_id)) + cmd = dbg.cmd_factory.make_thread_killed_message(self.thread_id) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalRunThread +#======================================================================================================================= +class InternalRunThread(InternalThreadCommand): + def __init__(self, thread_id): + self.thread_id = thread_id + + def do_it(self, dbg): + t = pydevd_find_thread_by_id(self.thread_id) + if t: + t.additional_info.pydev_step_cmd = -1 + t.additional_info.pydev_step_stop = None + t.additional_info.pydev_state = STATE_RUN + + +#======================================================================================================================= +# InternalStepThread +#======================================================================================================================= +class InternalStepThread(InternalThreadCommand): + def __init__(self, thread_id, cmd_id): + self.thread_id = thread_id + self.cmd_id = cmd_id + + def do_it(self, dbg): + t = pydevd_find_thread_by_id(self.thread_id) + if t: + t.additional_info.pydev_step_cmd = self.cmd_id + t.additional_info.pydev_state = STATE_RUN + + +#======================================================================================================================= +# InternalSetNextStatementThread +#======================================================================================================================= +class InternalSetNextStatementThread(InternalThreadCommand): + def __init__(self, thread_id, cmd_id, line, func_name): + self.thread_id = thread_id + self.cmd_id = cmd_id + self.line = line + + if IS_PY2: + if isinstance(func_name, unicode): + # On cython with python 2.X it requires an str, not unicode (but on python 3.3 it should be a str, not bytes). 
+ func_name = func_name.encode('utf-8') + + self.func_name = func_name + + def do_it(self, dbg): + t = pydevd_find_thread_by_id(self.thread_id) + if t: + t.additional_info.pydev_step_cmd = self.cmd_id + t.additional_info.pydev_next_line = int(self.line) + t.additional_info.pydev_func_name = self.func_name + t.additional_info.pydev_state = STATE_RUN + + +#======================================================================================================================= +# InternalGetVariable +#======================================================================================================================= +class InternalGetVariable(InternalThreadCommand): + """ gets the value of a variable """ + def __init__(self, seq, thread_id, frame_id, scope, attrs): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.scope = scope + self.attributes = attrs + + def do_it(self, dbg): + """ Converts request into python variable """ + try: + xml = "" + val_dict = pydevd_vars.resolve_compound_variable(self.thread_id, self.frame_id, self.scope, self.attributes) + if val_dict is None: + val_dict = {} + + # assume properly ordered if resolver returns 'OrderedDict' + # check type as string to support OrderedDict backport for older Python + keys = dict_keys(val_dict) + if not val_dict.__class__.__name__ == "OrderedDict": + keys.sort(key=compare_object_attrs_key) + + for k in keys: + xml += pydevd_xml.var_to_xml(val_dict[k], to_string(k)) + + xml += "" + cmd = dbg.cmd_factory.make_get_variable_message(self.sequence, xml) + dbg.writer.add_command(cmd) + except Exception: + cmd = dbg.cmd_factory.make_error_message( + self.sequence, "Error resolving variables %s" % (get_exception_traceback_str(),)) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalGetArray +#======================================================================================================================= +class InternalGetArray(InternalThreadCommand): + def __init__(self, seq, roffset, coffset, rows, cols, format, thread_id, frame_id, scope, attrs): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.scope = scope + self.name = attrs.split("\t")[-1] + self.attrs = attrs + self.roffset = int(roffset) + self.coffset = int(coffset) + self.rows = int(rows) + self.cols = int(cols) + self.format = format + + def do_it(self, dbg): + try: + frame = pydevd_vars.find_frame(self.thread_id, self.frame_id) + var = pydevd_vars.eval_in_context(self.name, frame.f_globals, frame.f_locals) + xml = pydevd_vars.table_like_struct_to_xml(var, self.name, self.roffset, self.coffset, self.rows, self.cols, self.format ) + cmd = dbg.cmd_factory.make_get_array_message(self.sequence, xml) + dbg.writer.add_command(cmd) + except: + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error resolving array: " + get_exception_traceback_str()) + dbg.writer.add_command(cmd) + +#======================================================================================================================= +# InternalChangeVariable +#======================================================================================================================= +class InternalChangeVariable(InternalThreadCommand): + """ changes the value of a variable """ + def __init__(self, seq, thread_id, frame_id, scope, attr, expression): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + 
self.scope = scope + self.attr = attr + self.expression = expression + + def do_it(self, dbg): + """ Converts request into python variable """ + try: + result = pydevd_vars.change_attr_expression(self.thread_id, self.frame_id, self.attr, self.expression, dbg) + xml = "" + xml += pydevd_xml.var_to_xml(result, "") + xml += "" + cmd = dbg.cmd_factory.make_variable_changed_message(self.sequence, xml) + dbg.writer.add_command(cmd) + except Exception: + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error changing variable attr:%s expression:%s traceback:%s" % (self.attr, self.expression, get_exception_traceback_str())) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalGetFrame +#======================================================================================================================= +class InternalGetFrame(InternalThreadCommand): + """ gets the value of a variable """ + def __init__(self, seq, thread_id, frame_id): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + + def do_it(self, dbg): + """ Converts request into python variable """ + try: + frame = pydevd_vars.find_frame(self.thread_id, self.frame_id) + if frame is not None: + hidden_ns = pydevconsole.get_ipython_hidden_vars() + xml = "" + xml += pydevd_xml.frame_vars_to_xml(frame.f_locals, hidden_ns) + del frame + xml += "" + cmd = dbg.cmd_factory.make_get_frame_message(self.sequence, xml) + dbg.writer.add_command(cmd) + else: + #pydevd_vars.dump_frames(self.thread_id) + #don't print this error: frame not found: means that the client is not synchronized (but that's ok) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Frame not found: %s from thread: %s" % (self.frame_id, self.thread_id)) + dbg.writer.add_command(cmd) + except: + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error resolving frame: %s from thread: %s" % (self.frame_id, self.thread_id)) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalEvaluateExpression +#======================================================================================================================= +class InternalEvaluateExpression(InternalThreadCommand): + """ gets the value of a variable """ + + def __init__(self, seq, thread_id, frame_id, expression, doExec, doTrim, temp_name): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.expression = expression + self.doExec = doExec + self.doTrim = doTrim + self.temp_name = temp_name + + def do_it(self, dbg): + """ Converts request into python variable """ + try: + result = pydevd_vars.evaluate_expression(self.thread_id, self.frame_id, self.expression, self.doExec) + if self.temp_name != "": + pydevd_vars.change_attr_expression(self.thread_id, self.frame_id, self.temp_name, self.expression, dbg, result) + xml = "" + xml += pydevd_xml.var_to_xml(result, self.expression, self.doTrim) + xml += "" + cmd = dbg.cmd_factory.make_evaluate_expression_message(self.sequence, xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating expression " + exc) + dbg.writer.add_command(cmd) + 
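All of these Internal* classes follow the same shape: remember the request's sequence number and thread id, and when do_it eventually runs on the matching thread, build a payload and hand a NetCommand to the writer. A minimal, hypothetical command in that style might look like the sketch below; InternalEchoCommand and its payload are invented for illustration, and only the surrounding names (InternalThreadCommand, NetCommand, CMD_RETURN, make_error_message, get_exception_traceback_str) come from this file.

    class InternalEchoCommand(InternalThreadCommand):
        """Hypothetical example: echo a message back to the IDE as a CMD_RETURN."""

        def __init__(self, seq, thread_id, message):
            self.sequence = seq         # reuse the request's sequence number in the reply
            self.thread_id = thread_id  # can_be_executed_by() keeps execution on this thread
            self.message = message

        def do_it(self, dbg):
            try:
                cmd = NetCommand(CMD_RETURN, self.sequence, self.message)
            except Exception:
                cmd = dbg.cmd_factory.make_error_message(
                    self.sequence, 'Error echoing message: ' + get_exception_traceback_str())
            dbg.writer.add_command(cmd)

Like the commands above, such an object would be queued on PyDB.cmdQueue and picked up by the thread whose id matches, as the InternalThreadCommand docstring describes.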
+#======================================================================================================================= +# InternalGetCompletions +#======================================================================================================================= +class InternalGetCompletions(InternalThreadCommand): + """ Gets the completions in a given scope """ + + def __init__(self, seq, thread_id, frame_id, act_tok): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.act_tok = act_tok + + + def do_it(self, dbg): + """ Converts request into completions """ + try: + remove_path = None + try: + + frame = pydevd_vars.find_frame(self.thread_id, self.frame_id) + if frame is not None: + + msg = _pydev_completer.generate_completions_as_xml(frame, self.act_tok) + + cmd = dbg.cmd_factory.make_get_completions_message(self.sequence, msg) + dbg.writer.add_command(cmd) + else: + cmd = dbg.cmd_factory.make_error_message(self.sequence, "InternalGetCompletions: Frame not found: %s from thread: %s" % (self.frame_id, self.thread_id)) + dbg.writer.add_command(cmd) + + + finally: + if remove_path is not None: + sys.path.remove(remove_path) + + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating expression " + exc) + dbg.writer.add_command(cmd) + + +# ======================================================================================================================= +# InternalGetDescription +# ======================================================================================================================= +class InternalGetDescription(InternalThreadCommand): + """ Fetch the variable description stub from the debug console + """ + + def __init__(self, seq, thread_id, frame_id, expression): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.expression = expression + + def do_it(self, dbg): + """ Get completions and write back to the client + """ + try: + frame = pydevd_vars.find_frame(self.thread_id, self.frame_id) + description = pydevd_console.get_description(frame, self.thread_id, self.frame_id, self.expression) + description = pydevd_xml.make_valid_xml_value(quote(description, '/>_= \t')) + description_xml = '' % description + cmd = dbg.cmd_factory.make_get_description_message(self.sequence, description_xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error in fetching description" + exc) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalGetBreakpointException +#======================================================================================================================= +class InternalGetBreakpointException(InternalThreadCommand): + """ Send details of exception raised while evaluating conditional breakpoint """ + def __init__(self, thread_id, exc_type, stacktrace): + self.sequence = 0 + self.thread_id = thread_id + self.stacktrace = stacktrace + self.exc_type = exc_type + + def do_it(self, dbg): + try: + callstack = "" + + makeValid = pydevd_xml.make_valid_xml_value + + for filename, line, methodname, methodobj in self.stacktrace: + if file_system_encoding.lower() != "utf-8" and hasattr(filename, "decode"): + # filename is a byte string encoded using the file system encoding + # convert it to utf8 + 
filename = filename.decode(file_system_encoding).encode("utf-8") + + callstack += '' \ + % (self.thread_id, makeValid(filename), line, makeValid(methodname), makeValid(methodobj)) + callstack += "" + + cmd = dbg.cmd_factory.make_send_breakpoint_exception_message(self.sequence, self.exc_type + "\t" + callstack) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error Sending Exception: " + exc) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalSendCurrExceptionTrace +#======================================================================================================================= +class InternalSendCurrExceptionTrace(InternalThreadCommand): + """ Send details of the exception that was caught and where we've broken in. + """ + def __init__(self, thread_id, arg, curr_frame_id): + ''' + :param arg: exception type, description, traceback object + ''' + self.sequence = 0 + self.thread_id = thread_id + self.curr_frame_id = curr_frame_id + self.arg = arg + + def do_it(self, dbg): + try: + cmd = dbg.cmd_factory.make_send_curr_exception_trace_message(self.sequence, self.thread_id, self.curr_frame_id, *self.arg) + del self.arg + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error Sending Current Exception Trace: " + exc) + dbg.writer.add_command(cmd) + +#======================================================================================================================= +# InternalSendCurrExceptionTraceProceeded +#======================================================================================================================= +class InternalSendCurrExceptionTraceProceeded(InternalThreadCommand): + """ Send details of the exception that was caught and where we've broken in. 
+ """ + def __init__(self, thread_id): + self.sequence = 0 + self.thread_id = thread_id + + def do_it(self, dbg): + try: + cmd = dbg.cmd_factory.make_send_curr_exception_trace_proceeded_message(self.sequence, self.thread_id) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error Sending Current Exception Trace Proceeded: " + exc) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalEvaluateConsoleExpression +#======================================================================================================================= +class InternalEvaluateConsoleExpression(InternalThreadCommand): + """ Execute the given command in the debug console """ + + def __init__(self, seq, thread_id, frame_id, line, buffer_output=True): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.line = line + self.buffer_output = buffer_output + + def do_it(self, dbg): + """ Create an XML for console output, error and more (true/false) + + + + true/false + + """ + try: + frame = pydevd_vars.find_frame(self.thread_id, self.frame_id) + if frame is not None: + console_message = pydevd_console.execute_console_command( + frame, self.thread_id, self.frame_id, self.line, self.buffer_output) + + cmd = dbg.cmd_factory.make_send_console_message(self.sequence, console_message.to_xml()) + else: + from _pydevd_bundle.pydevd_console import ConsoleMessage + console_message = ConsoleMessage() + console_message.add_console_message( + pydevd_console.CONSOLE_ERROR, + "Select the valid frame in the debug view (thread: %s, frame: %s invalid)" % (self.thread_id, self.frame_id), + ) + cmd = dbg.cmd_factory.make_error_message(self.sequence, console_message.to_xml()) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating expression " + exc) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalRunCustomOperation +#======================================================================================================================= +class InternalRunCustomOperation(InternalThreadCommand): + """ Run a custom command on an expression + """ + def __init__(self, seq, thread_id, frame_id, scope, attrs, style, encoded_code_or_file, fnname): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.scope = scope + self.attrs = attrs + self.style = style + self.code_or_file = unquote_plus(encoded_code_or_file) + self.fnname = fnname + + def do_it(self, dbg): + try: + res = pydevd_vars.custom_operation(self.thread_id, self.frame_id, self.scope, self.attrs, + self.style, self.code_or_file, self.fnname) + resEncoded = quote_plus(res) + cmd = dbg.cmd_factory.make_custom_operation_message(self.sequence, resEncoded) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error in running custom operation" + exc) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalConsoleGetCompletions 
+#======================================================================================================================= +class InternalConsoleGetCompletions(InternalThreadCommand): + """ Fetch the completions in the debug console + """ + def __init__(self, seq, thread_id, frame_id, act_tok): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.act_tok = act_tok + + def do_it(self, dbg): + """ Get completions and write back to the client + """ + try: + frame = pydevd_vars.find_frame(self.thread_id, self.frame_id) + completions_xml = pydevd_console.get_completions(frame, self.act_tok) + cmd = dbg.cmd_factory.make_send_console_message(self.sequence, completions_xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error in fetching completions" + exc) + dbg.writer.add_command(cmd) + + +#======================================================================================================================= +# InternalConsoleExec +#======================================================================================================================= +class InternalConsoleExec(InternalThreadCommand): + """ gets the value of a variable """ + + def __init__(self, seq, thread_id, frame_id, expression): + self.sequence = seq + self.thread_id = thread_id + self.frame_id = frame_id + self.expression = expression + + def do_it(self, dbg): + """ Converts request into python variable """ + try: + try: + #don't trace new threads created by console command + disable_trace_thread_modules() + + result = pydevconsole.console_exec(self.thread_id, self.frame_id, self.expression, dbg) + xml = "" + xml += pydevd_xml.var_to_xml(result, "") + xml += "" + cmd = dbg.cmd_factory.make_evaluate_expression_message(self.sequence, xml) + dbg.writer.add_command(cmd) + except: + exc = get_exception_traceback_str() + sys.stderr.write('%s\n' % (exc,)) + cmd = dbg.cmd_factory.make_error_message(self.sequence, "Error evaluating console expression " + exc) + dbg.writer.add_command(cmd) + finally: + enable_trace_thread_modules() + + sys.stderr.flush() + sys.stdout.flush() + + +#======================================================================================================================= +# pydevd_find_thread_by_id +#======================================================================================================================= +def pydevd_find_thread_by_id(thread_id): + try: + # there was a deadlock here when I did not remove the tracing function when thread was dead + threads = threading.enumerate() + for i in threads: + tid = get_thread_id(i) + if thread_id == tid or thread_id.endswith('|' + tid): + return i + + sys.stderr.write("Could not find thread %s\n" % thread_id) + sys.stderr.write("Available: %s\n" % [get_thread_id(t) for t in threads]) + sys.stderr.flush() + except: + traceback.print_exc() + + return None diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_command_line_handling.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_command_line_handling.py new file mode 100644 index 00000000..d672f277 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_command_line_handling.py @@ -0,0 +1,147 @@ +class ArgHandlerWithParam: + ''' + Handler for some arguments which needs a value + ''' + + def __init__(self, arg_name, convert_val=None, default_val=None): + self.arg_name = arg_name + self.arg_v_rep = '--%s' % (arg_name,) + self.convert_val = convert_val + self.default_val = default_val + + def 
to_argv(self, lst, setup): + v = setup.get(self.arg_name) + if v is not None and v != self.default_val: + lst.append(self.arg_v_rep) + lst.append('%s' % (v,)) + + def handle_argv(self, argv, i, setup): + assert argv[i] == self.arg_v_rep + del argv[i] + + val = argv[i] + if self.convert_val: + val = self.convert_val(val) + + setup[self.arg_name] = val + del argv[i] + +class ArgHandlerBool: + ''' + If a given flag is received, mark it as 'True' in setup. + ''' + + def __init__(self, arg_name, default_val=False): + self.arg_name = arg_name + self.arg_v_rep = '--%s' % (arg_name,) + self.default_val = default_val + + def to_argv(self, lst, setup): + v = setup.get(self.arg_name) + if v: + lst.append(self.arg_v_rep) + + def handle_argv(self, argv, i, setup): + assert argv[i] == self.arg_v_rep + del argv[i] + setup[self.arg_name] = True + + +ACCEPTED_ARG_HANDLERS = [ + ArgHandlerWithParam('port', int, 0), + ArgHandlerWithParam('vm_type'), + ArgHandlerWithParam('client'), + + ArgHandlerBool('server'), + ArgHandlerBool('DEBUG_RECORD_SOCKET_READS'), + ArgHandlerBool('multiproc'), # Used by PyCharm (reuses connection: ssh tunneling) + ArgHandlerBool('multiprocess'), # Used by PyDev (creates new connection to ide) + ArgHandlerBool('save-signatures'), + ArgHandlerBool('save-threading'), + ArgHandlerBool('save-asyncio'), + ArgHandlerBool('print-in-debugger-startup'), + ArgHandlerBool('cmd-line'), + ArgHandlerBool('module'), +] + +ARGV_REP_TO_HANDLER = {} +for handler in ACCEPTED_ARG_HANDLERS: + ARGV_REP_TO_HANDLER[handler.arg_v_rep] = handler + +def get_pydevd_file(): + import pydevd + f = pydevd.__file__ + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + return f + +def setup_to_argv(setup): + ''' + :param dict setup: + A dict previously gotten from process_command_line. + + :note: does not handle --file nor --DEBUG. + ''' + ret = [get_pydevd_file()] + + for handler in ACCEPTED_ARG_HANDLERS: + if handler.arg_name in setup: + handler.to_argv(ret, setup) + return ret + +def process_command_line(argv): + """ parses the arguments. + removes our arguments from the command line """ + setup = {} + for handler in ACCEPTED_ARG_HANDLERS: + setup[handler.arg_name] = handler.default_val + setup['file'] = '' + setup['qt-support'] = '' + + i = 0 + del argv[0] + while i < len(argv): + handler = ARGV_REP_TO_HANDLER.get(argv[i]) + if handler is not None: + handler.handle_argv(argv, i, setup) + + elif argv[i].startswith('--qt-support'): + # The --qt-support is special because we want to keep backward compatibility: + # Previously, just passing '--qt-support' meant that we should use the auto-discovery mode + # whereas now, if --qt-support is passed, it should be passed as --qt-support=, where + # mode can be one of 'auto', 'none', 'pyqt5', 'pyqt4', 'pyside'. + if argv[i] == '--qt-support': + setup['qt-support'] = 'auto' + + elif argv[i].startswith('--qt-support='): + qt_support = argv[i][len('--qt-support='):] + valid_modes = ('none', 'auto', 'pyqt5', 'pyqt4', 'pyside') + if qt_support not in valid_modes: + raise ValueError("qt-support mode invalid: " + qt_support) + if qt_support == 'none': + # On none, actually set an empty string to evaluate to False. + setup['qt-support'] = '' + else: + setup['qt-support'] = qt_support + else: + raise ValueError("Unexpected definition for qt-support flag: " + argv[i]) + + del argv[i] + + + elif argv[i] == '--file': + # --file is special because it's the last one (so, no handler for it). 
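A purely illustrative aside (the command line below is hypothetical): taken as a whole, process_command_line consumes the debugger's own options and leaves the debuggee's arguments behind, roughly like this:

    argv = ['pydevd.py', '--port', '5678', '--client', '127.0.0.1',
            '--file', 'prog.py', 'arg1']
    setup = process_command_line(argv)
    # setup['port'] == 5678, setup['client'] == '127.0.0.1', setup['file'] == 'prog.py'
    # argv has been trimmed in place to ['prog.py', 'arg1'], the debugged program's own argv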
+ del argv[i] + setup['file'] = argv[i] + i = len(argv) # pop out, file is our last argument + + elif argv[i] == '--DEBUG': + from pydevd import set_debug + del argv[i] + set_debug(setup) + + else: + raise ValueError("Unexpected option: " + argv[i]) + return setup diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_console.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_console.py new file mode 100644 index 00000000..88318a52 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_console.py @@ -0,0 +1,247 @@ +'''An helper file for the pydev debugger (REPL) console +''' +import sys +import traceback +from code import InteractiveConsole + +from _pydev_bundle import _pydev_completer +from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn +from _pydev_bundle.pydev_imports import Exec +from _pydev_bundle.pydev_override import overrides +from _pydevd_bundle import pydevd_save_locals +from _pydevd_bundle.pydevd_io import IOBuf +from pydevd_tracing import get_exception_traceback_str +from _pydevd_bundle.pydevd_xml import make_valid_xml_value + +CONSOLE_OUTPUT = "output" +CONSOLE_ERROR = "error" + + +#======================================================================================================================= +# ConsoleMessage +#======================================================================================================================= +class ConsoleMessage: + """Console Messages + """ + def __init__(self): + self.more = False + # List of tuple [('error', 'error_message'), ('message_list', 'output_message')] + self.console_messages = [] + + def add_console_message(self, message_type, message): + """add messages in the console_messages list + """ + for m in message.split("\n"): + if m.strip(): + self.console_messages.append((message_type, m)) + + def update_more(self, more): + """more is set to true if further input is required from the user + else more is set to false + """ + self.more = more + + def to_xml(self): + """Create an XML for console message_list, error and more (true/false) + + console message_list + console error + true/false + + """ + makeValid = make_valid_xml_value + + xml = '%s' % (self.more) + + for message_type, message in self.console_messages: + xml += '<%s message="%s">' % (message_type, makeValid(message), message_type) + + xml += '' + + return xml + + +#======================================================================================================================= +# DebugConsoleStdIn +#======================================================================================================================= +class DebugConsoleStdIn(BaseStdIn): + + overrides(BaseStdIn.readline) + def readline(self, *args, **kwargs): + sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n') + return '\n' + +#======================================================================================================================= +# DebugConsole +#======================================================================================================================= +class DebugConsole(InteractiveConsole, BaseInterpreterInterface): + """Wrapper around code.InteractiveConsole, in order to send + errors and outputs to the debug console + """ + + overrides(BaseInterpreterInterface.create_std_in) + def create_std_in(self, *args, **kwargs): + try: + if not self.__buffer_output: + return sys.stdin + except: + pass + + return DebugConsoleStdIn() #If buffered, raw_input is not supported in this console. 
+ + + @overrides(InteractiveConsole.push) + def push(self, line, frame, buffer_output=True): + """Change built-in stdout and stderr methods by the + new custom StdMessage. + execute the InteractiveConsole.push. + Change the stdout and stderr back to the original built-ins + + :param buffer_output: if False won't redirect the output. + + Return boolean (True if more input is required else False), + output_messages and input_messages + """ + self.__buffer_output = buffer_output + more = False + if buffer_output: + original_stdout = sys.stdout + original_stderr = sys.stderr + try: + try: + self.frame = frame + if buffer_output: + out = sys.stdout = IOBuf() + err = sys.stderr = IOBuf() + more = self.add_exec(line) + except Exception: + exc = get_exception_traceback_str() + if buffer_output: + err.buflist.append("Internal Error: %s" % (exc,)) + else: + sys.stderr.write("Internal Error: %s\n" % (exc,)) + finally: + #Remove frame references. + self.frame = None + frame = None + if buffer_output: + sys.stdout = original_stdout + sys.stderr = original_stderr + + if buffer_output: + return more, out.buflist, err.buflist + else: + return more, [], [] + + + @overrides(BaseInterpreterInterface.do_add_exec) + def do_add_exec(self, line): + return InteractiveConsole.push(self, line) + + + @overrides(InteractiveConsole.runcode) + def runcode(self, code): + """Execute a code object. + + When an exception occurs, self.showtraceback() is called to + display a traceback. All exceptions are caught except + SystemExit, which is reraised. + + A note about KeyboardInterrupt: this exception may occur + elsewhere in this code, and may not always be caught. The + caller should be prepared to deal with it. + + """ + try: + Exec(code, self.frame.f_globals, self.frame.f_locals) + pydevd_save_locals.save_locals(self.frame) + except SystemExit: + raise + except: + self.showtraceback() + + def get_namespace(self): + dbg_namespace = {} + dbg_namespace.update(self.frame.f_globals) + dbg_namespace.update(self.frame.f_locals) # locals later because it has precedence over the actual globals + return dbg_namespace + + +#======================================================================================================================= +# InteractiveConsoleCache +#======================================================================================================================= +class InteractiveConsoleCache: + + thread_id = None + frame_id = None + interactive_console_instance = None + + +#Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions. +def get_interactive_console(thread_id, frame_id, frame, console_message): + """returns the global interactive console.
+ interactive console should have been initialized by this time + :rtype: DebugConsole + """ + if InteractiveConsoleCache.thread_id == thread_id and InteractiveConsoleCache.frame_id == frame_id: + return InteractiveConsoleCache.interactive_console_instance + + InteractiveConsoleCache.interactive_console_instance = DebugConsole() + InteractiveConsoleCache.thread_id = thread_id + InteractiveConsoleCache.frame_id = frame_id + + console_stacktrace = traceback.extract_stack(frame, limit=1) + if console_stacktrace: + current_context = console_stacktrace[0] # top entry from stacktrace + context_message = 'File "%s", line %s, in %s' % (current_context[0], current_context[1], current_context[2]) + console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,)) + return InteractiveConsoleCache.interactive_console_instance + + +def clear_interactive_console(): + InteractiveConsoleCache.thread_id = None + InteractiveConsoleCache.frame_id = None + InteractiveConsoleCache.interactive_console_instance = None + + +def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True): + """fetch an interactive console instance from the cache and + push the received command to the console. + + create and return an instance of console_message + """ + console_message = ConsoleMessage() + + interpreter = get_interactive_console(thread_id, frame_id, frame, console_message) + more, output_messages, error_messages = interpreter.push(line, frame, buffer_output) + console_message.update_more(more) + + for message in output_messages: + console_message.add_console_message(CONSOLE_OUTPUT, message) + + for message in error_messages: + console_message.add_console_message(CONSOLE_ERROR, message) + + return console_message + + +def get_description(frame, thread_id, frame_id, expression): + console_message = ConsoleMessage() + interpreter = get_interactive_console(thread_id, frame_id, frame, console_message) + try: + interpreter.frame = frame + return interpreter.getDescription(expression) + finally: + interpreter.frame = None + + +def get_completions(frame, act_tok): + """ fetch all completions, create xml for the same + return the completions xml + """ + return _pydev_completer.generate_completions_as_xml(frame, act_tok) + + + + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_constants.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_constants.py new file mode 100644 index 00000000..ad1ec8b4 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_constants.py @@ -0,0 +1,337 @@ +''' +This module holds the constants used for specifying the states of the debugger. +''' +from __future__ import nested_scopes + +STATE_RUN = 1 +STATE_SUSPEND = 2 + +PYTHON_SUSPEND = 1 +DJANGO_SUSPEND = 2 +JINJA2_SUSPEND = 3 + + +class DebugInfoHolder: + #we have to put it here because it can be set through the command line (so, the + #already imported references would not have it). 
+ DEBUG_RECORD_SOCKET_READS = False + DEBUG_TRACE_LEVEL = -1 + DEBUG_TRACE_BREAKPOINTS = -1 + +#Hold a reference to the original _getframe (because psyco will change that as soon as it's imported) +import sys #Note: the sys import must be here anyways (others depend on it) +try: + get_frame = sys._getframe +except AttributeError: + def get_frame(): + raise AssertionError('sys._getframe not available (possible causes: enable -X:Frames on IronPython?)') + +#Used to determine the maximum size of each variable passed to eclipse -- having a big value here may make +#the communication slower -- as the variables are being gathered lazily in the latest version of eclipse, +#this value was raised from 200 to 1000. +MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 1000 +# Prefix for saving functions return values in locals +RETURN_VALUES_DICT = '__pydevd_ret_val_dict' + +import os + +from _pydevd_bundle import pydevd_vm_type + +IS_JYTHON = pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON +IS_IRONPYTHON = sys.platform == 'cli' + +IS_JYTH_LESS25 = False +if IS_JYTHON: + if sys.version_info[0] == 2 and sys.version_info[1] < 5: + IS_JYTH_LESS25 = True + +IS_PYTHON_STACKLESS = "stackless" in sys.version.lower() +CYTHON_SUPPORTED = False + +try: + import platform + python_implementation = platform.python_implementation() +except: + pass +else: + if python_implementation == 'CPython' and not IS_PYTHON_STACKLESS: + # Only available for CPython! + if ( + (sys.version_info[0] == 2 and sys.version_info[1] >= 7) + or (sys.version_info[0] == 3 and sys.version_info[1] >= 3) + or (sys.version_info[0] > 3) + ): + # Supported in 2.7 or 3.3 onwards (32 or 64) + CYTHON_SUPPORTED = True + + +#======================================================================================================================= +# Python 3? +#======================================================================================================================= +IS_PY3K = False +IS_PY34_OLDER = False +IS_PY2 = True +IS_PY27 = False +IS_PY24 = False +try: + if sys.version_info[0] >= 3: + IS_PY3K = True + IS_PY2 = False + if (sys.version_info[0] == 3 and sys.version_info[1] >= 4) or sys.version_info[0] > 3: + IS_PY34_OLDER = True + elif sys.version_info[0] == 2 and sys.version_info[1] == 7: + IS_PY27 = True + elif sys.version_info[0] == 2 and sys.version_info[1] == 4: + IS_PY24 = True +except AttributeError: + pass #Not all versions have sys.version_info + +try: + SUPPORT_GEVENT = os.getenv('GEVENT_SUPPORT', 'False') == 'True' +except: + # Jython 2.1 doesn't accept that construct + SUPPORT_GEVENT = False + +# At the moment gevent supports Python >= 2.6 and Python >= 3.3 +USE_LIB_COPY = SUPPORT_GEVENT and \ + ((not IS_PY3K and sys.version_info[1] >= 6) or + (IS_PY3K and sys.version_info[1] >= 3)) + + +INTERACTIVE_MODE_AVAILABLE = sys.platform in ('darwin', 'win32') or os.getenv('DISPLAY') is not None + + +def protect_libraries_from_patching(): + """ + In this function we delete some modules from `sys.modules` dictionary and import them again inside + `_pydev_saved_modules` in order to save their original copies there. After that we can use these + saved modules within the debugger to protect them from patching by external libraries (e.g. gevent). 
+ """ + patched = ['threading', 'thread', '_thread', 'time', 'socket', 'Queue', 'queue', 'select', + 'xmlrpclib', 'SimpleXMLRPCServer', 'BaseHTTPServer', 'SocketServer', + 'xmlrpc.client', 'xmlrpc.server', 'http.server', 'socketserver'] + + for name in patched: + try: + __import__(name) + except: + pass + + patched_modules = dict([(k, v) for k, v in sys.modules.items() + if k in patched]) + + for name in patched_modules: + del sys.modules[name] + + # import for side effects + import _pydev_imps._pydev_saved_modules + + for name in patched_modules: + sys.modules[name] = patched_modules[name] + + +if USE_LIB_COPY: + protect_libraries_from_patching() + + +from _pydev_imps._pydev_saved_modules import thread +_nextThreadIdLock = thread.allocate_lock() + +if IS_PY3K: + def dict_keys(d): + return list(d.keys()) + + def dict_values(d): + return list(d.values()) + + dict_iter_values = dict.values + + def dict_iter_items(d): + return d.items() + + def dict_items(d): + return list(d.items()) + +else: + dict_keys = None + try: + dict_keys = dict.keys + except: + pass + + if IS_JYTHON or not dict_keys: + def dict_keys(d): + return d.keys() + + try: + dict_iter_values = dict.itervalues + except: + try: + dict_iter_values = dict.values #Older versions don't have the itervalues + except: + def dict_iter_values(d): + return d.values() + + try: + dict_values = dict.values + except: + def dict_values(d): + return d.values() + + def dict_iter_items(d): + try: + return d.iteritems() + except: + return d.items() + + def dict_items(d): + return d.items() + + +try: + xrange = xrange +except: + #Python 3k does not have it + xrange = range + +try: + import itertools + izip = itertools.izip +except: + izip = zip + + +#======================================================================================================================= +# StringIO +#======================================================================================================================= +try: + from StringIO import StringIO +except: + from io import StringIO + + +#======================================================================================================================= +# get_pid +#======================================================================================================================= +def get_pid(): + try: + return os.getpid() + except AttributeError: + try: + #Jython does not have it! + import java.lang.management.ManagementFactory #@UnresolvedImport -- just for jython + pid = java.lang.management.ManagementFactory.getRuntimeMXBean().getName() + return pid.replace('@', '_') + except: + #ok, no pid available (will be unable to debug multiple processes) + return '000001' + +def clear_cached_thread_id(thread): + try: + del thread.__pydevd_id__ + except AttributeError: + pass + +#======================================================================================================================= +# get_thread_id +#======================================================================================================================= +def get_thread_id(thread): + try: + tid = thread.__pydevd_id__ + if tid is None: + # Fix for https://sw-brainwy.rhcloud.com/tracker/PyDev/645 + # if __pydevd_id__ is None, recalculate it... also, use an heuristic + # that gives us always the same id for the thread (using thread.ident or id(thread)). 
+ raise AttributeError() + except AttributeError: + _nextThreadIdLock.acquire() + try: + #We do a new check with the lock in place just to be sure that nothing changed + tid = getattr(thread, '__pydevd_id__', None) + if tid is None: + pid = get_pid() + try: + tid = thread.__pydevd_id__ = 'pid_%s_id_%s' % (pid, thread.get_ident()) + except: + # thread.ident isn't always there... (use id(thread) instead if it's not there). + tid = thread.__pydevd_id__ = 'pid_%s_id_%s' % (pid, id(thread)) + finally: + _nextThreadIdLock.release() + + return tid + +#=============================================================================== +# Null +#=============================================================================== +class Null: + """ + Gotten from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205 + """ + + def __init__(self, *args, **kwargs): + return None + + def __call__(self, *args, **kwargs): + return self + + def __getattr__(self, mname): + if len(mname) > 4 and mname[:2] == '__' and mname[-2:] == '__': + # Don't pretend to implement special method names. + raise AttributeError(mname) + return self + + def __setattr__(self, name, value): + return self + + def __delattr__(self, name): + return self + + def __repr__(self): + return "" + + def __str__(self): + return "Null" + + def __len__(self): + return 0 + + def __getitem__(self): + return self + + def __setitem__(self, *args, **kwargs): + pass + + def write(self, *args, **kwargs): + pass + + def __nonzero__(self): + return 0 + + def __iter__(self): + return iter(()) + + +def call_only_once(func): + ''' + To be used as a decorator + + @call_only_once + def func(): + print 'Calling func only this time' + + Actually, in PyDev it must be called as: + + func = call_only_once(func) to support older versions of Python. + ''' + def new_func(*args, **kwargs): + if not new_func._called: + new_func._called = True + return func(*args, **kwargs) + + new_func._called = False + return new_func + +if __name__ == '__main__': + if Null(): + sys.stdout.write('here\n') + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_custom_frames.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_custom_frames.py new file mode 100644 index 00000000..ca4e0e9d --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_custom_frames.py @@ -0,0 +1,133 @@ +from _pydevd_bundle.pydevd_constants import get_thread_id, Null +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +from _pydev_imps._pydev_saved_modules import thread, threading +import sys + +DEBUG = False + +#======================================================================================================================= +# CustomFramesContainer +#======================================================================================================================= +class CustomFramesContainer: + + # Actual Values initialized later on. + custom_frames_lock = None #: :type custom_frames_lock: threading.Lock + + custom_frames = None + + _next_frame_id = None + + _py_db_command_thread_event = None + + +def custom_frames_container_init(): #Note: no staticmethod on jython 2.1 (so, use free-function) + + CustomFramesContainer.custom_frames_lock = thread.allocate_lock() + + # custom_frames can only be accessed if properly locked with custom_frames_lock! + # Key is a string identifying the frame (as well as the thread it belongs to). + # Value is a CustomFrame. 
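As an aside, a small sketch of the Null helper defined in pydevd_constants above: it is a do-nothing stand-in that absorbs attribute access and calls, which is why _py_db_command_thread_event can start out as Null() (see the assignment just below) until the debugger installs a real event. The import path is the vendored module shown above; the usage is illustrative.

# Illustrative only: Null swallows attribute access and calls instead of failing.
from _pydevd_bundle.pydevd_constants import Null

placeholder = Null()
placeholder.set()                   # no-op rather than AttributeError
placeholder.notify.listeners('x')   # chained access and calls keep returning the same Null
assert not placeholder              # falsy: __nonzero__/__len__ return 0
assert list(placeholder) == []      # iterating yields nothing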
+ # + CustomFramesContainer.custom_frames = {} + + # Only to be used in this module + CustomFramesContainer._next_frame_id = 0 + + # This is the event we must set to release an internal process events. It's later set by the actual debugger + # when we do create the debugger. + CustomFramesContainer._py_db_command_thread_event = Null() + +#Initialize it the first time (it may be reinitialized later on when dealing with a fork). +custom_frames_container_init() + + +#======================================================================================================================= +# CustomFrame +#======================================================================================================================= +class CustomFrame: + + def __init__(self, name, frame, thread_id): + # 0 = string with the representation of that frame + self.name = name + + # 1 = the frame to show + self.frame = frame + + # 2 = an integer identifying the last time the frame was changed. + self.mod_time = 0 + + # 3 = the thread id of the given frame + self.thread_id = thread_id + + +def add_custom_frame(frame, name, thread_id): + CustomFramesContainer.custom_frames_lock.acquire() + try: + curr_thread_id = get_thread_id(threading.currentThread()) + next_id = CustomFramesContainer._next_frame_id = CustomFramesContainer._next_frame_id + 1 + + # Note: the frame id kept contains an id and thread information on the thread where the frame was added + # so that later on we can check if the frame is from the current thread by doing frame_id.endswith('|'+thread_id). + frame_id = '__frame__:%s|%s' % (next_id, curr_thread_id) + if DEBUG: + sys.stderr.write('add_custom_frame: %s (%s) %s %s\n' % ( + frame_id, get_abs_path_real_path_and_base_from_frame(frame)[-1], frame.f_lineno, frame.f_code.co_name)) + + CustomFramesContainer.custom_frames[frame_id] = CustomFrame(name, frame, thread_id) + CustomFramesContainer._py_db_command_thread_event.set() + return frame_id + finally: + CustomFramesContainer.custom_frames_lock.release() + +addCustomFrame = add_custom_frame # Backward compatibility + +def update_custom_frame(frame_id, frame, thread_id, name=None): + CustomFramesContainer.custom_frames_lock.acquire() + try: + if DEBUG: + sys.stderr.write('update_custom_frame: %s\n' % frame_id) + try: + old = CustomFramesContainer.custom_frames[frame_id] + if name is not None: + old.name = name + old.mod_time += 1 + old.thread_id = thread_id + except: + sys.stderr.write('Unable to get frame to replace: %s\n' % (frame_id,)) + import traceback;traceback.print_exc() + + CustomFramesContainer._py_db_command_thread_event.set() + finally: + CustomFramesContainer.custom_frames_lock.release() + + +def get_custom_frame(thread_id, frame_id): + ''' + :param thread_id: This should actually be the frame_id which is returned by add_custom_frame. 
+ :param frame_id: This is the actual id() of the frame + ''' + + CustomFramesContainer.custom_frames_lock.acquire() + try: + frame_id = int(frame_id) + f = CustomFramesContainer.custom_frames[thread_id].frame + while f is not None: + if id(f) == frame_id: + return f + f = f.f_back + finally: + f = None + CustomFramesContainer.custom_frames_lock.release() + + +def remove_custom_frame(frame_id): + CustomFramesContainer.custom_frames_lock.acquire() + try: + if DEBUG: + sys.stderr.write('remove_custom_frame: %s\n' % frame_id) + CustomFramesContainer.custom_frames.pop(frame_id, None) + CustomFramesContainer._py_db_command_thread_event.set() + finally: + CustomFramesContainer.custom_frames_lock.release() + +removeCustomFrame = remove_custom_frame # Backward compatibility diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c b/ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c new file mode 100644 index 00000000..d73fe5c9 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c @@ -0,0 +1,30941 @@ +/* Generated by Cython 0.26 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [], + "name": "_pydevd_bundle.pydevd_cython", + "sources": [ + "_pydevd_bundle/pydevd_cython.pyx" + ] + }, + "module_name": "_pydevd_bundle.pydevd_cython" +} +END: Cython Metadata */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03020000) + #error Cython requires Python 2.6+ or Python 3.2+. +#else +#define CYTHON_ABI "0_26" +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#define __PYX_COMMA , +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x03030000 || (PY_MAJOR_VERSION == 2 && PY_VERSION_HEX >= 0x02070000) + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_ASYNC_SLOTS + #define 
CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#if PY_VERSION_HEX < 0x030700A0 || !defined(METH_FASTCALL) + #ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + #endif + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject 
*self, PyObject **args, Py_ssize_t nargs); + typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject **args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast + #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) 
== Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#ifndef __has_attribute + #define __has_attribute(x) 0 +#endif +#ifndef __has_cpp_attribute + #define __has_cpp_attribute(x) 0 +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) +#ifdef _MSC_VER + #ifndef _MSC_STDINT_H_ + #if _MSC_VER < 1300 + typedef unsigned char uint8_t; + typedef unsigned int uint32_t; + #else + typedef unsigned __int8 uint8_t; + typedef unsigned __int32 uint32_t; + #endif + #endif +#else + #include +#endif +#ifndef CYTHON_FALLTHROUGH + #ifdef __cplusplus + #if __has_cpp_attribute(fallthrough) + #define CYTHON_FALLTHROUGH [[fallthrough]] + #elif __has_cpp_attribute(clang::fallthrough) + #define CYTHON_FALLTHROUGH [[clang::fallthrough]] + #endif + #endif + #ifndef CYTHON_FALLTHROUGH + #if __has_attribute(fallthrough) || (defined(__GNUC__) && defined(__attribute__)) + #define CYTHON_FALLTHROUGH __attribute__((fallthrough)) + #else + #define CYTHON_FALLTHROUGH + #endif + #endif +#endif + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; 
__pyx_clineno = __LINE__; goto Ln_error; \ +} + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE___pydevd_bundle__pydevd_cython +#define __PYX_HAVE_API___pydevd_bundle__pydevd_cython +#include +#include +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) && defined (_M_X64) + #define __Pyx_sst_abs(value) _abs64(value) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value) +#endif +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyObject_AsWritableString(s) ((char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsWritableUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsSString(s) ((const signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((const unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#if PY_MAJOR_VERSION < 3 +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) +{ + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#else +#define __Pyx_Py_UNICODE_strlen Py_UNICODE_strlen +#endif +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ +static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; } + +static PyObject *__pyx_m; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_cython_runtime; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static 
const char *__pyx_f[] = { + "_pydevd_bundle\\pydevd_cython.pyx", + "stringsource", + "type.pxd", +}; + +/*--- Type declarations ---*/ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper; +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer; + +/* "_pydevd_bundle/pydevd_cython.pyx":61 + * #======================================================================================================================= + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class PyDBAdditionalThreadInfo: # <<<<<<<<<<<<<< + * # ELSE + * # class PyDBAdditionalThreadInfo(object): + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo { + PyObject_HEAD + int pydev_state; + PyObject *pydev_step_stop; + int pydev_step_cmd; + int pydev_notify_kill; + PyObject *pydev_smart_step_stop; + int pydev_django_resolve_frame; + PyObject *pydev_call_from_jinja2; + PyObject *pydev_call_inside_jinja2; + int is_tracing; + PyObject *conditional_breakpoint_exception; + PyObject *pydev_message; + int suspend_type; + int pydev_next_line; + PyObject *pydev_func_name; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":218 + * #======================================================================================================================= + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class PyDBFrame: # <<<<<<<<<<<<<< + * # ELSE + * # class PyDBFrame: + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame { + PyObject_HEAD + struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_vtab; + PyObject *_args; + int should_skip; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":973 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class SafeCallWrapper: # <<<<<<<<<<<<<< + * cdef method_object + * def __init__(self, method_object): + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper { + PyObject_HEAD + PyObject *method_object; +}; + + +/* "_pydevd_bundle/pydevd_cython.pyx":985 + * Py_XDECREF (method_obj) + * return SafeCallWrapper(ret) if ret is not None else None + * cdef class ThreadTracer: # <<<<<<<<<<<<<< + * cdef public tuple _args; + * def __init__(self, tuple args): + */ +struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer { + PyObject_HEAD + PyObject *_args; +}; + + + +/* "_pydevd_bundle/pydevd_cython.pyx":218 + * #======================================================================================================================= + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef class PyDBFrame: # <<<<<<<<<<<<<< + * # ELSE + * # class PyDBFrame: + */ + +struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame { + PyObject *(*trace_dispatch)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *, PyObject *, int __pyx_skip_dispatch); +}; +static struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* 
(*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while (0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* GetModuleGlobalName.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, 
args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* KeywordStringCheck.proto */ +static CYTHON_INLINE int __Pyx_CheckKeywordStrings(PyObject *kwdict, const char* function_name, int kw_allowed); + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* GetAttr3.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = PyThreadState_GET(); +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* HasAttr.proto */ +static CYTHON_INLINE int __Pyx_HasAttr(PyObject *, PyObject *); + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* 
RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* StringJoin.proto */ +#if PY_MAJOR_VERSION < 3 +#define __Pyx_PyString_Join __Pyx_PyBytes_Join +#define __Pyx_PyBaseString_Join(s, v) (PyUnicode_CheckExact(s) ? PyUnicode_Join(s, v) : __Pyx_PyBytes_Join(s, v)) +#else +#define __Pyx_PyString_Join PyUnicode_Join +#define __Pyx_PyBaseString_Join PyUnicode_Join +#endif +#if CYTHON_COMPILING_IN_CPYTHON + #if PY_MAJOR_VERSION < 3 + #define __Pyx_PyBytes_Join _PyString_Join + #else + #define __Pyx_PyBytes_Join _PyBytes_Join + #endif +#else +static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values); +#endif + +/* PyObjectSetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o,n,NULL) +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_setattro)) + return tp->tp_setattro(obj, attr_name, value); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_setattr)) + return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value); +#endif + return PyObject_SetAttr(obj, attr_name, value); +} +#else +#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n) +#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v) +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* SwapException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSwap(type, value, tb) __Pyx__ExceptionSwap(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? 
(PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* None.proto */ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname); + +/* ArgTypeTest.proto */ +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); + +/* IncludeStringH.proto */ +#include <string.h> + +/* BytesEquals.proto */ +static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); + +/* UnicodeEquals.proto */ +static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); + +/* StrEquals.proto */ +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals +#else +#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals +#endif + +/* ExtTypeTest.proto */ +static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? result : (result == (eq == Py_EQ)); +} + +/* RaiseNoneIterError.proto */ +static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void); + +/* PyIntBinop.proto */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AndObjC(PyObject *op1, PyObject *op2, long intval, int inplace); +#else +#define __Pyx_PyInt_AndObjC(op1, op2, intval, inplace)\ + (inplace ? PyNumber_InPlaceAnd(op1, op2) : PyNumber_And(op1, op2)) +#endif + +/* dict_getitem_default.proto */ +static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value); + +/* PyErrExceptionMatches.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err) +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err); +#else +#define __Pyx_PyErr_ExceptionMatches(err) PyErr_ExceptionMatches(err) +#endif + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* PyDictContains.proto */ +static CYTHON_INLINE int __Pyx_PyDict_ContainsTF(PyObject* item, PyObject* dict, int eq) { + int result = PyDict_Contains(dict, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* PyIntBinop.proto */ +#if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, long intval, int inplace); +#else +#define __Pyx_PyInt_EqObjC(op1, op2, intval, inplace)\ + PyObject_RichCompare(op1, op2, Py_EQ) + #endif + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* SetupReduce.proto */ +static int __Pyx_setup_reduce(PyObject* type_obj); + +/* SetVTable.proto */ +static int __Pyx_SetVtable(PyObject *dict, void *vtable); + +/* PatchModuleWithCoroutine.proto */ +static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code); + +/* PatchInspect.proto */ +static PyObject* __Pyx_patch_inspect(PyObject* module); + +/* CLineInTraceback.proto */ +static int __Pyx_CLineForTraceback(int c_line); + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* PyIdentifierFromString.proto */ +#if !defined(__Pyx_PyIdentifier_FromString) +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) +#else + #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) +#endif +#endif + +/* ModuleImport.proto */ +static PyObject *__Pyx_ImportModule(const char *name); + +/* TypeImport.proto */ +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, size_t size, int strict); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg, int __pyx_skip_dispatch); /* proto*/ + +/* Module declarations from 'libc.string' */ + +/* Module declarations from 'libc.stdio' */ + +/* Module declarations from '__builtin__' */ + +/* Module declarations from 'cpython.type' */ +static PyTypeObject *__pyx_ptype_7cpython_4type_type = 0; + +/* Module declarations from 'cpython' */ + +/* Module declarations from 'cpython.object' */ + +/* Module declarations from 'cpython.ref' */ + +/* Module declarations from '_pydevd_bundle.pydevd_cython' */ +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame = 0; +static 
PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper = 0; +static PyTypeObject *__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer = 0; +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *, PyObject *); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *, PyObject *); /*proto*/ +#define __Pyx_MODULE_NAME "_pydevd_bundle.pydevd_cython" +int __pyx_module_is_main__pydevd_bundle__pydevd_cython = 0; + +/* Implementation of '_pydevd_bundle.pydevd_cython' */ +static PyObject *__pyx_builtin_RuntimeError; +static PyObject *__pyx_builtin_ImportError; +static PyObject *__pyx_builtin_eval; +static PyObject *__pyx_builtin_id; +static PyObject *__pyx_builtin_StopIteration; +static PyObject *__pyx_builtin_GeneratorExit; +static PyObject *__pyx_builtin_KeyboardInterrupt; +static PyObject *__pyx_builtin_AttributeError; +static PyObject *__pyx_builtin_SystemExit; +static const char __pyx_k_[] = ""; +static const char __pyx_k_f[] = "f"; +static const char __pyx_k_t[] = "t"; +static const char __pyx_k__5[] = "?"; +static const char __pyx_k_id[] = "id"; +static const char __pyx_k_os[] = "os"; +static const char __pyx_k_re[] = "re"; +static const char __pyx_k_tb[] = "tb"; +static const char __pyx_k_ALL[] = "ALL"; +static const char __pyx_k_arg[] = "arg"; +static const char __pyx_k_get[] = "get"; +static const char __pyx_k_msg[] = "msg"; +static const char __pyx_k_new[] = "__new__"; +static const char __pyx_k_pop[] = "pop"; +static const char __pyx_k_ret[] = "ret"; +static const char __pyx_k_run[] = "run"; +static const char __pyx_k_s_s[] = "%s.%s"; +static const char __pyx_k_sys[] = "sys"; +static const char __pyx_k_val[] = "val"; +static const char __pyx_k_None[] = "None"; +static const char __pyx_k_args[] = "args"; +static const char __pyx_k_call[] = "call"; +static const char __pyx_k_dict[] = "__dict__"; +static const char __pyx_k_eval[] = "eval"; +static const char __pyx_k_info[] = "info"; +static const char __pyx_k_join[] = "join"; +static const char __pyx_k_line[] = "line"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_path[] = "path"; +static const char __pyx_k_self[] = "self"; +static const char __pyx_k_stat[] = "stat"; +static const char __pyx_k_stop[] = "stop"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_Error[] = "\n\nError:\n"; +static const char __pyx_k_clear[] = "clear"; +static const char __pyx_k_debug[] = "debug"; +static const char __pyx_k_error[] = "error"; +static const char __pyx_k_etype[] = "etype"; +static const char __pyx_k_event[] = "event"; +static const char __pyx_k_frame[] = "frame"; +static const char __pyx_k_getId[] = "getId"; +static const char __pyx_k_ident[] = "ident"; +static const char __pyx_k_match[] = "match"; +static const char __pyx_k_py_db[] = "py_db"; +static const char __pyx_k_qname[] = "qname"; +static const char __pyx_k_stack[] = "stack"; +static const char 
__pyx_k_utf_8[] = "utf-8"; +static const char __pyx_k_value[] = "value"; +static const char __pyx_k_write[] = "write"; +static const char __pyx_k_args_2[] = "_args"; +static const char __pyx_k_call_2[] = "__call__"; +static const char __pyx_k_encode[] = "encode"; +static const char __pyx_k_f_back[] = "f_back"; +static const char __pyx_k_f_code[] = "f_code"; +static const char __pyx_k_getKey[] = "getKey"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_kwargs[] = "kwargs"; +static const char __pyx_k_module[] = "<module>"; +static const char __pyx_k_pickle[] = "pickle"; +static const char __pyx_k_plugin[] = "plugin"; +static const char __pyx_k_result[] = "result"; +static const char __pyx_k_return[] = "return"; +static const char __pyx_k_stderr[] = "stderr"; +static const char __pyx_k_thread[] = "thread"; +static const char __pyx_k_tracer[] = "_tracer"; +static const char __pyx_k_update[] = "update"; +static const char __pyx_k_IS_PY3K[] = "IS_PY3K"; +static const char __pyx_k_co_name[] = "co_name"; +static const char __pyx_k_compile[] = "compile"; +static const char __pyx_k_f_trace[] = "f_trace"; +static const char __pyx_k_getline[] = "getline"; +static const char __pyx_k_inspect[] = "inspect"; +static const char __pyx_k_invalid[] = ".invalid."; +static const char __pyx_k_os_path[] = "os.path"; +static const char __pyx_k_st_size[] = "st_size"; +static const char __pyx_k_suspend[] = "suspend"; +static const char __pyx_k_tb_next[] = "tb_next"; +static const char __pyx_k_toArray[] = "toArray"; +static const char __pyx_k_version[] = "version"; +static const char __pyx_k_SetTrace[] = "SetTrace"; +static const char __pyx_k_as_array[] = "as_array"; +static const char __pyx_k_basename[] = "basename"; +static const char __pyx_k_co_flags[] = "co_flags"; +static const char __pyx_k_entrySet[] = "entrySet"; +static const char __pyx_k_exc_info[] = "exc_info"; +static const char __pyx_k_execfile[] = "execfile"; +static const char __pyx_k_f_lineno[] = "f_lineno"; +static const char __pyx_k_f_locals[] = "f_locals"; +static const char __pyx_k_getValue[] = "getValue"; +static const char __pyx_k_pyx_type[] = "__pyx_type"; +static const char __pyx_k_quitting[] = "quitting"; +static const char __pyx_k_st_mtime[] = "st_mtime"; +static const char __pyx_k_tb_frame[] = "tb_frame"; +static const char __pyx_k_Condition[] = "Condition:\n"; +static const char __pyx_k_IS_JYTHON[] = "IS_JYTHON"; +static const char __pyx_k_STATE_RUN[] = "STATE_RUN"; +static const char __pyx_k_condition[] = "condition"; +static const char __pyx_k_exception[] = "exception"; +static const char __pyx_k_f_globals[] = "f_globals"; +static const char __pyx_k_func_name[] = "func_name"; +static const char __pyx_k_java_lang[] = "java.lang"; +static const char __pyx_k_linecache[] = "linecache"; +static const char __pyx_k_log_event[] = "log_event"; +static const char __pyx_k_new_frame[] = "new_frame"; +static const char __pyx_k_print_exc[] = "print_exc"; +static const char __pyx_k_pydev_log[] = "pydev_log"; +static const char __pyx_k_pydevd_py[] = "pydevd.py"; +static const char __pyx_k_pyx_state[] = "__pyx_state"; +static const char __pyx_k_tb_lineno[] = "tb_lineno"; +static const char __pyx_k_threading[] = "threading"; +static const char __pyx_k_traceback[] = "traceback"; +static const char __pyx_k_DONT_TRACE[] = "DONT_TRACE"; +static const char __pyx_k_PYDEV_FILE[] = "PYDEV_FILE"; +static const char __pyx_k_SystemExit[] = "SystemExit"; +static const char __pyx_k_accessible[] = "accessible"; +static const char 
__pyx_k_breakpoint[] = "breakpoint"; +static const char __pyx_k_checkcache[] = "checkcache"; +static const char __pyx_k_expression[] = "expression"; +static const char __pyx_k_pyx_vtable[] = "__pyx_vtable__"; +static const char __pyx_k_DEBUG_START[] = "DEBUG_START"; +static const char __pyx_k_ImportError[] = "ImportError"; +static const char __pyx_k_PickleError[] = "PickleError"; +static const char __pyx_k_breakpoints[] = "breakpoints"; +static const char __pyx_k_co_filename[] = "co_filename"; +static const char __pyx_k_just_raised[] = "just_raised"; +static const char __pyx_k_pydevd_vars[] = "pydevd_vars"; +static const char __pyx_k_set_suspend[] = "set_suspend"; +static const char __pyx_k_CO_GENERATOR[] = "CO_GENERATOR"; +static const char __pyx_k_RuntimeError[] = "RuntimeError"; +static const char __pyx_k_can_not_skip[] = "can_not_skip"; +static const char __pyx_k_not_in_scope[] = "not_in_scope"; +static const char __pyx_k_pydev_bundle[] = "_pydev_bundle"; +static const char __pyx_k_pyx_checksum[] = "__pyx_checksum"; +static const char __pyx_k_stringsource[] = "stringsource"; +static const char __pyx_k_thread_state[] = "thread_state"; +static const char __pyx_k_trace_return[] = "trace_return"; +static const char __pyx_k_CMD_SET_BREAK[] = "CMD_SET_BREAK"; +static const char __pyx_k_CMD_STEP_INTO[] = "CMD_STEP_INTO"; +static const char __pyx_k_CMD_STEP_OVER[] = "CMD_STEP_OVER"; +static const char __pyx_k_GeneratorExit[] = "GeneratorExit"; +static const char __pyx_k_IS_IRONPYTHON[] = "IS_IRONPYTHON"; +static const char __pyx_k_STATE_SUSPEND[] = "STATE_SUSPEND"; +static const char __pyx_k_StopIteration[] = "StopIteration"; +static const char __pyx_k_cmd_step_into[] = "cmd_step_into"; +static const char __pyx_k_cmd_step_over[] = "cmd_step_over"; +static const char __pyx_k_currentThread[] = "currentThread"; +static const char __pyx_k_extract_stack[] = "extract_stack"; +static const char __pyx_k_get_file_type[] = "get_file_type"; +static const char __pyx_k_get_func_name[] = "get_func_name"; +static const char __pyx_k_get_thread_id[] = "get_thread_id"; +static const char __pyx_k_main_debugger[] = "main_debugger"; +static const char __pyx_k_method_object[] = "method_object"; +static const char __pyx_k_original_call[] = "_original_call"; +static const char __pyx_k_pydev_message[] = "pydev_message"; +static const char __pyx_k_pydevd_bundle[] = "_pydevd_bundle"; +static const char __pyx_k_reduce_cython[] = "__reduce_cython__"; +static const char __pyx_k_thread_states[] = "thread_states"; +static const char __pyx_k_thread_tracer[] = "thread_tracer"; +static const char __pyx_k_AttributeError[] = "AttributeError"; +static const char __pyx_k_PYTHON_SUSPEND[] = "PYTHON_SUSPEND"; +static const char __pyx_k_TRACE_PROPERTY[] = "TRACE_PROPERTY"; +static const char __pyx_k_co_firstlineno[] = "co_firstlineno"; +static const char __pyx_k_current_frames[] = "_current_frames"; +static const char __pyx_k_get_breakpoint[] = "get_breakpoint"; +static const char __pyx_k_output_checker[] = "output_checker"; +static const char __pyx_k_pydevd_tracing[] = "pydevd_tracing"; +static const char __pyx_k_suspend_policy[] = "suspend_policy"; +static const char __pyx_k_trace_dispatch[] = "trace_dispatch"; +static const char __pyx_k_CMD_RUN_TO_LINE[] = "CMD_RUN_TO_LINE"; +static const char __pyx_k_CMD_STEP_RETURN[] = "CMD_STEP_RETURN"; +static const char __pyx_k_IgnoreException[] = "[^#]*#.*@IgnoreException"; +static const char __pyx_k_additional_info[] = "additional_info"; +static const char __pyx_k_do_wait_suspend[] = 
"do_wait_suspend"; +static const char __pyx_k_exception_break[] = "exception_break"; +static const char __pyx_k_is_thread_alive[] = "is_thread_alive"; +static const char __pyx_k_org_python_core[] = "org.python.core"; +static const char __pyx_k_setstate_cython[] = "__setstate_cython__"; +static const char __pyx_k_thread_analyser[] = "thread_analyser"; +static const char __pyx_k_thread_to_state[] = "thread_to_state"; +static const char __pyx_k_trace_exception[] = "trace_exception"; +static const char __pyx_k_DEBUG_START_PY3K[] = "DEBUG_START_PY3K"; +static const char __pyx_k_asyncio_analyser[] = "asyncio_analyser"; +static const char __pyx_k_dict_iter_values[] = "dict_iter_values"; +static const char __pyx_k_getDeclaredField[] = "getDeclaredField"; +static const char __pyx_k_handle_exception[] = "handle_exception"; +static const char __pyx_k_ignore_libraries[] = "ignore_libraries"; +static const char __pyx_k_KeyboardInterrupt[] = "KeyboardInterrupt"; +static const char __pyx_k_cachedThreadState[] = "cachedThreadState"; +static const char __pyx_k_is_filter_enabled[] = "is_filter_enabled"; +static const char __pyx_k_pydev_execfile_py[] = "_pydev_execfile.py"; +static const char __pyx_k_pydevd_dont_trace[] = "pydevd_dont_trace"; +static const char __pyx_k_pydevd_file_utils[] = "pydevd_file_utils"; +static const char __pyx_k_should_trace_hook[] = "should_trace_hook"; +static const char __pyx_k_signature_factory[] = "signature_factory"; +static const char __pyx_k_tid_to_last_frame[] = "_tid_to_last_frame"; +static const char __pyx_k_RETURN_VALUES_DICT[] = "RETURN_VALUES_DICT"; +static const char __pyx_k_ThreadStateMapping[] = "ThreadStateMapping"; +static const char __pyx_k_cline_in_traceback[] = "cline_in_traceback"; +static const char __pyx_k_globalThreadStates[] = "globalThreadStates"; +static const char __pyx_k_global_cache_skips[] = "global_cache_skips"; +static const char __pyx_k_pydev_do_not_trace[] = "pydev_do_not_trace"; +static const char __pyx_k_show_return_values[] = "show_return_values"; +static const char __pyx_k_CMD_SMART_STEP_INTO[] = "CMD_SMART_STEP_INTO"; +static const char __pyx_k_is_filter_libraries[] = "is_filter_libraries"; +static const char __pyx_k_IGNORE_EXCEPTION_TAG[] = "IGNORE_EXCEPTION_TAG"; +static const char __pyx_k_NoSuchFieldException[] = "NoSuchFieldException"; +static const char __pyx_k_default_return_value[] = "default_return_value"; +static const char __pyx_k_get_clsname_for_code[] = "get_clsname_for_code"; +static const char __pyx_k_overwrite_prev_trace[] = "overwrite_prev_trace"; +static const char __pyx_k_remove_return_values[] = "remove_return_values"; +static const char __pyx_k_CMD_STEP_INTO_MY_CODE[] = "CMD_STEP_INTO_MY_CODE"; +static const char __pyx_k_filename_to_stat_info[] = "filename_to_stat_info"; +static const char __pyx_k_format_exception_only[] = "format_exception_only"; +static const char __pyx_k_is_ignored_by_filters[] = "is_ignored_by_filters"; +static const char __pyx_k_termination_event_set[] = "_termination_event_set"; +static const char __pyx_k_CMD_SET_NEXT_STATEMENT[] = "CMD_SET_NEXT_STATEMENT"; +static const char __pyx_k_add_exception_to_frame[] = "add_exception_to_frame"; +static const char __pyx_k_has_plugin_line_breaks[] = "has_plugin_line_breaks"; +static const char __pyx_k_kill_all_pydev_threads[] = "kill_all_pydev_threads"; +static const char __pyx_k_pyx_unpickle_PyDBFrame[] = "__pyx_unpickle_PyDBFrame"; +static const char __pyx_k_threadingCurrentThread[] = "threadingCurrentThread"; +static const char 
__pyx_k_pydevd_traceproperty_py[] = "pydevd_traceproperty.py"; +static const char __pyx_k_finish_debugging_session[] = "_finish_debugging_session"; +static const char __pyx_k_first_breakpoint_reached[] = "first_breakpoint_reached"; +static const char __pyx_k_get_exception_breakpoint[] = "get_exception_breakpoint"; +static const char __pyx_k_global_cache_frame_skips[] = "global_cache_frame_skips"; +static const char __pyx_k_process_thread_not_alive[] = "_process_thread_not_alive"; +static const char __pyx_k_should_stop_on_exception[] = "should_stop_on_exception"; +static const char __pyx_k_CMD_STEP_CAUGHT_EXCEPTION[] = "CMD_STEP_CAUGHT_EXCEPTION"; +static const char __pyx_k_first_appearance_in_scope[] = "first_appearance_in_scope"; +static const char __pyx_k_pydevd_bundle_pydevd_comm[] = "_pydevd_bundle.pydevd_comm"; +static const char __pyx_k_pyx_unpickle_ThreadTracer[] = "__pyx_unpickle_ThreadTracer"; +static const char __pyx_k_remove_return_values_flag[] = "remove_return_values_flag"; +static const char __pyx_k_send_signature_call_trace[] = "send_signature_call_trace"; +static const char __pyx_k_suspend_all_other_threads[] = "suspend_all_other_threads"; +static const char __pyx_k_add_additional_frame_by_id[] = "add_additional_frame_by_id"; +static const char __pyx_k_break_on_caught_exceptions[] = "break_on_caught_exceptions"; +static const char __pyx_k_notify_on_first_raise_only[] = "notify_on_first_raise_only"; +static const char __pyx_k_pydevd_bundle_pydevd_utils[] = "_pydevd_bundle.pydevd_utils"; +static const char __pyx_k_State_s_Stop_s_Cmd_s_Kill_s[] = "State:%s Stop:%s Cmd: %s Kill:%s"; +static const char __pyx_k_handle_breakpoint_condition[] = "handle_breakpoint_condition"; +static const char __pyx_k_has_plugin_exception_breaks[] = "has_plugin_exception_breaks"; +static const char __pyx_k_pydevd_bundle_pydevd_cython[] = "_pydevd_bundle.pydevd_cython"; +static const char __pyx_k_send_caught_exception_stack[] = "send_caught_exception_stack"; +static const char __pyx_k_send_signature_return_trace[] = "send_signature_return_trace"; +static const char __pyx_k_handle_breakpoint_expression[] = "handle_breakpoint_expression"; +static const char __pyx_k_pyx_unpickle_SafeCallWrapper[] = "__pyx_unpickle_SafeCallWrapper"; +static const char __pyx_k_NORM_PATHS_AND_BASE_CONTAINER[] = "NORM_PATHS_AND_BASE_CONTAINER"; +static const char __pyx_k_remove_additional_frame_by_id[] = "remove_additional_frame_by_id"; +static const char __pyx_k_pydevd_bundle_pydevd_constants[] = "_pydevd_bundle.pydevd_constants"; +static const char __pyx_k_pydevd_bundle_pydevd_signature[] = "_pydevd_bundle.pydevd_signature"; +static const char __pyx_k_pyx_unpickle_PyDBAdditionalThr[] = "__pyx_unpickle_PyDBAdditionalThreadInfo"; +static const char __pyx_k_Ignore_exception_s_in_library_s[] = "Ignore exception %s in library %s"; +static const char __pyx_k_get_abs_path_real_path_and_base[] = "get_abs_path_real_path_and_base_from_frame"; +static const char __pyx_k_pydev_bundle_pydev_is_thread_al[] = "_pydev_bundle.pydev_is_thread_alive"; +static const char __pyx_k_pydev_imps__pydev_saved_modules[] = "_pydev_imps._pydev_saved_modules"; +static const char __pyx_k_pydevd_bundle_pydevd_additional[] = "_pydevd_bundle.pydevd_additional_thread_info_regular"; +static const char __pyx_k_pydevd_bundle_pydevd_breakpoint[] = "_pydevd_bundle.pydevd_breakpoints"; +static const char __pyx_k_pydevd_bundle_pydevd_cython_pyx[] = "_pydevd_bundle\\pydevd_cython.pyx"; +static const char __pyx_k_pydevd_bundle_pydevd_dont_trace[] = 
"_pydevd_bundle.pydevd_dont_trace_files"; +static const char __pyx_k_pydevd_bundle_pydevd_frame_util[] = "_pydevd_bundle.pydevd_frame_utils"; +static const char __pyx_k_pydevd_bundle_pydevd_kill_all_p[] = "_pydevd_bundle.pydevd_kill_all_pydevd_threads"; +static const char __pyx_k_set_trace_for_frame_and_parents[] = "set_trace_for_frame_and_parents"; +static const char __pyx_k_suspend_on_breakpoint_exception[] = "suspend_on_breakpoint_exception"; +static const char __pyx_k_Error_while_evaluating_expressio[] = "Error while evaluating expression: %s\n"; +static const char __pyx_k_Incompatible_checksums_s_vs_0x3d[] = "Incompatible checksums (%s vs 0x3d7902a = (_args))"; +static const char __pyx_k_Incompatible_checksums_s_vs_0x77[] = "Incompatible checksums (%s vs 0x77c077b = (method_object))"; +static const char __pyx_k_Incompatible_checksums_s_vs_0xa9[] = "Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))"; +static const char __pyx_k_Incompatible_checksums_s_vs_0xfa[] = "Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))"; +static const char __pyx_k_Unable_to_proceed_sys__current_f[] = "Unable to proceed (sys._current_frames not available in this Python implementation)."; +static const char __pyx_k_break_on_exceptions_thrown_in_sa[] = "break_on_exceptions_thrown_in_same_context"; +static const char __pyx_k_conditional_breakpoint_exception[] = "conditional_breakpoint_exception"; +static const char __pyx_k_filename_to_lines_where_exceptio[] = "filename_to_lines_where_exceptions_are_ignored"; +static const char __pyx_k_ignore_exceptions_thrown_in_line[] = "ignore_exceptions_thrown_in_lines_with_ignore_exception"; +static const char __pyx_k_send_caught_exception_stack_proc[] = "send_caught_exception_stack_proceeded"; +static PyObject *__pyx_kp_s_; +static PyObject *__pyx_n_s_ALL; +static PyObject *__pyx_n_s_AttributeError; +static PyObject *__pyx_n_s_CMD_RUN_TO_LINE; +static PyObject *__pyx_n_s_CMD_SET_BREAK; +static PyObject *__pyx_n_s_CMD_SET_NEXT_STATEMENT; +static PyObject *__pyx_n_s_CMD_SMART_STEP_INTO; +static PyObject *__pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION; +static PyObject *__pyx_n_s_CMD_STEP_INTO; +static PyObject *__pyx_n_s_CMD_STEP_INTO_MY_CODE; +static PyObject *__pyx_n_s_CMD_STEP_OVER; +static PyObject *__pyx_n_s_CMD_STEP_RETURN; +static PyObject *__pyx_n_s_CO_GENERATOR; +static PyObject *__pyx_kp_s_Condition; +static PyObject *__pyx_n_s_DEBUG_START; +static PyObject *__pyx_n_s_DEBUG_START_PY3K; +static PyObject *__pyx_n_s_DONT_TRACE; +static PyObject *__pyx_kp_s_Error; +static PyObject *__pyx_kp_s_Error_while_evaluating_expressio; +static PyObject *__pyx_n_s_GeneratorExit; +static PyObject *__pyx_n_s_IGNORE_EXCEPTION_TAG; +static PyObject *__pyx_n_s_IS_IRONPYTHON; +static PyObject *__pyx_n_s_IS_JYTHON; +static PyObject *__pyx_n_s_IS_PY3K; +static PyObject *__pyx_kp_s_IgnoreException; +static PyObject *__pyx_kp_s_Ignore_exception_s_in_library_s; +static PyObject *__pyx_n_s_ImportError; +static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x3d; +static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0x77; +static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0xa9; +static PyObject *__pyx_kp_s_Incompatible_checksums_s_vs_0xfa; +static PyObject *__pyx_n_s_KeyboardInterrupt; +static PyObject 
*__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER; +static PyObject *__pyx_n_s_NoSuchFieldException; +static PyObject *__pyx_n_s_None; +static PyObject *__pyx_n_s_PYDEV_FILE; +static PyObject *__pyx_n_s_PYTHON_SUSPEND; +static PyObject *__pyx_n_s_PickleError; +static PyObject *__pyx_n_s_RETURN_VALUES_DICT; +static PyObject *__pyx_n_s_RuntimeError; +static PyObject *__pyx_n_s_STATE_RUN; +static PyObject *__pyx_n_s_STATE_SUSPEND; +static PyObject *__pyx_n_s_SetTrace; +static PyObject *__pyx_kp_s_State_s_Stop_s_Cmd_s_Kill_s; +static PyObject *__pyx_n_s_StopIteration; +static PyObject *__pyx_n_s_SystemExit; +static PyObject *__pyx_n_s_TRACE_PROPERTY; +static PyObject *__pyx_n_s_ThreadStateMapping; +static PyObject *__pyx_kp_s_Unable_to_proceed_sys__current_f; +static PyObject *__pyx_kp_s__5; +static PyObject *__pyx_n_s_accessible; +static PyObject *__pyx_n_s_add_additional_frame_by_id; +static PyObject *__pyx_n_s_add_exception_to_frame; +static PyObject *__pyx_n_s_additional_info; +static PyObject *__pyx_n_s_arg; +static PyObject *__pyx_n_s_args; +static PyObject *__pyx_n_s_args_2; +static PyObject *__pyx_n_s_as_array; +static PyObject *__pyx_n_s_asyncio_analyser; +static PyObject *__pyx_n_s_basename; +static PyObject *__pyx_n_s_break_on_caught_exceptions; +static PyObject *__pyx_n_s_break_on_exceptions_thrown_in_sa; +static PyObject *__pyx_n_s_breakpoint; +static PyObject *__pyx_n_s_breakpoints; +static PyObject *__pyx_n_s_cachedThreadState; +static PyObject *__pyx_n_s_call; +static PyObject *__pyx_n_s_call_2; +static PyObject *__pyx_n_s_can_not_skip; +static PyObject *__pyx_n_s_checkcache; +static PyObject *__pyx_n_s_clear; +static PyObject *__pyx_n_s_cline_in_traceback; +static PyObject *__pyx_n_s_cmd_step_into; +static PyObject *__pyx_n_s_cmd_step_over; +static PyObject *__pyx_n_s_co_filename; +static PyObject *__pyx_n_s_co_firstlineno; +static PyObject *__pyx_n_s_co_flags; +static PyObject *__pyx_n_s_co_name; +static PyObject *__pyx_n_s_compile; +static PyObject *__pyx_n_s_condition; +static PyObject *__pyx_n_s_conditional_breakpoint_exception; +static PyObject *__pyx_n_s_currentThread; +static PyObject *__pyx_n_s_current_frames; +static PyObject *__pyx_n_s_debug; +static PyObject *__pyx_n_s_default_return_value; +static PyObject *__pyx_n_s_dict; +static PyObject *__pyx_n_s_dict_iter_values; +static PyObject *__pyx_n_s_do_wait_suspend; +static PyObject *__pyx_n_s_encode; +static PyObject *__pyx_n_s_entrySet; +static PyObject *__pyx_n_s_error; +static PyObject *__pyx_n_s_etype; +static PyObject *__pyx_n_s_eval; +static PyObject *__pyx_n_s_event; +static PyObject *__pyx_n_s_exc_info; +static PyObject *__pyx_n_s_exception; +static PyObject *__pyx_n_s_exception_break; +static PyObject *__pyx_n_s_execfile; +static PyObject *__pyx_n_s_expression; +static PyObject *__pyx_n_s_extract_stack; +static PyObject *__pyx_n_s_f; +static PyObject *__pyx_n_s_f_back; +static PyObject *__pyx_n_s_f_code; +static PyObject *__pyx_n_s_f_globals; +static PyObject *__pyx_n_s_f_lineno; +static PyObject *__pyx_n_s_f_locals; +static PyObject *__pyx_n_s_f_trace; +static PyObject *__pyx_n_s_filename_to_lines_where_exceptio; +static PyObject *__pyx_n_s_filename_to_stat_info; +static PyObject *__pyx_n_s_finish_debugging_session; +static PyObject *__pyx_n_s_first_appearance_in_scope; +static PyObject *__pyx_n_s_first_breakpoint_reached; +static PyObject *__pyx_n_s_format_exception_only; +static PyObject *__pyx_n_s_frame; +static PyObject *__pyx_n_s_func_name; +static PyObject *__pyx_n_s_get; +static PyObject 
*__pyx_n_s_getDeclaredField; +static PyObject *__pyx_n_s_getId; +static PyObject *__pyx_n_s_getKey; +static PyObject *__pyx_n_s_getValue; +static PyObject *__pyx_n_s_get_abs_path_real_path_and_base; +static PyObject *__pyx_n_s_get_breakpoint; +static PyObject *__pyx_n_s_get_clsname_for_code; +static PyObject *__pyx_n_s_get_exception_breakpoint; +static PyObject *__pyx_n_s_get_file_type; +static PyObject *__pyx_n_s_get_func_name; +static PyObject *__pyx_n_s_get_thread_id; +static PyObject *__pyx_n_s_getline; +static PyObject *__pyx_n_s_globalThreadStates; +static PyObject *__pyx_n_s_global_cache_frame_skips; +static PyObject *__pyx_n_s_global_cache_skips; +static PyObject *__pyx_n_s_handle_breakpoint_condition; +static PyObject *__pyx_n_s_handle_breakpoint_expression; +static PyObject *__pyx_n_s_handle_exception; +static PyObject *__pyx_n_s_has_plugin_exception_breaks; +static PyObject *__pyx_n_s_has_plugin_line_breaks; +static PyObject *__pyx_n_s_id; +static PyObject *__pyx_n_s_ident; +static PyObject *__pyx_n_s_ignore_exceptions_thrown_in_line; +static PyObject *__pyx_n_s_ignore_libraries; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_info; +static PyObject *__pyx_n_s_inspect; +static PyObject *__pyx_kp_s_invalid; +static PyObject *__pyx_n_s_is_filter_enabled; +static PyObject *__pyx_n_s_is_filter_libraries; +static PyObject *__pyx_n_s_is_ignored_by_filters; +static PyObject *__pyx_n_s_is_thread_alive; +static PyObject *__pyx_n_s_java_lang; +static PyObject *__pyx_n_s_join; +static PyObject *__pyx_n_s_just_raised; +static PyObject *__pyx_n_s_kill_all_pydev_threads; +static PyObject *__pyx_n_s_kwargs; +static PyObject *__pyx_n_s_line; +static PyObject *__pyx_n_s_linecache; +static PyObject *__pyx_n_s_log_event; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_main_debugger; +static PyObject *__pyx_n_s_match; +static PyObject *__pyx_n_s_method_object; +static PyObject *__pyx_kp_s_module; +static PyObject *__pyx_n_s_msg; +static PyObject *__pyx_n_s_new; +static PyObject *__pyx_n_s_new_frame; +static PyObject *__pyx_n_s_not_in_scope; +static PyObject *__pyx_n_s_notify_on_first_raise_only; +static PyObject *__pyx_n_s_org_python_core; +static PyObject *__pyx_n_s_original_call; +static PyObject *__pyx_n_s_os; +static PyObject *__pyx_n_s_os_path; +static PyObject *__pyx_n_s_output_checker; +static PyObject *__pyx_n_s_overwrite_prev_trace; +static PyObject *__pyx_n_s_path; +static PyObject *__pyx_n_s_pickle; +static PyObject *__pyx_n_s_plugin; +static PyObject *__pyx_n_s_pop; +static PyObject *__pyx_n_s_print_exc; +static PyObject *__pyx_n_s_process_thread_not_alive; +static PyObject *__pyx_n_s_py_db; +static PyObject *__pyx_n_s_pydev_bundle; +static PyObject *__pyx_n_s_pydev_bundle_pydev_is_thread_al; +static PyObject *__pyx_n_s_pydev_do_not_trace; +static PyObject *__pyx_kp_s_pydev_execfile_py; +static PyObject *__pyx_n_s_pydev_imps__pydev_saved_modules; +static PyObject *__pyx_n_s_pydev_log; +static PyObject *__pyx_n_s_pydev_message; +static PyObject *__pyx_n_s_pydevd_bundle; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_additional; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_breakpoint; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_comm; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_constants; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_cython; +static PyObject *__pyx_kp_s_pydevd_bundle_pydevd_cython_pyx; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_dont_trace; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_frame_util; +static PyObject 
*__pyx_n_s_pydevd_bundle_pydevd_kill_all_p; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_signature; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_utils; +static PyObject *__pyx_n_s_pydevd_dont_trace; +static PyObject *__pyx_n_s_pydevd_file_utils; +static PyObject *__pyx_kp_s_pydevd_py; +static PyObject *__pyx_kp_s_pydevd_traceproperty_py; +static PyObject *__pyx_n_s_pydevd_tracing; +static PyObject *__pyx_n_s_pydevd_vars; +static PyObject *__pyx_n_s_pyx_checksum; +static PyObject *__pyx_n_s_pyx_state; +static PyObject *__pyx_n_s_pyx_type; +static PyObject *__pyx_n_s_pyx_unpickle_PyDBAdditionalThr; +static PyObject *__pyx_n_s_pyx_unpickle_PyDBFrame; +static PyObject *__pyx_n_s_pyx_unpickle_SafeCallWrapper; +static PyObject *__pyx_n_s_pyx_unpickle_ThreadTracer; +static PyObject *__pyx_n_s_pyx_vtable; +static PyObject *__pyx_n_s_qname; +static PyObject *__pyx_n_s_quitting; +static PyObject *__pyx_n_s_re; +static PyObject *__pyx_n_s_reduce_cython; +static PyObject *__pyx_n_s_remove_additional_frame_by_id; +static PyObject *__pyx_n_s_remove_return_values; +static PyObject *__pyx_n_s_remove_return_values_flag; +static PyObject *__pyx_n_s_result; +static PyObject *__pyx_n_s_ret; +static PyObject *__pyx_n_s_return; +static PyObject *__pyx_n_s_run; +static PyObject *__pyx_kp_s_s_s; +static PyObject *__pyx_n_s_self; +static PyObject *__pyx_n_s_send_caught_exception_stack; +static PyObject *__pyx_n_s_send_caught_exception_stack_proc; +static PyObject *__pyx_n_s_send_signature_call_trace; +static PyObject *__pyx_n_s_send_signature_return_trace; +static PyObject *__pyx_n_s_set_suspend; +static PyObject *__pyx_n_s_set_trace_for_frame_and_parents; +static PyObject *__pyx_n_s_setstate_cython; +static PyObject *__pyx_n_s_should_stop_on_exception; +static PyObject *__pyx_n_s_should_trace_hook; +static PyObject *__pyx_n_s_show_return_values; +static PyObject *__pyx_n_s_signature_factory; +static PyObject *__pyx_n_s_st_mtime; +static PyObject *__pyx_n_s_st_size; +static PyObject *__pyx_n_s_stack; +static PyObject *__pyx_n_s_stat; +static PyObject *__pyx_n_s_stderr; +static PyObject *__pyx_n_s_stop; +static PyObject *__pyx_kp_s_stringsource; +static PyObject *__pyx_n_s_suspend; +static PyObject *__pyx_n_s_suspend_all_other_threads; +static PyObject *__pyx_n_s_suspend_on_breakpoint_exception; +static PyObject *__pyx_n_s_suspend_policy; +static PyObject *__pyx_n_s_sys; +static PyObject *__pyx_n_s_t; +static PyObject *__pyx_n_s_tb; +static PyObject *__pyx_n_s_tb_frame; +static PyObject *__pyx_n_s_tb_lineno; +static PyObject *__pyx_n_s_tb_next; +static PyObject *__pyx_n_s_termination_event_set; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_thread; +static PyObject *__pyx_n_s_thread_analyser; +static PyObject *__pyx_n_s_thread_state; +static PyObject *__pyx_n_s_thread_states; +static PyObject *__pyx_n_s_thread_to_state; +static PyObject *__pyx_n_s_thread_tracer; +static PyObject *__pyx_n_s_threading; +static PyObject *__pyx_n_s_threadingCurrentThread; +static PyObject *__pyx_n_s_tid_to_last_frame; +static PyObject *__pyx_n_s_toArray; +static PyObject *__pyx_n_s_trace_dispatch; +static PyObject *__pyx_n_s_trace_exception; +static PyObject *__pyx_n_s_trace_return; +static PyObject *__pyx_n_s_traceback; +static PyObject *__pyx_n_s_tracer; +static PyObject *__pyx_n_s_update; +static PyObject *__pyx_kp_s_utf_8; +static PyObject *__pyx_n_s_val; +static PyObject *__pyx_n_s_value; +static PyObject *__pyx_n_s_version; +static PyObject *__pyx_n_s_write; +static PyObject 
*__pyx_pf_14_pydevd_bundle_13pydevd_cython__current_frames(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_2_current_frames(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_2iter_frames(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_t); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_4__str__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto 
*/ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_6__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_8__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_4send_signature_call_trace(CYTHON_UNUSED PyObject *__pyx_self, CYTHON_UNUSED PyObject *__pyx_v_args, CYTHON_UNUSED PyObject *__pyx_v_kwargs); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_6handle_breakpoint_condition(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_py_db, PyObject *__pyx_v_info, PyObject *__pyx_v_breakpoint, PyObject *__pyx_v_new_frame, PyObject *__pyx_v_default_return_value); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_8handle_breakpoint_expression(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_breakpoint, PyObject *__pyx_v_info, PyObject *__pyx_v_new_frame); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_2set_suspend(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_4do_wait_suspend(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs); 
/* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_6trace_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8trace_return(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_10should_stop_on_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, CYTHON_UNUSED PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_12handle_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_14get_func_name(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_16show_return_values(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_18remove_return_values(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_main_debugger, PyObject *__pyx_v_frame); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_20trace_dispatch(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_22__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_24__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_10send_signature_call_trace(CYTHON_UNUSED PyObject *__pyx_self, CYTHON_UNUSED PyObject *__pyx_v_args, CYTHON_UNUSED PyObject *__pyx_v_kwargs); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12trace_dispatch(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_py_db, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v_method_object); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_2__call__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v_args); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_4__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_6__setstate_cython__(struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_args); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_value); /* proto */ +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_4__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_6__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_14__call__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_16__pyx_unpickle_PyDBAdditionalThreadInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_18__pyx_unpickle_PyDBFrame(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_20__pyx_unpickle_SafeCallWrapper(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_22__pyx_unpickle_ThreadTracer(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state); /* proto */ +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/ +static PyObject *__pyx_int_0; +static PyObject *__pyx_int_1; +static PyObject *__pyx_int_4; +static PyObject *__pyx_int_32; +static PyObject *__pyx_int_64458794; +static PyObject *__pyx_int_125568891; +static PyObject *__pyx_int_177881921; +static PyObject *__pyx_int_262582659; +static PyObject *__pyx_int_neg_1; +static PyObject *__pyx_tuple__2; 
+static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__4; +static PyObject *__pyx_tuple__6; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__8; +static PyObject *__pyx_tuple__11; +static PyObject *__pyx_tuple__12; +static PyObject *__pyx_tuple__14; +static PyObject *__pyx_tuple__15; +static PyObject *__pyx_tuple__16; +static PyObject *__pyx_tuple__17; +static PyObject *__pyx_tuple__19; +static PyObject *__pyx_tuple__21; +static PyObject *__pyx_tuple__23; +static PyObject *__pyx_tuple__25; +static PyObject *__pyx_tuple__27; +static PyObject *__pyx_tuple__29; +static PyObject *__pyx_tuple__31; +static PyObject *__pyx_tuple__33; +static PyObject *__pyx_codeobj__9; +static PyObject *__pyx_codeobj__10; +static PyObject *__pyx_codeobj__13; +static PyObject *__pyx_codeobj__18; +static PyObject *__pyx_codeobj__20; +static PyObject *__pyx_codeobj__22; +static PyObject *__pyx_codeobj__24; +static PyObject *__pyx_codeobj__26; +static PyObject *__pyx_codeobj__28; +static PyObject *__pyx_codeobj__30; +static PyObject *__pyx_codeobj__32; +static PyObject *__pyx_codeobj__34; + +/* "_pydevd_bundle/pydevd_cython.pyx":27 + * thread_states = cachedThreadState.get(ThreadStateMapping) + * + * def _current_frames(): # <<<<<<<<<<<<<< + * as_array = thread_states.entrySet().toArray() + * ret = {} + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_1_current_frames(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_1_current_frames = {"_current_frames", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_1_current_frames, METH_NOARGS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_1_current_frames(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_current_frames (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython__current_frames(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython__current_frames(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_v_as_array = NULL; + PyObject *__pyx_v_ret = NULL; + PyObject *__pyx_v_thread_to_state = NULL; + PyObject *__pyx_v_thread = NULL; + PyObject *__pyx_v_thread_state = NULL; + PyObject *__pyx_v_frame = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + Py_ssize_t __pyx_t_5; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + __Pyx_RefNannySetupContext("_current_frames", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":28 + * + * def _current_frames(): + * as_array = thread_states.entrySet().toArray() # <<<<<<<<<<<<<< + * ret = {} + * for thread_to_state in as_array: + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_thread_states); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_entrySet); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + 
__Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_3) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 28, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_toArray); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_2) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else { + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_as_array = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":29 + * def _current_frames(): + * as_array = thread_states.entrySet().toArray() + * ret = {} # <<<<<<<<<<<<<< + * for thread_to_state in as_array: + * thread = thread_to_state.getKey() + */ + __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 29, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_ret = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":30 + * as_array = thread_states.entrySet().toArray() + * ret = {} + * for thread_to_state in as_array: # <<<<<<<<<<<<<< + * thread = thread_to_state.getKey() + * if thread is None: + */ + if (likely(PyList_CheckExact(__pyx_v_as_array)) || PyTuple_CheckExact(__pyx_v_as_array)) { + __pyx_t_1 = __pyx_v_as_array; __Pyx_INCREF(__pyx_t_1); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + } else { + __pyx_t_5 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_v_as_array); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 30, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 30, __pyx_L1_error) + } + for (;;) { + if (likely(!__pyx_t_6)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 30, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 30, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_4 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_5); __Pyx_INCREF(__pyx_t_4); __pyx_t_5++; if (unlikely(0 < 0)) __PYX_ERR(0, 30, __pyx_L1_error) + #else + __pyx_t_4 = PySequence_ITEM(__pyx_t_1, __pyx_t_5); __pyx_t_5++; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 30, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } + } else { + __pyx_t_4 = __pyx_t_6(__pyx_t_1); + if (unlikely(!__pyx_t_4)) { + 
PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 30, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_4); + } + __Pyx_XDECREF_SET(__pyx_v_thread_to_state, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":31 + * ret = {} + * for thread_to_state in as_array: + * thread = thread_to_state.getKey() # <<<<<<<<<<<<<< + * if thread is None: + * continue + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread_to_state, __pyx_n_s_getKey); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 31, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_3) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 31, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_4 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 31, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF_SET(__pyx_v_thread, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":32 + * for thread_to_state in as_array: + * thread = thread_to_state.getKey() + * if thread is None: # <<<<<<<<<<<<<< + * continue + * thread_state = thread_to_state.getValue() + */ + __pyx_t_7 = (__pyx_v_thread == Py_None); + __pyx_t_8 = (__pyx_t_7 != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":33 + * thread = thread_to_state.getKey() + * if thread is None: + * continue # <<<<<<<<<<<<<< + * thread_state = thread_to_state.getValue() + * if thread_state is None: + */ + goto __pyx_L3_continue; + + /* "_pydevd_bundle/pydevd_cython.pyx":32 + * for thread_to_state in as_array: + * thread = thread_to_state.getKey() + * if thread is None: # <<<<<<<<<<<<<< + * continue + * thread_state = thread_to_state.getValue() + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":34 + * if thread is None: + * continue + * thread_state = thread_to_state.getValue() # <<<<<<<<<<<<<< + * if thread_state is None: + * continue + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread_to_state, __pyx_n_s_getValue); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 34, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_3) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 34, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_4 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 34, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF_SET(__pyx_v_thread_state, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":35 + * continue + * thread_state = thread_to_state.getValue() + * if thread_state is None: # <<<<<<<<<<<<<< + * continue + * + */ + 
__pyx_t_8 = (__pyx_v_thread_state == Py_None); + __pyx_t_7 = (__pyx_t_8 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":36 + * thread_state = thread_to_state.getValue() + * if thread_state is None: + * continue # <<<<<<<<<<<<<< + * + * frame = thread_state.frame + */ + goto __pyx_L3_continue; + + /* "_pydevd_bundle/pydevd_cython.pyx":35 + * continue + * thread_state = thread_to_state.getValue() + * if thread_state is None: # <<<<<<<<<<<<<< + * continue + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":38 + * continue + * + * frame = thread_state.frame # <<<<<<<<<<<<<< + * if frame is None: + * continue + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread_state, __pyx_n_s_frame); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 38, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_XDECREF_SET(__pyx_v_frame, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":39 + * + * frame = thread_state.frame + * if frame is None: # <<<<<<<<<<<<<< + * continue + * + */ + __pyx_t_7 = (__pyx_v_frame == Py_None); + __pyx_t_8 = (__pyx_t_7 != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":40 + * frame = thread_state.frame + * if frame is None: + * continue # <<<<<<<<<<<<<< + * + * ret[thread.getId()] = frame + */ + goto __pyx_L3_continue; + + /* "_pydevd_bundle/pydevd_cython.pyx":39 + * + * frame = thread_state.frame + * if frame is None: # <<<<<<<<<<<<<< + * continue + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":42 + * continue + * + * ret[thread.getId()] = frame # <<<<<<<<<<<<<< + * return ret + * + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_thread, __pyx_n_s_getId); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 42, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_3) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 42, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_4 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 42, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (unlikely(PyDict_SetItem(__pyx_v_ret, __pyx_t_4, __pyx_v_frame) < 0)) __PYX_ERR(0, 42, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":30 + * as_array = thread_states.entrySet().toArray() + * ret = {} + * for thread_to_state in as_array: # <<<<<<<<<<<<<< + * thread = thread_to_state.getKey() + * if thread is None: + */ + __pyx_L3_continue:; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":43 + * + * ret[thread.getId()] = frame + * return ret # <<<<<<<<<<<<<< + * + * elif IS_IRONPYTHON: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_ret); + __pyx_r = __pyx_v_ret; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":27 + * thread_states = cachedThreadState.get(ThreadStateMapping) + * + * def _current_frames(): # <<<<<<<<<<<<<< + * as_array = thread_states.entrySet().toArray() + * ret = {} + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + 
__Pyx_AddTraceback("_pydevd_bundle.pydevd_cython._current_frames", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_as_array); + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XDECREF(__pyx_v_thread_to_state); + __Pyx_XDECREF(__pyx_v_thread); + __Pyx_XDECREF(__pyx_v_thread_state); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":49 + * + * # IronPython doesn't have it. Let's use our workaround... + * def _current_frames(): # <<<<<<<<<<<<<< + * return _tid_to_last_frame + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_3_current_frames(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_3_current_frames = {"_current_frames", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_3_current_frames, METH_NOARGS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_3_current_frames(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("_current_frames (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_2_current_frames(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_2_current_frames(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("_current_frames", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":50 + * # IronPython doesn't have it. Let's use our workaround... + * def _current_frames(): + * return _tid_to_last_frame # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_tid_to_last_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 50, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":49 + * + * # IronPython doesn't have it. Let's use our workaround... 
+ * def _current_frames(): # <<<<<<<<<<<<<< + * return _tid_to_last_frame + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython._current_frames", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":100 + * # ENDIF + * + * def __init__(self): # <<<<<<<<<<<<<< + * self.pydev_state = STATE_RUN + * self.pydev_step_stop = None + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) { + __Pyx_RaiseArgtupleInvalid("__init__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;} + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__init__", 0))) return -1; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":101 + * + * def __init__(self): + * self.pydev_state = STATE_RUN # <<<<<<<<<<<<<< + * self.pydev_step_stop = None + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_STATE_RUN); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 101, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_self->pydev_state = __pyx_t_2; + + /* "_pydevd_bundle/pydevd_cython.pyx":102 + * def __init__(self): + * self.pydev_state = STATE_RUN + * self.pydev_step_stop = None # <<<<<<<<<<<<<< + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + * self.pydev_notify_kill = False + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_step_stop); + __pyx_v_self->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":103 + * self.pydev_state = STATE_RUN + * self.pydev_step_stop = None + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. # <<<<<<<<<<<<<< + * self.pydev_notify_kill = False + * self.pydev_smart_step_stop = None + */ + __pyx_v_self->pydev_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":104 + * self.pydev_step_stop = None + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. 
+ * self.pydev_notify_kill = False # <<<<<<<<<<<<<< + * self.pydev_smart_step_stop = None + * self.pydev_django_resolve_frame = False + */ + __pyx_v_self->pydev_notify_kill = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":105 + * self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. + * self.pydev_notify_kill = False + * self.pydev_smart_step_stop = None # <<<<<<<<<<<<<< + * self.pydev_django_resolve_frame = False + * self.pydev_call_from_jinja2 = None + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_stop); + __pyx_v_self->pydev_smart_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":106 + * self.pydev_notify_kill = False + * self.pydev_smart_step_stop = None + * self.pydev_django_resolve_frame = False # <<<<<<<<<<<<<< + * self.pydev_call_from_jinja2 = None + * self.pydev_call_inside_jinja2 = None + */ + __pyx_v_self->pydev_django_resolve_frame = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":107 + * self.pydev_smart_step_stop = None + * self.pydev_django_resolve_frame = False + * self.pydev_call_from_jinja2 = None # <<<<<<<<<<<<<< + * self.pydev_call_inside_jinja2 = None + * self.is_tracing = False + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_v_self->pydev_call_from_jinja2 = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":108 + * self.pydev_django_resolve_frame = False + * self.pydev_call_from_jinja2 = None + * self.pydev_call_inside_jinja2 = None # <<<<<<<<<<<<<< + * self.is_tracing = False + * self.conditional_breakpoint_exception = None + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_v_self->pydev_call_inside_jinja2 = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":109 + * self.pydev_call_from_jinja2 = None + * self.pydev_call_inside_jinja2 = None + * self.is_tracing = False # <<<<<<<<<<<<<< + * self.conditional_breakpoint_exception = None + * self.pydev_message = '' + */ + __pyx_v_self->is_tracing = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":110 + * self.pydev_call_inside_jinja2 = None + * self.is_tracing = False + * self.conditional_breakpoint_exception = None # <<<<<<<<<<<<<< + * self.pydev_message = '' + * self.suspend_type = PYTHON_SUSPEND + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->conditional_breakpoint_exception); + __Pyx_DECREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_v_self->conditional_breakpoint_exception = ((PyObject*)Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":111 + * self.is_tracing = False + * self.conditional_breakpoint_exception = None + * self.pydev_message = '' # <<<<<<<<<<<<<< + * self.suspend_type = PYTHON_SUSPEND + * self.pydev_next_line = -1 + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_GIVEREF(__pyx_kp_s_); + __Pyx_GOTREF(__pyx_v_self->pydev_message); + __Pyx_DECREF(__pyx_v_self->pydev_message); + __pyx_v_self->pydev_message = __pyx_kp_s_; + + /* "_pydevd_bundle/pydevd_cython.pyx":112 + * self.conditional_breakpoint_exception = None + * self.pydev_message = '' + * self.suspend_type = PYTHON_SUSPEND # <<<<<<<<<<<<<< + * self.pydev_next_line = -1 + * self.pydev_func_name = '.invalid.' 
# Must match the type in cython + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_PYTHON_SUSPEND); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 112, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 112, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_self->suspend_type = __pyx_t_2; + + /* "_pydevd_bundle/pydevd_cython.pyx":113 + * self.pydev_message = '' + * self.suspend_type = PYTHON_SUSPEND + * self.pydev_next_line = -1 # <<<<<<<<<<<<<< + * self.pydev_func_name = '.invalid.' # Must match the type in cython + * + */ + __pyx_v_self->pydev_next_line = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":114 + * self.suspend_type = PYTHON_SUSPEND + * self.pydev_next_line = -1 + * self.pydev_func_name = '.invalid.' # Must match the type in cython # <<<<<<<<<<<<<< + * + * + */ + __Pyx_INCREF(__pyx_kp_s_invalid); + __Pyx_GIVEREF(__pyx_kp_s_invalid); + __Pyx_GOTREF(__pyx_v_self->pydev_func_name); + __Pyx_DECREF(__pyx_v_self->pydev_func_name); + __pyx_v_self->pydev_func_name = __pyx_kp_s_invalid; + + /* "_pydevd_bundle/pydevd_cython.pyx":100 + * # ENDIF + * + * def __init__(self): # <<<<<<<<<<<<<< + * self.pydev_state = STATE_RUN + * self.pydev_step_stop = None + */ + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":117 + * + * + * def iter_frames(self, t): # <<<<<<<<<<<<<< + * #sys._current_frames(): dictionary with thread id -> topmost frame + * current_frames = _current_frames() + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_3iter_frames(PyObject *__pyx_v_self, PyObject *__pyx_v_t); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_3iter_frames(PyObject *__pyx_v_self, PyObject *__pyx_v_t) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("iter_frames (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_2iter_frames(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_t)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_2iter_frames(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_t) { + PyObject *__pyx_v_current_frames = NULL; + PyObject *__pyx_v_v = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + __Pyx_RefNannySetupContext("iter_frames", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":119 + * def iter_frames(self, t): + * #sys._current_frames(): dictionary with thread id -> topmost frame + * current_frames = _current_frames() # <<<<<<<<<<<<<< + * v = current_frames.get(t.ident) + * if v is not None: + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_current_frames); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 119, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_3) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 119, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 119, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_current_frames = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":120 + * #sys._current_frames(): dictionary with thread id -> topmost frame + * current_frames = _current_frames() + * v = current_frames.get(t.ident) # <<<<<<<<<<<<<< + * if v is not None: + * return [v] + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_current_frames, __pyx_n_s_get); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_t, __pyx_n_s_ident); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_4) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 120, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_v = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":121 + * current_frames = _current_frames() + * v = current_frames.get(t.ident) + * if v is not None: # <<<<<<<<<<<<<< + * return [v] + * return [] + */ + __pyx_t_6 = (__pyx_v_v != Py_None); + __pyx_t_7 = (__pyx_t_6 != 0); + if (__pyx_t_7) { + + 
/* "_pydevd_bundle/pydevd_cython.pyx":122 + * v = current_frames.get(t.ident) + * if v is not None: + * return [v] # <<<<<<<<<<<<<< + * return [] + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 122, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_v); + __Pyx_GIVEREF(__pyx_v_v); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_v_v); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":121 + * current_frames = _current_frames() + * v = current_frames.get(t.ident) + * if v is not None: # <<<<<<<<<<<<<< + * return [v] + * return [] + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":123 + * if v is not None: + * return [v] + * return [] # <<<<<<<<<<<<<< + * + * def __str__(self): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":117 + * + * + * def iter_frames(self, t): # <<<<<<<<<<<<<< + * #sys._current_frames(): dictionary with thread id -> topmost frame + * current_frames = _current_frames() + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.iter_frames", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_current_frames); + __Pyx_XDECREF(__pyx_v_v); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":125 + * return [] + * + * def __str__(self): # <<<<<<<<<<<<<< + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_5__str__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_5__str__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__str__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_4__str__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_4__str__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("__str__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":126 + * + * def __str__(self): + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( # <<<<<<<<<<<<<< + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + * + */ + __Pyx_XDECREF(__pyx_r); + + /* "_pydevd_bundle/pydevd_cython.pyx":127 + * def __str__(self): + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) # <<<<<<<<<<<<<< + * + * import linecache + */ + 
__pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_state); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_step_cmd); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_notify_kill); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); + __Pyx_INCREF(__pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_v_self->pydev_step_stop); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":126 + * + * def __str__(self): + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( # <<<<<<<<<<<<<< + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + * + */ + __pyx_t_3 = __Pyx_PyString_Format(__pyx_kp_s_State_s_Stop_s_Cmd_s_Kill_s, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 126, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":125 + * return [] + * + * def __str__(self): # <<<<<<<<<<<<<< + * return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__str__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":67 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef public int pydev_state; # <<<<<<<<<<<<<< + * cdef public object pydev_step_stop; # Actually, it's a frame or None + * cdef public int pydev_step_cmd; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = 
__Pyx_PyInt_From_int(__pyx_v_self->pydev_state); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 67, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_state.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 67, __pyx_L1_error) + __pyx_v_self->pydev_state = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_state.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":68 + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cdef public int pydev_state; + * cdef public object pydev_step_stop; # Actually, it's a frame or None # <<<<<<<<<<<<<< + * cdef public int pydev_step_cmd; + * cdef public bint pydev_notify_kill; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_step_stop); + __pyx_r = __pyx_v_self->pydev_step_stop; + goto 
__pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_step_stop); + __pyx_v_self->pydev_step_stop = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_step_stop); + __pyx_v_self->pydev_step_stop = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":69 + * cdef public int pydev_state; + * cdef public object pydev_step_stop; # Actually, it's a frame or None + * cdef public int pydev_step_cmd; # <<<<<<<<<<<<<< + * cdef public bint pydev_notify_kill; + * cdef public object pydev_smart_step_stop; # Actually, it's a frame or None + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_step_cmd); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 69, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_step_cmd.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 69, __pyx_L1_error) + __pyx_v_self->pydev_step_cmd = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_step_cmd.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":70 + * cdef public object pydev_step_stop; # Actually, it's a frame or None + * cdef public int pydev_step_cmd; + * cdef public bint pydev_notify_kill; # <<<<<<<<<<<<<< + * cdef public object pydev_smart_step_stop; # Actually, it's a frame or None + * cdef public bint pydev_django_resolve_frame; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_notify_kill); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 70, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_notify_kill.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 70, __pyx_L1_error) + __pyx_v_self->pydev_notify_kill = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_notify_kill.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":71 + * cdef public int pydev_step_cmd; + * cdef public bint pydev_notify_kill; + * cdef public object pydev_smart_step_stop; # Actually, it's a frame or None # <<<<<<<<<<<<<< + * cdef public bint pydev_django_resolve_frame; + * cdef public object pydev_call_from_jinja2; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + 
__pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_smart_step_stop); + __pyx_r = __pyx_v_self->pydev_smart_step_stop; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_stop); + __pyx_v_self->pydev_smart_step_stop = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v_self->pydev_smart_step_stop); + __pyx_v_self->pydev_smart_step_stop = Py_None; + + /* function exit code */ + __pyx_r = 0; + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":72 + * cdef public bint pydev_notify_kill; + * cdef public object pydev_smart_step_stop; # Actually, it's a frame or None + * cdef public bint pydev_django_resolve_frame; # <<<<<<<<<<<<<< + * cdef public object pydev_call_from_jinja2; + * cdef public object pydev_call_inside_jinja2; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_django_resolve_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 72, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_django_resolve_frame.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 72, __pyx_L1_error) + __pyx_v_self->pydev_django_resolve_frame = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + 
__Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_django_resolve_frame.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":73 + * cdef public object pydev_smart_step_stop; # Actually, it's a frame or None + * cdef public bint pydev_django_resolve_frame; + * cdef public object pydev_call_from_jinja2; # <<<<<<<<<<<<<< + * cdef public object pydev_call_inside_jinja2; + * cdef public bint is_tracing; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_r = __pyx_v_self->pydev_call_from_jinja2; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_v_self->pydev_call_from_jinja2 = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int 
__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_from_jinja2); + __pyx_v_self->pydev_call_from_jinja2 = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":74 + * cdef public bint pydev_django_resolve_frame; + * cdef public object pydev_call_from_jinja2; + * cdef public object pydev_call_inside_jinja2; # <<<<<<<<<<<<<< + * cdef public bint is_tracing; + * cdef public tuple conditional_breakpoint_exception; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_r = __pyx_v_self->pydev_call_inside_jinja2; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__", 0); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + __Pyx_GOTREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_v_self->pydev_call_inside_jinja2 = __pyx_v_value; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v_self->pydev_call_inside_jinja2); + __pyx_v_self->pydev_call_inside_jinja2 = Py_None; + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":75 + * cdef public object pydev_call_from_jinja2; + * cdef public object pydev_call_inside_jinja2; + * cdef public bint is_tracing; # <<<<<<<<<<<<<< + * cdef public tuple conditional_breakpoint_exception; + * cdef public str pydev_message; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->is_tracing); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 75, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; 
+ __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.is_tracing.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 75, __pyx_L1_error) + __pyx_v_self->is_tracing = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.is_tracing.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":76 + * cdef public object pydev_call_inside_jinja2; + * cdef public bint is_tracing; + * cdef public tuple conditional_breakpoint_exception; # <<<<<<<<<<<<<< + * cdef public str pydev_message; + * cdef public int suspend_type; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_r = __pyx_v_self->conditional_breakpoint_exception; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int 
__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyTuple_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 76, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->conditional_breakpoint_exception); + __Pyx_DECREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_v_self->conditional_breakpoint_exception = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.conditional_breakpoint_exception.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->conditional_breakpoint_exception); + __Pyx_DECREF(__pyx_v_self->conditional_breakpoint_exception); + __pyx_v_self->conditional_breakpoint_exception = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":77 + * cdef public bint is_tracing; + * cdef public tuple conditional_breakpoint_exception; + * 
cdef public str pydev_message; # <<<<<<<<<<<<<< + * cdef public int suspend_type; + * cdef public int pydev_next_line; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_message); + __pyx_r = __pyx_v_self->pydev_message; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 77, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->pydev_message); + __Pyx_DECREF(__pyx_v_self->pydev_message); + __pyx_v_self->pydev_message = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_message.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int 
__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_message); + __Pyx_DECREF(__pyx_v_self->pydev_message); + __pyx_v_self->pydev_message = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":78 + * cdef public tuple conditional_breakpoint_exception; + * cdef public str pydev_message; + * cdef public int suspend_type; # <<<<<<<<<<<<<< + * cdef public int pydev_next_line; + * cdef public str pydev_func_name; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->suspend_type); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 78, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.suspend_type.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_2__set__(((struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 78, __pyx_L1_error) + __pyx_v_self->suspend_type = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.suspend_type.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":79 + * cdef public str pydev_message; + * cdef public int suspend_type; + * cdef public int pydev_next_line; # <<<<<<<<<<<<<< + * cdef public str pydev_func_name; + * # ELSE + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_next_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 79, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_next_line.__get__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit 
code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + __Pyx_RefNannySetupContext("__set__", 0); + __pyx_t_1 = __Pyx_PyInt_As_int(__pyx_v_value); if (unlikely((__pyx_t_1 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 79, __pyx_L1_error) + __pyx_v_self->pydev_next_line = __pyx_t_1; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_next_line.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":80 + * cdef public int suspend_type; + * cdef public int pydev_next_line; + * cdef public str pydev_func_name; # <<<<<<<<<<<<<< + * # ELSE + * # __slots__ = [ + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->pydev_func_name); + __pyx_r = __pyx_v_self->pydev_func_name; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyString_CheckExact(__pyx_v_value))||((__pyx_v_value) == 
Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 80, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_self->pydev_func_name); + __Pyx_DECREF(__pyx_v_self->pydev_func_name); + __pyx_v_self->pydev_func_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.pydev_func_name.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->pydev_func_name); + __Pyx_DECREF(__pyx_v_self->pydev_func_name); + __pyx_v_self->pydev_func_name = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.suspend_type) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_6__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_6__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self) { + int __pyx_v_use_setstate; + PyObject *__pyx_v_state = NULL; + PyObject *__pyx_v__dict = NULL; + PyObject *__pyx_r 
= NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_t_10; + int __pyx_t_11; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * cdef bint use_setstate + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.suspend_type) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_self->is_tracing); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_django_resolve_frame); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_next_line); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_self->pydev_notify_kill); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_state); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_self->pydev_step_cmd); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_self->suspend_type); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyTuple_New(14); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_v_self->conditional_breakpoint_exception); + __Pyx_GIVEREF(__pyx_v_self->conditional_breakpoint_exception); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_self->conditional_breakpoint_exception); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_self->pydev_call_from_jinja2); + __Pyx_GIVEREF(__pyx_v_self->pydev_call_from_jinja2); + PyTuple_SET_ITEM(__pyx_t_8, 2, __pyx_v_self->pydev_call_from_jinja2); + __Pyx_INCREF(__pyx_v_self->pydev_call_inside_jinja2); + __Pyx_GIVEREF(__pyx_v_self->pydev_call_inside_jinja2); + PyTuple_SET_ITEM(__pyx_t_8, 3, __pyx_v_self->pydev_call_inside_jinja2); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_8, 4, __pyx_t_2); + __Pyx_INCREF(__pyx_v_self->pydev_func_name); + __Pyx_GIVEREF(__pyx_v_self->pydev_func_name); + PyTuple_SET_ITEM(__pyx_t_8, 5, __pyx_v_self->pydev_func_name); + __Pyx_INCREF(__pyx_v_self->pydev_message); + __Pyx_GIVEREF(__pyx_v_self->pydev_message); + PyTuple_SET_ITEM(__pyx_t_8, 6, __pyx_v_self->pydev_message); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_8, 7, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_8, 8, __pyx_t_4); + __Pyx_INCREF(__pyx_v_self->pydev_smart_step_stop); + __Pyx_GIVEREF(__pyx_v_self->pydev_smart_step_stop); + PyTuple_SET_ITEM(__pyx_t_8, 9, __pyx_v_self->pydev_smart_step_stop); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_8, 10, __pyx_t_5); + 
__Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_8, 11, __pyx_t_6); + __Pyx_INCREF(__pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_v_self->pydev_step_stop); + PyTuple_SET_ITEM(__pyx_t_8, 12, __pyx_v_self->pydev_step_stop); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_8, 13, __pyx_t_7); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_t_3 = 0; + __pyx_t_4 = 0; + __pyx_t_5 = 0; + __pyx_t_6 = 0; + __pyx_t_7 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_8); + __pyx_t_8 = 0; + + /* "(tree fragment)":4 + * cdef bint use_setstate + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.suspend_type) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += _dict, + */ + __pyx_t_8 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_v__dict = __pyx_t_8; + __pyx_t_8 = 0; + + /* "(tree fragment)":5 + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.suspend_type) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + __pyx_t_9 = (__pyx_v__dict != Py_None); + __pyx_t_10 = (__pyx_t_9 != 0); + if (__pyx_t_10) { + + /* "(tree fragment)":6 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += _dict, # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v__dict); + __pyx_t_7 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_7)); + __pyx_t_7 = 0; + + /* "(tree fragment)":7 + * if _dict is not None: + * state += _dict, + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":5 + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.suspend_type) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":9 + * use_setstate = 
True + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, None), state + */ + /*else*/ { + __pyx_t_9 = (__pyx_v_self->conditional_breakpoint_exception != ((PyObject*)Py_None)); + __pyx_t_11 = (__pyx_t_9 != 0); + if (!__pyx_t_11) { + } else { + __pyx_t_10 = __pyx_t_11; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_11 = (__pyx_v_self->pydev_call_from_jinja2 != Py_None); + __pyx_t_9 = (__pyx_t_11 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_self->pydev_call_inside_jinja2 != Py_None); + __pyx_t_11 = (__pyx_t_9 != 0); + if (!__pyx_t_11) { + } else { + __pyx_t_10 = __pyx_t_11; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_11 = (__pyx_v_self->pydev_func_name != ((PyObject*)Py_None)); + __pyx_t_9 = (__pyx_t_11 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_self->pydev_message != ((PyObject*)Py_None)); + __pyx_t_11 = (__pyx_t_9 != 0); + if (!__pyx_t_11) { + } else { + __pyx_t_10 = __pyx_t_11; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_11 = (__pyx_v_self->pydev_smart_step_stop != Py_None); + __pyx_t_9 = (__pyx_t_11 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_self->pydev_step_stop != Py_None); + __pyx_t_11 = (__pyx_t_9 != 0); + __pyx_t_10 = __pyx_t_11; + __pyx_L4_bool_binop_done:; + __pyx_v_use_setstate = __pyx_t_10; + } + __pyx_L3:; + + /* "(tree fragment)":10 + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, None), state + * else: + */ + __pyx_t_10 = (__pyx_v_use_setstate != 0); + if (__pyx_t_10) { + + /* "(tree fragment)":11 + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None + * if use_setstate: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, state) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_PyDBAdditionalThr); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyTuple_New(3); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_177881921); + 
__Pyx_GIVEREF(__pyx_int_177881921); + PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_int_177881921); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_8, 2, Py_None); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_8); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_v_state); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __pyx_r = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L0; + + /* "(tree fragment)":10 + * else: + * use_setstate = self.conditional_breakpoint_exception is not None or self.pydev_call_from_jinja2 is not None or self.pydev_call_inside_jinja2 is not None or self.pydev_func_name is not None or self.pydev_message is not None or self.pydev_smart_step_stop is not None or self.pydev_step_stop is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, None), state + * else: + */ + } + + /* "(tree fragment)":13 + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, None), state + * else: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_PyDBAdditionalThr); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = PyTuple_New(3); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_177881921); + __Pyx_GIVEREF(__pyx_int_177881921); + PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_int_177881921); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_8, 2, __pyx_v_state); + __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_8); + __pyx_t_6 = 0; + __pyx_t_8 = 0; + __pyx_r = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self.conditional_breakpoint_exception, self.is_tracing, self.pydev_call_from_jinja2, self.pydev_call_inside_jinja2, self.pydev_django_resolve_frame, self.pydev_func_name, self.pydev_message, self.pydev_next_line, self.pydev_notify_kill, self.pydev_smart_step_stop, self.pydev_state, self.pydev_step_cmd, self.pydev_step_stop, self.suspend_type) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + 
__Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":14 + * else: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_8__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_8__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":15 + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 15, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":14 + * else: + * return __pyx_unpickle_PyDBAdditionalThreadInfo, (type(self), 0xa9a4341, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":155 + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): # <<<<<<<<<<<<<< + * pass + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_5send_signature_call_trace(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef 
__pyx_mdef_14_pydevd_bundle_13pydevd_cython_5send_signature_call_trace = {"send_signature_call_trace", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_5send_signature_call_trace, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_5send_signature_call_trace(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED PyObject *__pyx_v_args = 0; + CYTHON_UNUSED PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("send_signature_call_trace (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "send_signature_call_trace", 1))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_4send_signature_call_trace(__pyx_self, __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_4send_signature_call_trace(CYTHON_UNUSED PyObject *__pyx_self, CYTHON_UNUSED PyObject *__pyx_v_args, CYTHON_UNUSED PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("send_signature_call_trace", 0); + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":167 + * + * + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): # <<<<<<<<<<<<<< + * condition = breakpoint.condition + * try: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_7handle_breakpoint_condition(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_7handle_breakpoint_condition = {"handle_breakpoint_condition", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_7handle_breakpoint_condition, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_7handle_breakpoint_condition(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_py_db = 0; + PyObject *__pyx_v_info = 0; + PyObject *__pyx_v_breakpoint = 0; + PyObject *__pyx_v_new_frame = 0; + PyObject *__pyx_v_default_return_value = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("handle_breakpoint_condition (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_py_db,&__pyx_n_s_info,&__pyx_n_s_breakpoint,&__pyx_n_s_new_frame,&__pyx_n_s_default_return_value,0}; + PyObject* values[5] = {0,0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + CYTHON_FALLTHROUGH; + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_py_db)) != 
0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_info)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_condition", 1, 5, 5, 1); __PYX_ERR(0, 167, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_breakpoint)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_condition", 1, 5, 5, 2); __PYX_ERR(0, 167, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_new_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_condition", 1, 5, 5, 3); __PYX_ERR(0, 167, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 4: + if (likely((values[4] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_default_return_value)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_condition", 1, 5, 5, 4); __PYX_ERR(0, 167, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "handle_breakpoint_condition") < 0)) __PYX_ERR(0, 167, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 5) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + values[4] = PyTuple_GET_ITEM(__pyx_args, 4); + } + __pyx_v_py_db = values[0]; + __pyx_v_info = values[1]; + __pyx_v_breakpoint = values[2]; + __pyx_v_new_frame = values[3]; + __pyx_v_default_return_value = values[4]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_condition", 1, 5, 5, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 167, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.handle_breakpoint_condition", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_6handle_breakpoint_condition(__pyx_self, __pyx_v_py_db, __pyx_v_info, __pyx_v_breakpoint, __pyx_v_new_frame, __pyx_v_default_return_value); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_6handle_breakpoint_condition(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_py_db, PyObject *__pyx_v_info, PyObject *__pyx_v_breakpoint, PyObject *__pyx_v_new_frame, PyObject *__pyx_v_default_return_value) { + PyObject *__pyx_v_condition = NULL; + PyObject *__pyx_v_val = NULL; + PyObject *__pyx_v_msg = NULL; + CYTHON_UNUSED int __pyx_v_stop; + PyObject *__pyx_v_etype = NULL; + PyObject *__pyx_v_value = NULL; + PyObject *__pyx_v_tb = NULL; + PyObject *__pyx_v_error = NULL; + PyObject *__pyx_v_stack = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = 
NULL; + PyObject *(*__pyx_t_17)(PyObject *); + int __pyx_t_18; + int __pyx_t_19; + char const *__pyx_t_20; + PyObject *__pyx_t_21 = NULL; + PyObject *__pyx_t_22 = NULL; + PyObject *__pyx_t_23 = NULL; + PyObject *__pyx_t_24 = NULL; + PyObject *__pyx_t_25 = NULL; + PyObject *__pyx_t_26 = NULL; + PyObject *__pyx_t_27 = NULL; + __Pyx_RefNannySetupContext("handle_breakpoint_condition", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":168 + * + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): + * condition = breakpoint.condition # <<<<<<<<<<<<<< + * try: + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_condition); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 168, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_condition = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":169 + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): + * condition = breakpoint.condition + * try: # <<<<<<<<<<<<<< + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) + * if not val: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":170 + * condition = breakpoint.condition + * try: + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) # <<<<<<<<<<<<<< + * if not val: + * return default_return_value + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_new_frame, __pyx_n_s_f_globals); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 170, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_new_frame, __pyx_n_s_f_locals); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 170, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 170, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_INCREF(__pyx_v_condition); + __Pyx_GIVEREF(__pyx_v_condition); + PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_condition); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_1); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_5); + __pyx_t_1 = 0; + __pyx_t_5 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_eval, __pyx_t_6, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 170, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_val = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":171 + * try: + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) + * if not val: # <<<<<<<<<<<<<< + * return default_return_value + * + */ + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_v_val); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 171, __pyx_L3_error) + __pyx_t_8 = ((!__pyx_t_7) != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":172 + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) + * if not val: + * return default_return_value # <<<<<<<<<<<<<< + * + * except: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_default_return_value); + __pyx_r = __pyx_v_default_return_value; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":171 + * try: + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) + * if not val: # <<<<<<<<<<<<<< + * return default_return_value + * + */ + } + + /* 
"_pydevd_bundle/pydevd_cython.pyx":169 + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): + * condition = breakpoint.condition + * try: # <<<<<<<<<<<<<< + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) + * if not val: + */ + } + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L8_try_end; + __pyx_L3_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":174 + * return default_return_value + * + * except: # <<<<<<<<<<<<<< + * if type(condition) != type(''): + * if hasattr(condition, 'encode'): + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.handle_breakpoint_condition", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_1) < 0) __PYX_ERR(0, 174, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":175 + * + * except: + * if type(condition) != type(''): # <<<<<<<<<<<<<< + * if hasattr(condition, 'encode'): + * condition = condition.encode('utf-8') + */ + __pyx_t_9 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_condition)), ((PyObject *)Py_TYPE(__pyx_kp_s_)), Py_NE); __Pyx_XGOTREF(__pyx_t_9); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 175, __pyx_L5_except_error) + __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_9); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 175, __pyx_L5_except_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":176 + * except: + * if type(condition) != type(''): + * if hasattr(condition, 'encode'): # <<<<<<<<<<<<<< + * condition = condition.encode('utf-8') + * + */ + __pyx_t_8 = __Pyx_HasAttr(__pyx_v_condition, __pyx_n_s_encode); if (unlikely(__pyx_t_8 == -1)) __PYX_ERR(0, 176, __pyx_L5_except_error) + __pyx_t_7 = (__pyx_t_8 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":177 + * if type(condition) != type(''): + * if hasattr(condition, 'encode'): + * condition = condition.encode('utf-8') # <<<<<<<<<<<<<< + * + * msg = 'Error while evaluating expression: %s\n' % (condition,) + */ + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_condition, __pyx_n_s_encode); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 177, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 177, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF_SET(__pyx_v_condition, __pyx_t_10); + __pyx_t_10 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":176 + * except: + * if type(condition) != type(''): + * if hasattr(condition, 'encode'): # <<<<<<<<<<<<<< + * condition = condition.encode('utf-8') + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":175 + * + * except: + * if type(condition) != type(''): # <<<<<<<<<<<<<< + * if hasattr(condition, 'encode'): + * condition = condition.encode('utf-8') + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":179 + * condition = condition.encode('utf-8') + * + * msg = 'Error while evaluating expression: %s\n' % (condition,) # <<<<<<<<<<<<<< + * sys.stderr.write(msg) + * traceback.print_exc() + */ + __pyx_t_10 = PyTuple_New(1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 179, 
__pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_INCREF(__pyx_v_condition); + __Pyx_GIVEREF(__pyx_v_condition); + PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_v_condition); + __pyx_t_9 = __Pyx_PyString_Format(__pyx_kp_s_Error_while_evaluating_expressio, __pyx_t_10); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 179, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_v_msg = ((PyObject*)__pyx_t_9); + __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":180 + * + * msg = 'Error while evaluating expression: %s\n' % (condition,) + * sys.stderr.write(msg) # <<<<<<<<<<<<<< + * traceback.print_exc() + * if not py_db.suspend_on_breakpoint_exception: + */ + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_sys); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_stderr); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_write); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_11 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_11)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_11); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + } + } + if (!__pyx_t_11) { + __pyx_t_9 = __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_v_msg); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_9); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[2] = {__pyx_t_11, __pyx_v_msg}; + __pyx_t_9 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[2] = {__pyx_t_11, __pyx_v_msg}; + __pyx_t_9 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + { + __pyx_t_12 = PyTuple_New(1+1); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_11); __pyx_t_11 = NULL; + __Pyx_INCREF(__pyx_v_msg); + __Pyx_GIVEREF(__pyx_v_msg); + PyTuple_SET_ITEM(__pyx_t_12, 0+1, __pyx_v_msg); + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_12, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 180, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + } + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":181 + * msg = 'Error while evaluating expression: %s\n' % (condition,) + * sys.stderr.write(msg) + * traceback.print_exc() # <<<<<<<<<<<<<< + * if not py_db.suspend_on_breakpoint_exception: + * return default_return_value + */ + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 181, __pyx_L5_except_error) + 
__Pyx_GOTREF(__pyx_t_10); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 181, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + if (__pyx_t_10) { + __pyx_t_9 = __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_10); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 181, __pyx_L5_except_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } else { + __pyx_t_9 = __Pyx_PyObject_CallNoArg(__pyx_t_12); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 181, __pyx_L5_except_error) + } + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":182 + * sys.stderr.write(msg) + * traceback.print_exc() + * if not py_db.suspend_on_breakpoint_exception: # <<<<<<<<<<<<<< + * return default_return_value + * else: + */ + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_suspend_on_breakpoint_exception); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 182, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_9); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 182, __pyx_L5_except_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_8 = ((!__pyx_t_7) != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":183 + * traceback.print_exc() + * if not py_db.suspend_on_breakpoint_exception: + * return default_return_value # <<<<<<<<<<<<<< + * else: + * stop = True + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_default_return_value); + __pyx_r = __pyx_v_default_return_value; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L6_except_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":182 + * sys.stderr.write(msg) + * traceback.print_exc() + * if not py_db.suspend_on_breakpoint_exception: # <<<<<<<<<<<<<< + * return default_return_value + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":185 + * return default_return_value + * else: + * stop = True # <<<<<<<<<<<<<< + * try: + * # add exception_type and stacktrace into thread additional info + */ + /*else*/ { + __pyx_v_stop = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":186 + * else: + * stop = True + * try: # <<<<<<<<<<<<<< + * # add exception_type and stacktrace into thread additional info + * etype, value, tb = sys.exc_info() + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":188 + * try: + * # add exception_type and stacktrace into thread additional info + * etype, value, tb = sys.exc_info() # <<<<<<<<<<<<<< + * try: + * error = ''.join(traceback.format_exception_only(etype, value)) + */ + __pyx_t_12 = __Pyx_GetModuleGlobalName(__pyx_n_s_sys); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 188, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_12, __pyx_n_s_exc_info); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 188, __pyx_L15_error) + 
__Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_12 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_12 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_12)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_12); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + } + } + if (__pyx_t_12) { + __pyx_t_9 = __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_t_12); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 188, __pyx_L15_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + } else { + __pyx_t_9 = __Pyx_PyObject_CallNoArg(__pyx_t_10); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 188, __pyx_L15_error) + } + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_9))) || (PyList_CheckExact(__pyx_t_9))) { + PyObject* sequence = __pyx_t_9; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 188, __pyx_L15_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_10 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_12 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_11 = PyTuple_GET_ITEM(sequence, 2); + } else { + __pyx_t_10 = PyList_GET_ITEM(sequence, 0); + __pyx_t_12 = PyList_GET_ITEM(sequence, 1); + __pyx_t_11 = PyList_GET_ITEM(sequence, 2); + } + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(__pyx_t_12); + __Pyx_INCREF(__pyx_t_11); + #else + __pyx_t_10 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 188, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_12 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 188, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_11 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 188, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_11); + #endif + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_16 = PyObject_GetIter(__pyx_t_9); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 188, __pyx_L15_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_17 = Py_TYPE(__pyx_t_16)->tp_iternext; + index = 0; __pyx_t_10 = __pyx_t_17(__pyx_t_16); if (unlikely(!__pyx_t_10)) goto __pyx_L23_unpacking_failed; + __Pyx_GOTREF(__pyx_t_10); + index = 1; __pyx_t_12 = __pyx_t_17(__pyx_t_16); if (unlikely(!__pyx_t_12)) goto __pyx_L23_unpacking_failed; + __Pyx_GOTREF(__pyx_t_12); + index = 2; __pyx_t_11 = __pyx_t_17(__pyx_t_16); if (unlikely(!__pyx_t_11)) goto __pyx_L23_unpacking_failed; + __Pyx_GOTREF(__pyx_t_11); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_17(__pyx_t_16), 3) < 0) __PYX_ERR(0, 188, __pyx_L15_error) + __pyx_t_17 = NULL; + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + goto __pyx_L24_unpacking_done; + __pyx_L23_unpacking_failed:; + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + __pyx_t_17 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 188, __pyx_L15_error) + __pyx_L24_unpacking_done:; + } + __pyx_v_etype = __pyx_t_10; + __pyx_t_10 = 0; + __pyx_v_value = __pyx_t_12; + __pyx_t_12 = 0; + __pyx_v_tb = __pyx_t_11; + __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":189 + * # add exception_type and stacktrace into thread additional info + * etype, value, tb = 
sys.exc_info() + * try: # <<<<<<<<<<<<<< + * error = ''.join(traceback.format_exception_only(etype, value)) + * stack = traceback.extract_stack(f=tb.tb_frame.f_back) + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":190 + * etype, value, tb = sys.exc_info() + * try: + * error = ''.join(traceback.format_exception_only(etype, value)) # <<<<<<<<<<<<<< + * stack = traceback.extract_stack(f=tb.tb_frame.f_back) + * + */ + __pyx_t_11 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 190, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_format_exception_only); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 190, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = NULL; + __pyx_t_18 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_11 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_11)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_11); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + __pyx_t_18 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_12)) { + PyObject *__pyx_temp[3] = {__pyx_t_11, __pyx_v_etype, __pyx_v_value}; + __pyx_t_9 = __Pyx_PyFunction_FastCall(__pyx_t_12, __pyx_temp+1-__pyx_t_18, 2+__pyx_t_18); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 190, __pyx_L28_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_12)) { + PyObject *__pyx_temp[3] = {__pyx_t_11, __pyx_v_etype, __pyx_v_value}; + __pyx_t_9 = __Pyx_PyCFunction_FastCall(__pyx_t_12, __pyx_temp+1-__pyx_t_18, 2+__pyx_t_18); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 190, __pyx_L28_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else + #endif + { + __pyx_t_10 = PyTuple_New(2+__pyx_t_18); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 190, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_10); + if (__pyx_t_11) { + __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_11); __pyx_t_11 = NULL; + } + __Pyx_INCREF(__pyx_v_etype); + __Pyx_GIVEREF(__pyx_v_etype); + PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_18, __pyx_v_etype); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_18, __pyx_v_value); + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_12, __pyx_t_10, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 190, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_12 = __Pyx_PyString_Join(__pyx_kp_s_, __pyx_t_9); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 190, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_v_error = ((PyObject*)__pyx_t_12); + __pyx_t_12 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":191 + * try: + * error = ''.join(traceback.format_exception_only(etype, value)) + * stack = traceback.extract_stack(f=tb.tb_frame.f_back) # <<<<<<<<<<<<<< + * + * # On self.set_suspend(thread, CMD_SET_BREAK) this info will be + */ + __pyx_t_12 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 191, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_12, __pyx_n_s_extract_stack); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 191, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_12); 
__pyx_t_12 = 0; + __pyx_t_12 = PyDict_New(); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 191, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_12); + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_tb, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 191, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_f_back); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 191, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + if (PyDict_SetItem(__pyx_t_12, __pyx_n_s_f, __pyx_t_11) < 0) __PYX_ERR(0, 191, __pyx_L28_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_empty_tuple, __pyx_t_12); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 191, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_v_stack = __pyx_t_11; + __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":196 + * # sent to the client. + * info.conditional_breakpoint_exception = \ + * ('Condition:\n' + condition + '\n\nError:\n' + error, stack) # <<<<<<<<<<<<<< + * finally: + * etype, value, tb = None, None, None + */ + __pyx_t_11 = PyNumber_Add(__pyx_kp_s_Condition, __pyx_v_condition); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 196, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = PyNumber_Add(__pyx_t_11, __pyx_kp_s_Error); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 196, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = PyNumber_Add(__pyx_t_12, __pyx_v_error); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 196, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __pyx_t_12 = PyTuple_New(2); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 196, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_GIVEREF(__pyx_t_11); + PyTuple_SET_ITEM(__pyx_t_12, 0, __pyx_t_11); + __Pyx_INCREF(__pyx_v_stack); + __Pyx_GIVEREF(__pyx_v_stack); + PyTuple_SET_ITEM(__pyx_t_12, 1, __pyx_v_stack); + __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":195 + * # On self.set_suspend(thread, CMD_SET_BREAK) this info will be + * # sent to the client. 
+ * info.conditional_breakpoint_exception = \ # <<<<<<<<<<<<<< + * ('Condition:\n' + condition + '\n\nError:\n' + error, stack) + * finally: + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_info, __pyx_n_s_conditional_breakpoint_exception, __pyx_t_12) < 0) __PYX_ERR(0, 195, __pyx_L28_error) + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":198 + * ('Condition:\n' + condition + '\n\nError:\n' + error, stack) + * finally: + * etype, value, tb = None, None, None # <<<<<<<<<<<<<< + * except: + * traceback.print_exc() + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_t_12 = Py_None; + __Pyx_INCREF(__pyx_t_12); + __pyx_t_11 = Py_None; + __Pyx_INCREF(__pyx_t_11); + __pyx_t_9 = Py_None; + __Pyx_INCREF(__pyx_t_9); + __Pyx_DECREF_SET(__pyx_v_etype, __pyx_t_12); + __pyx_t_12 = 0; + __Pyx_DECREF_SET(__pyx_v_value, __pyx_t_11); + __pyx_t_11 = 0; + __Pyx_DECREF_SET(__pyx_v_tb, __pyx_t_9); + __pyx_t_9 = 0; + goto __pyx_L29; + } + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L28_error:; + __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_24, &__pyx_t_25, &__pyx_t_26); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23) < 0)) __Pyx_ErrFetch(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_21); + __Pyx_XGOTREF(__pyx_t_22); + __Pyx_XGOTREF(__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_25); + __Pyx_XGOTREF(__pyx_t_26); + __pyx_t_18 = __pyx_lineno; __pyx_t_19 = __pyx_clineno; __pyx_t_20 = __pyx_filename; + { + __pyx_t_9 = Py_None; + __Pyx_INCREF(__pyx_t_9); + __pyx_t_11 = Py_None; + __Pyx_INCREF(__pyx_t_11); + __pyx_t_12 = Py_None; + __Pyx_INCREF(__pyx_t_12); + __Pyx_DECREF_SET(__pyx_v_etype, __pyx_t_9); + __pyx_t_9 = 0; + __Pyx_DECREF_SET(__pyx_v_value, __pyx_t_11); + __pyx_t_11 = 0; + __Pyx_DECREF_SET(__pyx_v_tb, __pyx_t_12); + __pyx_t_12 = 0; + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_ExceptionReset(__pyx_t_24, __pyx_t_25, __pyx_t_26); + } + __Pyx_XGIVEREF(__pyx_t_21); + __Pyx_XGIVEREF(__pyx_t_22); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_ErrRestore(__pyx_t_21, __pyx_t_22, __pyx_t_23); + __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; + __pyx_lineno = __pyx_t_18; __pyx_clineno = __pyx_t_19; __pyx_filename = __pyx_t_20; + goto __pyx_L15_error; + } + __pyx_L29:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":186 + * else: + * stop = True + * try: # <<<<<<<<<<<<<< + * # add exception_type and stacktrace into thread additional info + * etype, value, tb = sys.exc_info() + */ + } + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + goto __pyx_L22_try_end; + __pyx_L15_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":199 + * finally: + * etype, 
value, tb = None, None, None + * except: # <<<<<<<<<<<<<< + * traceback.print_exc() + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.handle_breakpoint_condition", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_12, &__pyx_t_11, &__pyx_t_9) < 0) __PYX_ERR(0, 199, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_t_9); + + /* "_pydevd_bundle/pydevd_cython.pyx":200 + * etype, value, tb = None, None, None + * except: + * traceback.print_exc() # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_16 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 200, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_16); + __pyx_t_27 = __Pyx_PyObject_GetAttrStr(__pyx_t_16, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_27)) __PYX_ERR(0, 200, __pyx_L17_except_error) + __Pyx_GOTREF(__pyx_t_27); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + __pyx_t_16 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_27))) { + __pyx_t_16 = PyMethod_GET_SELF(__pyx_t_27); + if (likely(__pyx_t_16)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_27); + __Pyx_INCREF(__pyx_t_16); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_27, function); + } + } + if (__pyx_t_16) { + __pyx_t_10 = __Pyx_PyObject_CallOneArg(__pyx_t_27, __pyx_t_16); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 200, __pyx_L17_except_error) + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + } else { + __pyx_t_10 = __Pyx_PyObject_CallNoArg(__pyx_t_27); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 200, __pyx_L17_except_error) + } + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_27); __pyx_t_27 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L16_exception_handled; + } + __pyx_L17_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":186 + * else: + * stop = True + * try: # <<<<<<<<<<<<<< + * # add exception_type and stacktrace into thread additional info + * etype, value, tb = sys.exc_info() + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); + goto __pyx_L5_except_error; + __pyx_L16_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); + __pyx_L22_try_end:; + } + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L4_exception_handled; + } + __pyx_L5_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":169 + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): + * condition = breakpoint.condition + * try: # <<<<<<<<<<<<<< + * val = eval(condition, new_frame.f_globals, new_frame.f_locals) + * if not val: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L7_try_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + __pyx_L6_except_return:; + __Pyx_PyThreadState_assign + 
__Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + __pyx_L4_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + __pyx_L8_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":167 + * + * + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): # <<<<<<<<<<<<<< + * condition = breakpoint.condition + * try: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_XDECREF(__pyx_t_27); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.handle_breakpoint_condition", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_condition); + __Pyx_XDECREF(__pyx_v_val); + __Pyx_XDECREF(__pyx_v_msg); + __Pyx_XDECREF(__pyx_v_etype); + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XDECREF(__pyx_v_tb); + __Pyx_XDECREF(__pyx_v_error); + __Pyx_XDECREF(__pyx_v_stack); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":203 + * + * + * def handle_breakpoint_expression(breakpoint, info, new_frame): # <<<<<<<<<<<<<< + * try: + * try: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9handle_breakpoint_expression(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_9handle_breakpoint_expression = {"handle_breakpoint_expression", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9handle_breakpoint_expression, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9handle_breakpoint_expression(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_breakpoint = 0; + PyObject *__pyx_v_info = 0; + PyObject *__pyx_v_new_frame = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("handle_breakpoint_expression (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_breakpoint,&__pyx_n_s_info,&__pyx_n_s_new_frame,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_breakpoint)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_info)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_expression", 1, 3, 3, 1); __PYX_ERR(0, 203, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, 
__pyx_n_s_new_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_expression", 1, 3, 3, 2); __PYX_ERR(0, 203, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "handle_breakpoint_expression") < 0)) __PYX_ERR(0, 203, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_breakpoint = values[0]; + __pyx_v_info = values[1]; + __pyx_v_new_frame = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("handle_breakpoint_expression", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 203, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.handle_breakpoint_expression", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_8handle_breakpoint_expression(__pyx_self, __pyx_v_breakpoint, __pyx_v_info, __pyx_v_new_frame); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_8handle_breakpoint_expression(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_breakpoint, PyObject *__pyx_v_info, PyObject *__pyx_v_new_frame) { + PyObject *__pyx_v_val = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + int __pyx_t_11; + int __pyx_t_12; + int __pyx_t_13; + char const *__pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + __Pyx_RefNannySetupContext("handle_breakpoint_expression", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":204 + * + * def handle_breakpoint_expression(breakpoint, info, new_frame): + * try: # <<<<<<<<<<<<<< + * try: + * val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":205 + * def handle_breakpoint_expression(breakpoint, info, new_frame): + * try: + * try: # <<<<<<<<<<<<<< + * val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":206 + * try: + * try: + * val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) # <<<<<<<<<<<<<< + * except: + * val = sys.exc_info()[1] + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_expression); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 206, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_new_frame, __pyx_n_s_f_globals); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 206, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_new_frame, __pyx_n_s_f_locals); if 
(unlikely(!__pyx_t_6)) __PYX_ERR(0, 206, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyTuple_New(3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 206, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_t_6); + __pyx_t_4 = 0; + __pyx_t_5 = 0; + __pyx_t_6 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_builtin_eval, __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 206, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_v_val = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":205 + * def handle_breakpoint_expression(breakpoint, info, new_frame): + * try: + * try: # <<<<<<<<<<<<<< + * val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + * except: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L11_try_end; + __pyx_L6_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":207 + * try: + * val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + * except: # <<<<<<<<<<<<<< + * val = sys.exc_info()[1] + * finally: + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.handle_breakpoint_expression", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_7, &__pyx_t_5) < 0) __PYX_ERR(0, 207, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_5); + + /* "_pydevd_bundle/pydevd_cython.pyx":208 + * val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + * except: + * val = sys.exc_info()[1] # <<<<<<<<<<<<<< + * finally: + * if val is not None: + */ + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_sys); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 208, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_exc_info); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 208, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + } + } + if (__pyx_t_8) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_9, __pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 208, __pyx_L8_except_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else { + __pyx_t_4 = __Pyx_PyObject_CallNoArg(__pyx_t_9); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 208, __pyx_L8_except_error) + } + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_GetItemInt(__pyx_t_4, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 208, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF_SET(__pyx_v_val, __pyx_t_9); + __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + 
__Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L7_exception_handled; + } + __pyx_L8_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":205 + * def handle_breakpoint_expression(breakpoint, info, new_frame): + * try: + * try: # <<<<<<<<<<<<<< + * val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L4_error; + __pyx_L7_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L11_try_end:; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":210 + * val = sys.exc_info()[1] + * finally: + * if val is not None: # <<<<<<<<<<<<<< + * info.pydev_message = str(val) + * + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_t_10 = (__pyx_v_val != Py_None); + __pyx_t_11 = (__pyx_t_10 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":211 + * finally: + * if val is not None: + * info.pydev_message = str(val) # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 211, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_val); + __Pyx_GIVEREF(__pyx_v_val); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_val); + __pyx_t_7 = __Pyx_PyObject_Call(((PyObject *)(&PyString_Type)), __pyx_t_5, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 211, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_info, __pyx_n_s_pydev_message, __pyx_t_7) < 0) __PYX_ERR(0, 211, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":210 + * val = sys.exc_info()[1] + * finally: + * if val is not None: # <<<<<<<<<<<<<< + * info.pydev_message = str(val) + * + */ + } + goto __pyx_L5; + } + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L4_error:; + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1) < 0)) __Pyx_ErrFetch(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_12 = __pyx_lineno; __pyx_t_13 = __pyx_clineno; __pyx_t_14 = __pyx_filename; + { + if (unlikely(!__pyx_v_val)) { __Pyx_RaiseUnboundLocalError("val"); __PYX_ERR(0, 210, __pyx_L16_error) } + __pyx_t_11 = (__pyx_v_val != Py_None); + __pyx_t_10 = (__pyx_t_11 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":211 + * finally: + * if val is not None: + * info.pydev_message = str(val) # <<<<<<<<<<<<<< + * + * + */ + if (unlikely(!__pyx_v_val)) { __Pyx_RaiseUnboundLocalError("val"); __PYX_ERR(0, 211, __pyx_L16_error) } + __pyx_t_7 = PyTuple_New(1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 211, __pyx_L16_error) + __Pyx_GOTREF(__pyx_t_7); 
+ __Pyx_INCREF(__pyx_v_val); + __Pyx_GIVEREF(__pyx_v_val); + PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_val); + __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)(&PyString_Type)), __pyx_t_7, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 211, __pyx_L16_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_v_info, __pyx_n_s_pydev_message, __pyx_t_5) < 0) __PYX_ERR(0, 211, __pyx_L16_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":210 + * val = sys.exc_info()[1] + * finally: + * if val is not None: # <<<<<<<<<<<<<< + * info.pydev_message = str(val) + * + */ + } + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_ErrRestore(__pyx_t_3, __pyx_t_2, __pyx_t_1); + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_12; __pyx_clineno = __pyx_t_13; __pyx_filename = __pyx_t_14; + goto __pyx_L1_error; + __pyx_L16_error:; + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + goto __pyx_L1_error; + } + __pyx_L5:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":203 + * + * + * def handle_breakpoint_expression(breakpoint, info, new_frame): # <<<<<<<<<<<<<< + * try: + * try: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.handle_breakpoint_expression", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_val); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":238 + * cdef tuple _args + * cdef int should_skip + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args # In the cython version we don't need to pass the frame + * self.should_skip = -1 # On cythonized version, put in instance. 
+ */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_args,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_args)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 238, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_args = ((PyObject*)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 238, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_args), (&PyTuple_Type), 1, "args", 1))) __PYX_ERR(0, 238, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_args); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":239 + * cdef int should_skip + * def __init__(self, tuple args): + * self._args = args # In the cython version we don't need to pass the frame # <<<<<<<<<<<<<< + * self.should_skip = -1 # On cythonized version, put in instance. + * # ELSE + */ + __Pyx_INCREF(__pyx_v_args); + __Pyx_GIVEREF(__pyx_v_args); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = __pyx_v_args; + + /* "_pydevd_bundle/pydevd_cython.pyx":240 + * def __init__(self, tuple args): + * self._args = args # In the cython version we don't need to pass the frame + * self.should_skip = -1 # On cythonized version, put in instance. # <<<<<<<<<<<<<< + * # ELSE + * # should_skip = -1 # Default value in class (put in instance on set). 
+ */ + __pyx_v_self->should_skip = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":238 + * cdef tuple _args + * cdef int should_skip + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args # In the cython version we don't need to pass the frame + * self.should_skip = -1 # On cythonized version, put in instance. + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":250 + * # ENDIF + * + * def set_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].set_suspend(*args, **kwargs) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_3set_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_3set_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("set_suspend (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "set_suspend", 1))) return NULL; + if (unlikely(__pyx_kwds)) { + __pyx_v_kwargs = PyDict_Copy(__pyx_kwds); if (unlikely(!__pyx_v_kwargs)) return NULL; + __Pyx_GOTREF(__pyx_v_kwargs); + } else { + __pyx_v_kwargs = NULL; + } + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_2set_suspend(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_2set_suspend(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("set_suspend", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":251 + * + * def set_suspend(self, *args, **kwargs): + * self._args[0].set_suspend(*args, **kwargs) # <<<<<<<<<<<<<< + * + * def do_wait_suspend(self, *args, **kwargs): + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 251, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_v_args, __pyx_v_kwargs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 251, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":250 + * # ENDIF + * + * def set_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].set_suspend(*args, **kwargs) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + 
__Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.set_suspend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":253 + * self._args[0].set_suspend(*args, **kwargs) + * + * def do_wait_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].do_wait_suspend(*args, **kwargs) + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_5do_wait_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_5do_wait_suspend(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("do_wait_suspend (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "do_wait_suspend", 1))) return NULL; + if (unlikely(__pyx_kwds)) { + __pyx_v_kwargs = PyDict_Copy(__pyx_kwds); if (unlikely(!__pyx_v_kwargs)) return NULL; + __Pyx_GOTREF(__pyx_v_kwargs); + } else { + __pyx_v_kwargs = NULL; + } + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_4do_wait_suspend(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_4do_wait_suspend(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_args, PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("do_wait_suspend", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":254 + * + * def do_wait_suspend(self, *args, **kwargs): + * self._args[0].do_wait_suspend(*args, **kwargs) # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 254, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 254, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 254, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_v_args, __pyx_v_kwargs); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 254, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":253 + * self._args[0].set_suspend(*args, **kwargs) + * + * def do_wait_suspend(self, *args, **kwargs): # <<<<<<<<<<<<<< + * self._args[0].do_wait_suspend(*args, **kwargs) + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + 
__Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.do_wait_suspend", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":257 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * def trace_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef bint flag; + * # ELSE + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_7trace_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_7trace_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_exception (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_exception", 1, 3, 3, 1); __PYX_ERR(0, 257, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_exception", 1, 3, 3, 2); __PYX_ERR(0, 257, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_exception") < 0)) __PYX_ERR(0, 257, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = ((PyObject*)values[1]); + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_exception", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 257, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_event), (&PyString_Type), 1, "event", 1))) __PYX_ERR(0, 257, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_6trace_exception(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + 
goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_6trace_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + int __pyx_v_flag; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *(*__pyx_t_8)(PyObject *); + __Pyx_RefNannySetupContext("trace_exception", 0); + __Pyx_INCREF(__pyx_v_frame); + + /* "_pydevd_bundle/pydevd_cython.pyx":262 + * # def trace_exception(self, frame, event, arg): + * # ENDIF + * if event == 'exception': # <<<<<<<<<<<<<< + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * + */ + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_exception, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 262, __pyx_L1_error) + __pyx_t_2 = (__pyx_t_1 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":263 + * # ENDIF + * if event == 'exception': + * flag, frame = self.should_stop_on_exception(frame, event, arg) # <<<<<<<<<<<<<< + * + * if flag: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_should_stop_on_exception); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_7 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_6, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_6, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_6, __pyx_v_arg); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if 
((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) { + PyObject* sequence = __pyx_t_3; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 263, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_7 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_8 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_8(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_7 = __pyx_t_8(__pyx_t_5); if (unlikely(!__pyx_t_7)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_7); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_5), 2) < 0) __PYX_ERR(0, 263, __pyx_L1_error) + __pyx_t_8 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L5_unpacking_done; + __pyx_L4_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_8 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 263, __pyx_L1_error) + __pyx_L5_unpacking_done:; + } + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 263, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_flag = __pyx_t_2; + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":265 + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * + * if flag: # <<<<<<<<<<<<<< + * self.handle_exception(frame, event, arg) + * return self.trace_dispatch + */ + __pyx_t_2 = (__pyx_v_flag != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":266 + * + * if flag: + * self.handle_exception(frame, event, arg) # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_handle_exception); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = NULL; + __pyx_t_6 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_6 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_6, 3+__pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_5 = PyTuple_New(3+__pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_6, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_6, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_6, __pyx_v_arg); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_5, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 266, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":267 + * if flag: + * self.handle_exception(frame, event, arg) + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * return self.trace_exception + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 267, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":265 + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * + * if flag: # <<<<<<<<<<<<<< + * self.handle_exception(frame, event, arg) + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":262 + * # def trace_exception(self, frame, event, arg): + * # ENDIF + * if event == 'exception': # <<<<<<<<<<<<<< + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":269 + * return self.trace_dispatch + * + * return self.trace_exception # <<<<<<<<<<<<<< + * + * def trace_return(self, frame, event, arg): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_exception); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 269, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":257 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * def trace_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef bint flag; + * # ELSE + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":271 + * return self.trace_exception + * + * def 
trace_return(self, frame, event, arg): # <<<<<<<<<<<<<< + * if event == 'return': + * main_debugger, filename = self._args[0], self._args[1] + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_9trace_return(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_9trace_return(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_return (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_return", 1, 3, 3, 1); __PYX_ERR(0, 271, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_return", 1, 3, 3, 2); __PYX_ERR(0, 271, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_return") < 0)) __PYX_ERR(0, 271, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = values[1]; + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_return", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 271, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_return", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8trace_return(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_8trace_return(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_main_debugger = NULL; + PyObject *__pyx_v_filename = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 
= NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + __Pyx_RefNannySetupContext("trace_return", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":272 + * + * def trace_return(self, frame, event, arg): + * if event == 'return': # <<<<<<<<<<<<<< + * main_debugger, filename = self._args[0], self._args[1] + * send_signature_return_trace(main_debugger, frame, filename, arg) + */ + __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_return, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 272, __pyx_L1_error) + if (__pyx_t_1) { + + /* "_pydevd_bundle/pydevd_cython.pyx":273 + * def trace_return(self, frame, event, arg): + * if event == 'return': + * main_debugger, filename = self._args[0], self._args[1] # <<<<<<<<<<<<<< + * send_signature_return_trace(main_debugger, frame, filename, arg) + * return self.trace_return + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 273, __pyx_L1_error) + } + __pyx_t_2 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 273, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 273, __pyx_L1_error) + } + __pyx_t_3 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 273, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_v_main_debugger = __pyx_t_2; + __pyx_t_2 = 0; + __pyx_v_filename = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":274 + * if event == 'return': + * main_debugger, filename = self._args[0], self._args[1] + * send_signature_return_trace(main_debugger, frame, filename, arg) # <<<<<<<<<<<<<< + * return self.trace_return + * + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_send_signature_return_trace); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 274, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[5] = {__pyx_t_4, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_filename, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 4+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 274, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[5] = {__pyx_t_4, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_filename, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_5, 4+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 274, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_6 = PyTuple_New(4+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 274, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + 
__Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, __pyx_v_filename); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_6, 3+__pyx_t_5, __pyx_v_arg); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 274, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":272 + * + * def trace_return(self, frame, event, arg): + * if event == 'return': # <<<<<<<<<<<<<< + * main_debugger, filename = self._args[0], self._args[1] + * send_signature_return_trace(main_debugger, frame, filename, arg) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":275 + * main_debugger, filename = self._args[0], self._args[1] + * send_signature_return_trace(main_debugger, frame, filename, arg) + * return self.trace_return # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_return); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 275, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":271 + * return self.trace_exception + * + * def trace_return(self, frame, event, arg): # <<<<<<<<<<<<<< + * if event == 'return': + * main_debugger, filename = self._args[0], self._args[1] + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_return", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_filename); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":278 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * def should_stop_on_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef PyDBAdditionalThreadInfo info; + * cdef bint flag; + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11should_stop_on_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11should_stop_on_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + CYTHON_UNUSED PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("should_stop_on_exception (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + 
case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("should_stop_on_exception", 1, 3, 3, 1); __PYX_ERR(0, 278, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("should_stop_on_exception", 1, 3, 3, 2); __PYX_ERR(0, 278, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "should_stop_on_exception") < 0)) __PYX_ERR(0, 278, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = ((PyObject*)values[1]); + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("should_stop_on_exception", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 278, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_event), (&PyString_Type), 1, "event", 1))) __PYX_ERR(0, 278, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_10should_stop_on_exception(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_10should_stop_on_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, CYTHON_UNUSED PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_info = 0; + int __pyx_v_flag; + PyObject *__pyx_v_main_debugger = NULL; + PyObject *__pyx_v_exception = NULL; + PyObject *__pyx_v_value = NULL; + PyObject *__pyx_v_trace = NULL; + PyObject *__pyx_v_exception_breakpoint = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *(*__pyx_t_6)(PyObject *); + int __pyx_t_7; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + int __pyx_t_13; + __Pyx_RefNannySetupContext("should_stop_on_exception", 0); + __Pyx_INCREF(__pyx_v_frame); + + /* "_pydevd_bundle/pydevd_cython.pyx":286 + * + * # main_debugger, _filename, 
info, _thread = self._args + * main_debugger = self._args[0] # <<<<<<<<<<<<<< + * info = self._args[2] + * flag = False + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 286, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 286, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_main_debugger = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":287 + * # main_debugger, _filename, info, _thread = self._args + * main_debugger = self._args[0] + * info = self._args[2] # <<<<<<<<<<<<<< + * flag = False + * + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 287, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 287, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 287, __pyx_L1_error) + __pyx_v_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":288 + * main_debugger = self._args[0] + * info = self._args[2] + * flag = False # <<<<<<<<<<<<<< + * + * # STATE_SUSPEND = 2 + */ + __pyx_v_flag = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":291 + * + * # STATE_SUSPEND = 2 + * if info.pydev_state != 2: #and breakpoint is not None: # <<<<<<<<<<<<<< + * exception, value, trace = arg + * + */ + __pyx_t_2 = ((__pyx_v_info->pydev_state != 2) != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":292 + * # STATE_SUSPEND = 2 + * if info.pydev_state != 2: #and breakpoint is not None: + * exception, value, trace = arg # <<<<<<<<<<<<<< + * + * if trace is not None: #on jython trace is None on the first event + */ + if ((likely(PyTuple_CheckExact(__pyx_v_arg))) || (PyList_CheckExact(__pyx_v_arg))) { + PyObject* sequence = __pyx_v_arg; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 292, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 2); + } else { + __pyx_t_1 = PyList_GET_ITEM(sequence, 0); + __pyx_t_3 = PyList_GET_ITEM(sequence, 1); + __pyx_t_4 = PyList_GET_ITEM(sequence, 2); + } + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_5 = PyObject_GetIter(__pyx_v_arg); if 
(unlikely(!__pyx_t_5)) __PYX_ERR(0, 292, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext; + index = 0; __pyx_t_1 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_1)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + index = 1; __pyx_t_3 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_3)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 2; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L4_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 3) < 0) __PYX_ERR(0, 292, __pyx_L1_error) + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L5_unpacking_done; + __pyx_L4_unpacking_failed:; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 292, __pyx_L1_error) + __pyx_L5_unpacking_done:; + } + __pyx_v_exception = __pyx_t_1; + __pyx_t_1 = 0; + __pyx_v_value = __pyx_t_3; + __pyx_t_3 = 0; + __pyx_v_trace = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":294 + * exception, value, trace = arg + * + * if trace is not None: #on jython trace is None on the first event # <<<<<<<<<<<<<< + * exception_breakpoint = get_exception_breakpoint( + * exception, main_debugger.break_on_caught_exceptions) + */ + __pyx_t_2 = (__pyx_v_trace != Py_None); + __pyx_t_7 = (__pyx_t_2 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":295 + * + * if trace is not None: #on jython trace is None on the first event + * exception_breakpoint = get_exception_breakpoint( # <<<<<<<<<<<<<< + * exception, main_debugger.break_on_caught_exceptions) + * + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_exception_breakpoint); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 295, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + + /* "_pydevd_bundle/pydevd_cython.pyx":296 + * if trace is not None: #on jython trace is None on the first event + * exception_breakpoint = get_exception_breakpoint( + * exception, main_debugger.break_on_caught_exceptions) # <<<<<<<<<<<<<< + * + * if exception_breakpoint is not None: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_caught_exceptions); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 296, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_exception, __pyx_t_1}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 295, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_v_exception, __pyx_t_1}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 295, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else 
+ #endif + { + __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 295, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_exception); + __Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_v_exception); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_9, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 295, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_exception_breakpoint = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":298 + * exception, main_debugger.break_on_caught_exceptions) + * + * if exception_breakpoint is not None: # <<<<<<<<<<<<<< + * if exception_breakpoint.ignore_libraries: + * if exception_breakpoint.notify_on_first_raise_only: + */ + __pyx_t_7 = (__pyx_v_exception_breakpoint != Py_None); + __pyx_t_2 = (__pyx_t_7 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":299 + * + * if exception_breakpoint is not None: + * if exception_breakpoint.ignore_libraries: # <<<<<<<<<<<<<< + * if exception_breakpoint.notify_on_first_raise_only: + * if main_debugger.first_appearance_in_scope(trace): + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, __pyx_n_s_ignore_libraries); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 299, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 299, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":300 + * if exception_breakpoint is not None: + * if exception_breakpoint.ignore_libraries: + * if exception_breakpoint.notify_on_first_raise_only: # <<<<<<<<<<<<<< + * if main_debugger.first_appearance_in_scope(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, __pyx_n_s_notify_on_first_raise_only); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 300, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 300, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":301 + * if exception_breakpoint.ignore_libraries: + * if exception_breakpoint.notify_on_first_raise_only: + * if main_debugger.first_appearance_in_scope(trace): # <<<<<<<<<<<<<< + * add_exception_to_frame(frame, (exception, value, trace)) + * try: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_first_appearance_in_scope); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_9) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_trace); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject 
*__pyx_temp[2] = {__pyx_t_9, __pyx_v_trace}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_v_trace}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_9); __pyx_t_9 = NULL; + __Pyx_INCREF(__pyx_v_trace); + __Pyx_GIVEREF(__pyx_v_trace); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_trace); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_1, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 301, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":302 + * if exception_breakpoint.notify_on_first_raise_only: + * if main_debugger.first_appearance_in_scope(trace): + * add_exception_to_frame(frame, (exception, value, trace)) # <<<<<<<<<<<<<< + * try: + * info.pydev_message = exception_breakpoint.qname + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_add_exception_to_frame); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 302, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 302, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_exception); + __Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_exception); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_value); + __Pyx_INCREF(__pyx_v_trace); + __Pyx_GIVEREF(__pyx_v_trace); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_trace); + __pyx_t_9 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_v_frame, __pyx_t_1}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 302, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_v_frame, __pyx_t_1}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 302, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 
302, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_9) { + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_9); __pyx_t_9 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_8, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_8, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 302, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":303 + * if main_debugger.first_appearance_in_scope(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":304 + * add_exception_to_frame(frame, (exception, value, trace)) + * try: + * info.pydev_message = exception_breakpoint.qname # <<<<<<<<<<<<<< + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, __pyx_n_s_qname); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 304, __pyx_L11_error) + __Pyx_GOTREF(__pyx_t_4); + if (!(likely(PyString_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 304, __pyx_L11_error) + __Pyx_GIVEREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_v_info->pydev_message); + __Pyx_DECREF(__pyx_v_info->pydev_message); + __pyx_v_info->pydev_message = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":303 + * if main_debugger.first_appearance_in_scope(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname + * except: + */ + } + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L16_try_end; + __pyx_L11_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":305 + * try: + * info.pydev_message = exception_breakpoint.qname + * except: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') + * flag = True + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_4, &__pyx_t_3, &__pyx_t_5) < 0) __PYX_ERR(0, 305, __pyx_L13_except_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_5); + + /* "_pydevd_bundle/pydevd_cython.pyx":306 + * info.pydev_message = exception_breakpoint.qname + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') # <<<<<<<<<<<<<< + * flag = True + * else: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, 
__pyx_n_s_qname); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 306, __pyx_L13_except_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_encode); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 306, __pyx_L13_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 306, __pyx_L13_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 306, __pyx_L13_except_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_info->pydev_message); + __Pyx_DECREF(__pyx_v_info->pydev_message); + __pyx_v_info->pydev_message = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L12_exception_handled; + } + __pyx_L13_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":303 + * if main_debugger.first_appearance_in_scope(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + goto __pyx_L1_error; + __pyx_L12_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + __pyx_L16_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":307 + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') + * flag = True # <<<<<<<<<<<<<< + * else: + * pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename)) + */ + __pyx_v_flag = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":301 + * if exception_breakpoint.ignore_libraries: + * if exception_breakpoint.notify_on_first_raise_only: + * if main_debugger.first_appearance_in_scope(trace): # <<<<<<<<<<<<<< + * add_exception_to_frame(frame, (exception, value, trace)) + * try: + */ + goto __pyx_L10; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":309 + * flag = True + * else: + * pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename)) # <<<<<<<<<<<<<< + * flag = False + * else: + */ + /*else*/ { + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydev_log); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_debug); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_exception); + 
__Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_exception); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyString_Format(__pyx_kp_s_Ignore_exception_s_in_library_s, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (!__pyx_t_3) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_t_1}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_t_1}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_9 = PyTuple_New(1+1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_9, 0+1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_9, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 309, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":310 + * else: + * pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename)) + * flag = False # <<<<<<<<<<<<<< + * else: + * if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): + */ + __pyx_v_flag = 0; + } + __pyx_L10:; + + /* "_pydevd_bundle/pydevd_cython.pyx":300 + * if exception_breakpoint is not None: + * if exception_breakpoint.ignore_libraries: + * if exception_breakpoint.notify_on_first_raise_only: # <<<<<<<<<<<<<< + * if main_debugger.first_appearance_in_scope(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":299 + * + * if exception_breakpoint is not None: + * if exception_breakpoint.ignore_libraries: # <<<<<<<<<<<<<< + * if exception_breakpoint.notify_on_first_raise_only: + * if main_debugger.first_appearance_in_scope(trace): + */ + goto __pyx_L8; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":312 + * flag = False + * else: + * if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): # <<<<<<<<<<<<<< + * add_exception_to_frame(frame, (exception, value, trace)) + * try: + */ + /*else*/ { + __pyx_t_5 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, __pyx_n_s_notify_on_first_raise_only); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_13 = ((!__pyx_t_7) != 0); + if (!__pyx_t_13) { + } else { + __pyx_t_2 = __pyx_t_13; + goto __pyx_L20_bool_binop_done; + } + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_just_raised); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (!__pyx_t_9) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_trace); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_v_trace}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_v_trace}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_9); __pyx_t_9 = NULL; + __Pyx_INCREF(__pyx_v_trace); + __Pyx_GIVEREF(__pyx_v_trace); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_trace); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_13 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_13 < 0)) __PYX_ERR(0, 312, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_2 = __pyx_t_13; + __pyx_L20_bool_binop_done:; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":313 + * else: + * if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): + * add_exception_to_frame(frame, (exception, value, trace)) # <<<<<<<<<<<<<< + * try: + * info.pydev_message = exception_breakpoint.qname + */ + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_add_exception_to_frame); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 313, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 313, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_exception); + __Pyx_GIVEREF(__pyx_v_exception); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_exception); + __Pyx_INCREF(__pyx_v_value); + __Pyx_GIVEREF(__pyx_v_value); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_value); + __Pyx_INCREF(__pyx_v_trace); + __Pyx_GIVEREF(__pyx_v_trace); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_trace); + __pyx_t_9 = NULL; + __pyx_t_8 = 
0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_v_frame, __pyx_t_1}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 313, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_v_frame, __pyx_t_1}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 313, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 313, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_9) { + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_9); __pyx_t_9 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_8, __pyx_v_frame); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_8, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 313, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":314 + * if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_12, &__pyx_t_11, &__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_10); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":315 + * add_exception_to_frame(frame, (exception, value, trace)) + * try: + * info.pydev_message = exception_breakpoint.qname # <<<<<<<<<<<<<< + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, __pyx_n_s_qname); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 315, __pyx_L22_error) + __Pyx_GOTREF(__pyx_t_5); + if (!(likely(PyString_CheckExact(__pyx_t_5))||((__pyx_t_5) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_5)->tp_name), 0))) __PYX_ERR(0, 315, __pyx_L22_error) + __Pyx_GIVEREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_v_info->pydev_message); + __Pyx_DECREF(__pyx_v_info->pydev_message); + __pyx_v_info->pydev_message = ((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":314 + * if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname + * except: + */ + } + 
__Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + goto __pyx_L27_try_end; + __pyx_L22_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":316 + * try: + * info.pydev_message = exception_breakpoint.qname + * except: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') + * flag = True + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_4, &__pyx_t_3) < 0) __PYX_ERR(0, 316, __pyx_L24_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_3); + + /* "_pydevd_bundle/pydevd_cython.pyx":317 + * info.pydev_message = exception_breakpoint.qname + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') # <<<<<<<<<<<<<< + * flag = True + * else: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_exception_breakpoint, __pyx_n_s_qname); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 317, __pyx_L24_except_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_encode); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 317, __pyx_L24_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 317, __pyx_L24_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 317, __pyx_L24_except_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_info->pydev_message); + __Pyx_DECREF(__pyx_v_info->pydev_message); + __pyx_v_info->pydev_message = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L23_exception_handled; + } + __pyx_L24_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":314 + * if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): + * add_exception_to_frame(frame, (exception, value, trace)) + * try: # <<<<<<<<<<<<<< + * info.pydev_message = exception_breakpoint.qname + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_11, __pyx_t_10); + goto __pyx_L1_error; + __pyx_L23_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_12, __pyx_t_11, __pyx_t_10); + __pyx_L27_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":318 + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') + * flag = True # <<<<<<<<<<<<<< + * else: + * flag = False + */ + __pyx_v_flag = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":312 + * flag = False + * else: + * if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): # <<<<<<<<<<<<<< + * add_exception_to_frame(frame, 
(exception, value, trace)) + * try: + */ + goto __pyx_L19; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":320 + * flag = True + * else: + * flag = False # <<<<<<<<<<<<<< + * else: + * try: + */ + /*else*/ { + __pyx_v_flag = 0; + } + __pyx_L19:; + } + __pyx_L8:; + + /* "_pydevd_bundle/pydevd_cython.pyx":298 + * exception, main_debugger.break_on_caught_exceptions) + * + * if exception_breakpoint is not None: # <<<<<<<<<<<<<< + * if exception_breakpoint.ignore_libraries: + * if exception_breakpoint.notify_on_first_raise_only: + */ + goto __pyx_L7; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":322 + * flag = False + * else: + * try: # <<<<<<<<<<<<<< + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + */ + /*else*/ { + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_10, &__pyx_t_11, &__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":323 + * else: + * try: + * if main_debugger.plugin is not None: # <<<<<<<<<<<<<< + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 323, __pyx_L30_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = (__pyx_t_3 != Py_None); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_13 = (__pyx_t_2 != 0); + if (__pyx_t_13) { + + /* "_pydevd_bundle/pydevd_cython.pyx":324 + * try: + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) # <<<<<<<<<<<<<< + * if result: + * flag, frame = result + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 324, __pyx_L30_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_exception_break); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 324, __pyx_L30_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[6] = {__pyx_t_4, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_self->_args, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 324, __pyx_L30_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[6] = {__pyx_t_4, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_self->_args, __pyx_v_arg}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_8, 5+__pyx_t_8); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 324, __pyx_L30_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_1 = PyTuple_New(5+__pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 324, 
__pyx_L30_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_8, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_8, ((PyObject *)__pyx_v_self)); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_8, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_1, 3+__pyx_t_8, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_1, 4+__pyx_t_8, __pyx_v_arg); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_1, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 324, __pyx_L30_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_v_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":325 + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: # <<<<<<<<<<<<<< + * flag, frame = result + * except: + */ + __pyx_t_13 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_13 < 0)) __PYX_ERR(0, 325, __pyx_L30_error) + if (__pyx_t_13) { + + /* "_pydevd_bundle/pydevd_cython.pyx":326 + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: + * flag, frame = result # <<<<<<<<<<<<<< + * except: + * flag = False + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 326, __pyx_L30_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_3 = PyList_GET_ITEM(sequence, 0); + __pyx_t_5 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_5); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 326, __pyx_L30_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 326, __pyx_L30_error) + __Pyx_GOTREF(__pyx_t_5); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_1 = PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 326, __pyx_L30_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = Py_TYPE(__pyx_t_1)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_6(__pyx_t_1); if (unlikely(!__pyx_t_3)) goto __pyx_L38_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_5 = __pyx_t_6(__pyx_t_1); if (unlikely(!__pyx_t_5)) goto __pyx_L38_unpacking_failed; + __Pyx_GOTREF(__pyx_t_5); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_1), 2) < 0) __PYX_ERR(0, 326, __pyx_L30_error) + __pyx_t_6 = NULL; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L39_unpacking_done; + __pyx_L38_unpacking_failed:; + 
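+ /* Consolidated sketch of the plugin fallback branch, pydevd_cython.pyx lines 322-330,
+  * assembled strictly from the source fragments embedded in the surrounding generated code:
+  *
+  *     if exception_breakpoint is not None:
+  *         ...  # breakpoint-specific handling, sketched above
+  *     else:
+  *         try:
+  *             if main_debugger.plugin is not None:
+  *                 result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg)
+  *                 if result:
+  *                     flag, frame = result
+  *         except:
+  *             flag = False
+  *
+  *     return flag, frame
+  */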
__Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_6 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 326, __pyx_L30_error) + __pyx_L39_unpacking_done:; + } + __pyx_t_13 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely((__pyx_t_13 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 326, __pyx_L30_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_flag = __pyx_t_13; + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":325 + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: # <<<<<<<<<<<<<< + * flag, frame = result + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":323 + * else: + * try: + * if main_debugger.plugin is not None: # <<<<<<<<<<<<<< + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + * if result: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":322 + * flag = False + * else: + * try: # <<<<<<<<<<<<<< + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + */ + } + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + goto __pyx_L35_try_end; + __pyx_L30_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":327 + * if result: + * flag, frame = result + * except: # <<<<<<<<<<<<<< + * flag = False + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_3, &__pyx_t_1) < 0) __PYX_ERR(0, 327, __pyx_L32_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":328 + * flag, frame = result + * except: + * flag = False # <<<<<<<<<<<<<< + * + * return flag, frame + */ + __pyx_v_flag = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L31_exception_handled; + } + __pyx_L32_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":322 + * flag = False + * else: + * try: # <<<<<<<<<<<<<< + * if main_debugger.plugin is not None: + * result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + goto __pyx_L1_error; + __pyx_L31_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_ExceptionReset(__pyx_t_10, __pyx_t_11, __pyx_t_12); + __pyx_L35_try_end:; + } + } + __pyx_L7:; + + /* "_pydevd_bundle/pydevd_cython.pyx":294 + * exception, value, trace = arg + * + * if trace is not None: #on jython trace is None on the first event # <<<<<<<<<<<<<< + * exception_breakpoint = get_exception_breakpoint( + * exception, main_debugger.break_on_caught_exceptions) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":291 + * + * # STATE_SUSPEND = 2 + * 
if info.pydev_state != 2: #and breakpoint is not None: # <<<<<<<<<<<<<< + * exception, value, trace = arg + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":330 + * flag = False + * + * return flag, frame # <<<<<<<<<<<<<< + * + * def handle_exception(self, frame, event, arg): + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_flag); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 330, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 330, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_frame); + __pyx_t_1 = 0; + __pyx_r = __pyx_t_3; + __pyx_t_3 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":278 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * def should_stop_on_exception(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef PyDBAdditionalThreadInfo info; + * cdef bint flag; + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.should_stop_on_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF((PyObject *)__pyx_v_info); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_exception); + __Pyx_XDECREF(__pyx_v_value); + __Pyx_XDECREF(__pyx_v_trace); + __Pyx_XDECREF(__pyx_v_exception_breakpoint); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":332 + * return flag, frame + * + * def handle_exception(self, frame, event, arg): # <<<<<<<<<<<<<< + * try: + * # print 'handle_exception', frame.f_lineno, frame.f_code.co_name + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_13handle_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_13handle_exception(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("handle_exception (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_exception", 1, 3, 3, 1); 
__PYX_ERR(0, 332, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("handle_exception", 1, 3, 3, 2); __PYX_ERR(0, 332, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "handle_exception") < 0)) __PYX_ERR(0, 332, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = values[1]; + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("handle_exception", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 332, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_12handle_exception(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_12handle_exception(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_trace_obj = NULL; + PyObject *__pyx_v_main_debugger = NULL; + PyObject *__pyx_v_initial_trace_obj = NULL; + PyObject *__pyx_v_check_trace_obj = NULL; + PyObject *__pyx_v_filename = NULL; + PyObject *__pyx_v_filename_to_lines_where_exceptions_are_ignored = NULL; + PyObject *__pyx_v_lines_ignored = NULL; + PyObject *__pyx_v_curr_stat = NULL; + PyObject *__pyx_v_last_stat = NULL; + PyObject *__pyx_v_from_user_input = NULL; + PyObject *__pyx_v_merged = NULL; + PyObject *__pyx_v_exc_lineno = NULL; + PyObject *__pyx_v_line = NULL; + PyObject *__pyx_v_thread = NULL; + PyObject *__pyx_v_frame_id_to_frame = NULL; + PyObject *__pyx_v_f = NULL; + PyObject *__pyx_v_thread_id = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + Py_ssize_t __pyx_t_6; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + int __pyx_t_15; + PyObject *__pyx_t_16 = NULL; + int __pyx_t_17; + char const *__pyx_t_18; + PyObject *__pyx_t_19 = NULL; + PyObject *__pyx_t_20 = NULL; + PyObject *__pyx_t_21 = NULL; + PyObject *__pyx_t_22 = NULL; + PyObject *__pyx_t_23 = NULL; + PyObject *__pyx_t_24 = NULL; + char const *__pyx_t_25; + __Pyx_RefNannySetupContext("handle_exception", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":333 + * + * def handle_exception(self, frame, event, arg): + * try: # <<<<<<<<<<<<<< + * # print 'handle_exception', frame.f_lineno, frame.f_code.co_name + * + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":337 + * + * # We have 3 things in arg: exception type, 
description, traceback object + * trace_obj = arg[2] # <<<<<<<<<<<<<< + * main_debugger = self._args[0] + * + */ + __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_arg, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 337, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_trace_obj = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":338 + * # We have 3 things in arg: exception type, description, traceback object + * trace_obj = arg[2] + * main_debugger = self._args[0] # <<<<<<<<<<<<<< + * + * if not hasattr(trace_obj, 'tb_next'): + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 338, __pyx_L4_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 338, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_main_debugger = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":340 + * main_debugger = self._args[0] + * + * if not hasattr(trace_obj, 'tb_next'): # <<<<<<<<<<<<<< + * return #Not always there on Jython... + * + */ + __pyx_t_2 = __Pyx_HasAttr(__pyx_v_trace_obj, __pyx_n_s_tb_next); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(0, 340, __pyx_L4_error) + __pyx_t_3 = ((!(__pyx_t_2 != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":341 + * + * if not hasattr(trace_obj, 'tb_next'): + * return #Not always there on Jython... # <<<<<<<<<<<<<< + * + * initial_trace_obj = trace_obj + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":340 + * main_debugger = self._args[0] + * + * if not hasattr(trace_obj, 'tb_next'): # <<<<<<<<<<<<<< + * return #Not always there on Jython... + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":343 + * return #Not always there on Jython... + * + * initial_trace_obj = trace_obj # <<<<<<<<<<<<<< + * if trace_obj.tb_next is None and trace_obj.tb_frame is frame: + * #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + */ + __Pyx_INCREF(__pyx_v_trace_obj); + __pyx_v_initial_trace_obj = __pyx_v_trace_obj; + + /* "_pydevd_bundle/pydevd_cython.pyx":344 + * + * initial_trace_obj = trace_obj + * if trace_obj.tb_next is None and trace_obj.tb_frame is frame: # <<<<<<<<<<<<<< + * #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = (__pyx_t_1 == Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_4 = (__pyx_t_2 != 0); + if (__pyx_t_4) { + } else { + __pyx_t_3 = __pyx_t_4; + goto __pyx_L8_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 344, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = (__pyx_t_1 == __pyx_v_frame); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = (__pyx_t_4 != 0); + __pyx_t_3 = __pyx_t_2; + __pyx_L8_bool_binop_done:; + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":347 + * #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). 
+ * + * if main_debugger.break_on_exceptions_thrown_in_same_context: # <<<<<<<<<<<<<< + * #Option: Don't break if an exception is caught in the same function from which it is thrown + * return + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_exceptions_thrown_in_sa); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 347, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 347, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":349 + * if main_debugger.break_on_exceptions_thrown_in_same_context: + * #Option: Don't break if an exception is caught in the same function from which it is thrown + * return # <<<<<<<<<<<<<< + * else: + * #Get the trace_obj from where the exception was raised... + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":347 + * #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + * + * if main_debugger.break_on_exceptions_thrown_in_same_context: # <<<<<<<<<<<<<< + * #Option: Don't break if an exception is caught in the same function from which it is thrown + * return + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":344 + * + * initial_trace_obj = trace_obj + * if trace_obj.tb_next is None and trace_obj.tb_frame is frame: # <<<<<<<<<<<<<< + * #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + * + */ + goto __pyx_L7; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":352 + * else: + * #Get the trace_obj from where the exception was raised... + * while trace_obj.tb_next is not None: # <<<<<<<<<<<<<< + * trace_obj = trace_obj.tb_next + * + */ + /*else*/ { + while (1) { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 352, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = (__pyx_t_1 != Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_2 = (__pyx_t_3 != 0); + if (!__pyx_t_2) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":353 + * #Get the trace_obj from where the exception was raised... 
+ * while trace_obj.tb_next is not None: + * trace_obj = trace_obj.tb_next # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_next); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 353, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_trace_obj, __pyx_t_1); + __pyx_t_1 = 0; + } + } + __pyx_L7:; + + /* "_pydevd_bundle/pydevd_cython.pyx":356 + * + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: # <<<<<<<<<<<<<< + * for check_trace_obj in (initial_trace_obj, trace_obj): + * filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1] + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_ignore_exceptions_thrown_in_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 356, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 356, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":357 + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + * for check_trace_obj in (initial_trace_obj, trace_obj): # <<<<<<<<<<<<<< + * filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1] + * + */ + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 357, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_initial_trace_obj); + __Pyx_GIVEREF(__pyx_v_initial_trace_obj); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_initial_trace_obj); + __Pyx_INCREF(__pyx_v_trace_obj); + __Pyx_GIVEREF(__pyx_v_trace_obj); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_trace_obj); + __pyx_t_5 = __pyx_t_1; __Pyx_INCREF(__pyx_t_5); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + for (;;) { + if (__pyx_t_6 >= 2) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_5, __pyx_t_6); __Pyx_INCREF(__pyx_t_1); __pyx_t_6++; if (unlikely(0 < 0)) __PYX_ERR(0, 357, __pyx_L4_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_5, __pyx_t_6); __pyx_t_6++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 357, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":358 + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + * for check_trace_obj in (initial_trace_obj, trace_obj): + * filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1] # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_check_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_9) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_8); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if 
(PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_t_8}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + { + __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_9); __pyx_t_9 = NULL; + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_t_8); + __pyx_t_8 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_GetItemInt(__pyx_t_1, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 358, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_filename, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":361 + * + * + * filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_filename_to_lines_where_exceptio); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 361, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_XDECREF_SET(__pyx_v_filename_to_lines_where_exceptions_are_ignored, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":364 + * + * + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename) # <<<<<<<<<<<<<< + * if lines_ignored is None: + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_filename_to_lines_where_exceptions_are_ignored, __pyx_n_s_get); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 364, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_10) { + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_filename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 364, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_filename}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 364, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_filename}; + __pyx_t_7 = 
__Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 364, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 364, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_10); __pyx_t_10 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_v_filename); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 364, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_lines_ignored, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":365 + * + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename) + * if lines_ignored is None: # <<<<<<<<<<<<<< + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + * + */ + __pyx_t_2 = (__pyx_v_lines_ignored == Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":366 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename) + * if lines_ignored is None: + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} # <<<<<<<<<<<<<< + * + * try: + */ + __pyx_t_7 = PyDict_New(); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 366, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_7); + __Pyx_DECREF_SET(__pyx_v_lines_ignored, __pyx_t_7); + if (unlikely(PyObject_SetItem(__pyx_v_filename_to_lines_where_exceptions_are_ignored, __pyx_v_filename, __pyx_t_7) < 0)) __PYX_ERR(0, 366, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":365 + * + * lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename) + * if lines_ignored is None: # <<<<<<<<<<<<<< + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":368 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + * + * try: # <<<<<<<<<<<<<< + * curr_stat = os.stat(filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":369 + * + * try: + * curr_stat = os.stat(filename) # <<<<<<<<<<<<<< + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + * except: + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_os); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 369, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_stat); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 369, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + if (!__pyx_t_1) { + __pyx_t_7 = 
__Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_filename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 369, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_7); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 369, __pyx_L17_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 369, __pyx_L17_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 369, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_v_filename); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_10, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 369, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_curr_stat, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":370 + * try: + * curr_stat = os.stat(filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) # <<<<<<<<<<<<<< + * except: + * curr_stat = None + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_curr_stat, __pyx_n_s_st_size); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 370, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_curr_stat, __pyx_n_s_st_mtime); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 370, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_10 = PyTuple_New(2); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 370, __pyx_L17_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_7); + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_10, 1, __pyx_t_8); + __pyx_t_7 = 0; + __pyx_t_8 = 0; + __Pyx_DECREF_SET(__pyx_v_curr_stat, __pyx_t_10); + __pyx_t_10 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":368 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + * + * try: # <<<<<<<<<<<<<< + * curr_stat = os.stat(filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + */ + } + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + goto __pyx_L24_try_end; + __pyx_L17_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":371 + * curr_stat = os.stat(filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + * except: # <<<<<<<<<<<<<< + * curr_stat = None + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_10, &__pyx_t_8, &__pyx_t_7) < 0) __PYX_ERR(0, 371, __pyx_L19_except_error) + 
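+ /* Consolidated sketch of the stat-based cache invalidation, pydevd_cython.pyx lines 368-382,
+  * assembled strictly from the source fragments embedded in the surrounding generated code:
+  *
+  *     try:
+  *         curr_stat = os.stat(filename)
+  *         curr_stat = (curr_stat.st_size, curr_stat.st_mtime)
+  *     except:
+  *         curr_stat = None
+  *
+  *     last_stat = self.filename_to_stat_info.get(filename)
+  *     if last_stat != curr_stat:
+  *         self.filename_to_stat_info[filename] = curr_stat
+  *         lines_ignored.clear()
+  *         try:
+  *             linecache.checkcache(filename)
+  *         except:
+  *             # Jython 2.1
+  *             linecache.checkcache()
+  */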
__Pyx_GOTREF(__pyx_t_10); + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":372 + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + * except: + * curr_stat = None # <<<<<<<<<<<<<< + * + * last_stat = self.filename_to_stat_info.get(filename) + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_curr_stat, Py_None); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L18_exception_handled; + } + __pyx_L19_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":368 + * lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + * + * try: # <<<<<<<<<<<<<< + * curr_stat = os.stat(filename) + * curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_12, __pyx_t_13); + goto __pyx_L4_error; + __pyx_L18_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_12, __pyx_t_13); + __pyx_L24_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":374 + * curr_stat = None + * + * last_stat = self.filename_to_stat_info.get(filename) # <<<<<<<<<<<<<< + * if last_stat != curr_stat: + * self.filename_to_stat_info[filename] = curr_stat + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_filename_to_stat_info); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 374, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_get); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 374, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + } + } + if (!__pyx_t_8) { + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_v_filename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 374, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_filename}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 374, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_filename}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 374, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 374, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_8); __pyx_t_8 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_filename); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 374, __pyx_L4_error) + 
__Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_last_stat, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":375 + * + * last_stat = self.filename_to_stat_info.get(filename) + * if last_stat != curr_stat: # <<<<<<<<<<<<<< + * self.filename_to_stat_info[filename] = curr_stat + * lines_ignored.clear() + */ + __pyx_t_7 = PyObject_RichCompare(__pyx_v_last_stat, __pyx_v_curr_stat, Py_NE); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 375, __pyx_L4_error) + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 375, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":376 + * last_stat = self.filename_to_stat_info.get(filename) + * if last_stat != curr_stat: + * self.filename_to_stat_info[filename] = curr_stat # <<<<<<<<<<<<<< + * lines_ignored.clear() + * try: + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_filename_to_stat_info); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 376, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + if (unlikely(PyObject_SetItem(__pyx_t_7, __pyx_v_filename, __pyx_v_curr_stat) < 0)) __PYX_ERR(0, 376, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":377 + * if last_stat != curr_stat: + * self.filename_to_stat_info[filename] = curr_stat + * lines_ignored.clear() # <<<<<<<<<<<<<< + * try: + * linecache.checkcache(filename) + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_lines_ignored, __pyx_n_s_clear); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 377, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + } + } + if (__pyx_t_1) { + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_t_1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 377, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + __pyx_t_7 = __Pyx_PyObject_CallNoArg(__pyx_t_10); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 377, __pyx_L4_error) + } + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":378 + * self.filename_to_stat_info[filename] = curr_stat + * lines_ignored.clear() + * try: # <<<<<<<<<<<<<< + * linecache.checkcache(filename) + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_13, &__pyx_t_12, &__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_11); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":379 + * lines_ignored.clear() + * try: + * linecache.checkcache(filename) # <<<<<<<<<<<<<< + * except: + * #Jython 2.1 + */ + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_linecache); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 379, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_checkcache); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 379, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + 
__pyx_t_10 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_10) { + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_filename); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 379, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_7); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_filename}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 379, __pyx_L28_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_filename}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 379, __pyx_L28_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 379, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_10); __pyx_t_10 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_v_filename); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 379, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":378 + * self.filename_to_stat_info[filename] = curr_stat + * lines_ignored.clear() + * try: # <<<<<<<<<<<<<< + * linecache.checkcache(filename) + * except: + */ + } + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + goto __pyx_L35_try_end; + __pyx_L28_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":380 + * try: + * linecache.checkcache(filename) + * except: # <<<<<<<<<<<<<< + * #Jython 2.1 + * linecache.checkcache() + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_1, &__pyx_t_8) < 0) __PYX_ERR(0, 380, __pyx_L30_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_8); + + /* "_pydevd_bundle/pydevd_cython.pyx":382 + * except: + * #Jython 2.1 + * linecache.checkcache() # <<<<<<<<<<<<<< + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename) + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_linecache); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 382, __pyx_L30_except_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_checkcache); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 382, __pyx_L30_except_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && 
unlikely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + } + } + if (__pyx_t_9) { + __pyx_t_10 = __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_t_9); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 382, __pyx_L30_except_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else { + __pyx_t_10 = __Pyx_PyObject_CallNoArg(__pyx_t_14); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 382, __pyx_L30_except_error) + } + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L29_exception_handled; + } + __pyx_L30_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":378 + * self.filename_to_stat_info[filename] = curr_stat + * lines_ignored.clear() + * try: # <<<<<<<<<<<<<< + * linecache.checkcache(filename) + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_12, __pyx_t_11); + goto __pyx_L4_error; + __pyx_L29_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_12, __pyx_t_11); + __pyx_L35_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":375 + * + * last_stat = self.filename_to_stat_info.get(filename) + * if last_stat != curr_stat: # <<<<<<<<<<<<<< + * self.filename_to_stat_info[filename] = curr_stat + * lines_ignored.clear() + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":384 + * linecache.checkcache() + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename) # <<<<<<<<<<<<<< + * if from_user_input: + * merged = {} + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_filename_to_lines_where_exceptio); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 384, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_get); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 384, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_1) { + __pyx_t_8 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_filename); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 384, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 384, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 384, __pyx_L4_error) + 
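+ /* Consolidated sketch of how user-entered ignore lines are merged, pydevd_cython.pyx
+  * lines 384-391, assembled strictly from the embedded source fragments:
+  *
+  *     from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename)
+  *     if from_user_input:
+  *         merged = {}
+  *         merged.update(lines_ignored)
+  *         # Override what we have with the related entries that the user entered
+  *         merged.update(from_user_input)
+  *     else:
+  *         merged = lines_ignored
+  */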
__Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 384, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_v_filename); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_10, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 384, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF_SET(__pyx_v_from_user_input, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":385 + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename) + * if from_user_input: # <<<<<<<<<<<<<< + * merged = {} + * merged.update(lines_ignored) + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_from_user_input); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 385, __pyx_L4_error) + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":386 + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename) + * if from_user_input: + * merged = {} # <<<<<<<<<<<<<< + * merged.update(lines_ignored) + * #Override what we have with the related entries that the user entered + */ + __pyx_t_8 = PyDict_New(); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 386, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_XDECREF_SET(__pyx_v_merged, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":387 + * if from_user_input: + * merged = {} + * merged.update(lines_ignored) # <<<<<<<<<<<<<< + * #Override what we have with the related entries that the user entered + * merged.update(from_user_input) + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_merged, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 387, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_10) { + __pyx_t_8 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_lines_ignored); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 387, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_lines_ignored}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 387, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_lines_ignored}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 387, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 387, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_10); __pyx_t_10 = NULL; + __Pyx_INCREF(__pyx_v_lines_ignored); + 
__Pyx_GIVEREF(__pyx_v_lines_ignored); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_lines_ignored); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_1, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 387, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":389 + * merged.update(lines_ignored) + * #Override what we have with the related entries that the user entered + * merged.update(from_user_input) # <<<<<<<<<<<<<< + * else: + * merged = lines_ignored + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_merged, __pyx_n_s_update); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 389, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_1) { + __pyx_t_8 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_from_user_input); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 389, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_from_user_input}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 389, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_from_user_input}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 389, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 389, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_INCREF(__pyx_v_from_user_input); + __Pyx_GIVEREF(__pyx_v_from_user_input); + PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_v_from_user_input); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_10, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 389, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":385 + * + * from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename) + * if from_user_input: # <<<<<<<<<<<<<< + * merged = {} + * merged.update(lines_ignored) + */ + goto __pyx_L38; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":391 + * merged.update(from_user_input) + * else: + * merged = lines_ignored # <<<<<<<<<<<<<< + * + * exc_lineno = check_trace_obj.tb_lineno + */ + /*else*/ { + __Pyx_INCREF(__pyx_v_lines_ignored); + __Pyx_XDECREF_SET(__pyx_v_merged, __pyx_v_lines_ignored); + } + __pyx_L38:; + + /* "_pydevd_bundle/pydevd_cython.pyx":393 + * merged = lines_ignored + * + * exc_lineno = check_trace_obj.tb_lineno # <<<<<<<<<<<<<< + * + * # print ('lines ignored', lines_ignored) + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_check_trace_obj, __pyx_n_s_tb_lineno); if (unlikely(!__pyx_t_8)) 
__PYX_ERR(0, 393, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_XDECREF_SET(__pyx_v_exc_lineno, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":399 + * # print ('merged', merged, 'curr', exc_lineno) + * + * if exc_lineno not in merged: #Note: check on merged but update lines_ignored. # <<<<<<<<<<<<<< + * try: + * line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + */ + __pyx_t_3 = (__Pyx_PySequence_ContainsTF(__pyx_v_exc_lineno, __pyx_v_merged, Py_NE)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 399, __pyx_L4_error) + __pyx_t_2 = (__pyx_t_3 != 0); + if (__pyx_t_2) { + + /* "_pydevd_bundle/pydevd_cython.pyx":400 + * + * if exc_lineno not in merged: #Note: check on merged but update lines_ignored. + * try: # <<<<<<<<<<<<<< + * line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":401 + * if exc_lineno not in merged: #Note: check on merged but update lines_ignored. + * try: + * line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) # <<<<<<<<<<<<<< + * except: + * #Jython 2.1 + */ + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_linecache); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_getline); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_check_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_f_globals); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + __pyx_t_15 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + __pyx_t_15 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_v_filename, __pyx_v_exc_lineno, __pyx_t_1}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_v_filename, __pyx_v_exc_lineno, __pyx_t_1}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_14 = PyTuple_New(3+__pyx_t_15); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_GOTREF(__pyx_t_14); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); 
PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_14, 0+__pyx_t_15, __pyx_v_filename); + __Pyx_INCREF(__pyx_v_exc_lineno); + __Pyx_GIVEREF(__pyx_v_exc_lineno); + PyTuple_SET_ITEM(__pyx_t_14, 1+__pyx_t_15, __pyx_v_exc_lineno); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_14, 2+__pyx_t_15, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_14, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 401, __pyx_L40_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF_SET(__pyx_v_line, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":400 + * + * if exc_lineno not in merged: #Note: check on merged but update lines_ignored. + * try: # <<<<<<<<<<<<<< + * line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: + */ + } + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + goto __pyx_L47_try_end; + __pyx_L40_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":402 + * try: + * line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: # <<<<<<<<<<<<<< + * #Jython 2.1 + * line = linecache.getline(filename, exc_lineno) + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_10, &__pyx_t_14) < 0) __PYX_ERR(0, 402, __pyx_L42_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GOTREF(__pyx_t_14); + + /* "_pydevd_bundle/pydevd_cython.pyx":404 + * except: + * #Jython 2.1 + * line = linecache.getline(filename, exc_lineno) # <<<<<<<<<<<<<< + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: + */ + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_linecache); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 404, __pyx_L42_except_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_getline); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 404, __pyx_L42_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_7 = NULL; + __pyx_t_15 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + __pyx_t_15 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_filename, __pyx_v_exc_lineno}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 404, __pyx_L42_except_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_9)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_v_filename, __pyx_v_exc_lineno}; + __pyx_t_1 = 
__Pyx_PyCFunction_FastCall(__pyx_t_9, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 404, __pyx_L42_except_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_16 = PyTuple_New(2+__pyx_t_15); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 404, __pyx_L42_except_error) + __Pyx_GOTREF(__pyx_t_16); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_16, 0+__pyx_t_15, __pyx_v_filename); + __Pyx_INCREF(__pyx_v_exc_lineno); + __Pyx_GIVEREF(__pyx_v_exc_lineno); + PyTuple_SET_ITEM(__pyx_t_16, 1+__pyx_t_15, __pyx_v_exc_lineno); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_9, __pyx_t_16, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 404, __pyx_L42_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + } + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF_SET(__pyx_v_line, __pyx_t_1); + __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + goto __pyx_L41_exception_handled; + } + __pyx_L42_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":400 + * + * if exc_lineno not in merged: #Note: check on merged but update lines_ignored. + * try: # <<<<<<<<<<<<<< + * line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_12, __pyx_t_13); + goto __pyx_L4_error; + __pyx_L41_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_11, __pyx_t_12, __pyx_t_13); + __pyx_L47_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":406 + * line = linecache.getline(filename, exc_lineno) + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: # <<<<<<<<<<<<<< + * lines_ignored[exc_lineno] = 1 + * return + */ + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_IGNORE_EXCEPTION_TAG); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 406, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_match); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 406, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + if (!__pyx_t_10) { + __pyx_t_14 = __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_line); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 406, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_14); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_line}; + __pyx_t_14 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 406, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_14); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_line}; + 
__pyx_t_14 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 406, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_14); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 406, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_10); __pyx_t_10 = NULL; + __Pyx_INCREF(__pyx_v_line); + __Pyx_GIVEREF(__pyx_v_line); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_line); + __pyx_t_14 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_1, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 406, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_2 = (__pyx_t_14 != Py_None); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":407 + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: + * lines_ignored[exc_lineno] = 1 # <<<<<<<<<<<<<< + * return + * else: + */ + if (unlikely(PyObject_SetItem(__pyx_v_lines_ignored, __pyx_v_exc_lineno, __pyx_int_1) < 0)) __PYX_ERR(0, 407, __pyx_L4_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":408 + * if IGNORE_EXCEPTION_TAG.match(line) is not None: + * lines_ignored[exc_lineno] = 1 + * return # <<<<<<<<<<<<<< + * else: + * #Put in the cache saying not to ignore + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":406 + * line = linecache.getline(filename, exc_lineno) + * + * if IGNORE_EXCEPTION_TAG.match(line) is not None: # <<<<<<<<<<<<<< + * lines_ignored[exc_lineno] = 1 + * return + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":411 + * else: + * #Put in the cache saying not to ignore + * lines_ignored[exc_lineno] = 0 # <<<<<<<<<<<<<< + * else: + * #Ok, dict has it already cached, so, let's check it... + */ + /*else*/ { + if (unlikely(PyObject_SetItem(__pyx_v_lines_ignored, __pyx_v_exc_lineno, __pyx_int_0) < 0)) __PYX_ERR(0, 411, __pyx_L4_error) + } + + /* "_pydevd_bundle/pydevd_cython.pyx":399 + * # print ('merged', merged, 'curr', exc_lineno) + * + * if exc_lineno not in merged: #Note: check on merged but update lines_ignored. # <<<<<<<<<<<<<< + * try: + * line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + */ + goto __pyx_L39; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":414 + * else: + * #Ok, dict has it already cached, so, let's check it... 
+ * if merged.get(exc_lineno, 0): # <<<<<<<<<<<<<< + * return + * + */ + /*else*/ { + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_merged, __pyx_n_s_get); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 414, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = NULL; + __pyx_t_15 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_15 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_exc_lineno, __pyx_int_0}; + __pyx_t_14 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 414, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_14); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_exc_lineno, __pyx_int_0}; + __pyx_t_14 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 414, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_14); + } else + #endif + { + __pyx_t_10 = PyTuple_New(2+__pyx_t_15); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 414, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_exc_lineno); + __Pyx_GIVEREF(__pyx_v_exc_lineno); + PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_15, __pyx_v_exc_lineno); + __Pyx_INCREF(__pyx_int_0); + __Pyx_GIVEREF(__pyx_int_0); + PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_15, __pyx_int_0); + __pyx_t_14 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_10, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 414, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_14); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 414, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + if (__pyx_t_3) { + + /* "_pydevd_bundle/pydevd_cython.pyx":415 + * #Ok, dict has it already cached, so, let's check it... + * if merged.get(exc_lineno, 0): + * return # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":414 + * else: + * #Ok, dict has it already cached, so, let's check it... 
+ * if merged.get(exc_lineno, 0): # <<<<<<<<<<<<<< + * return + * + */ + } + } + __pyx_L39:; + + /* "_pydevd_bundle/pydevd_cython.pyx":357 + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + * for check_trace_obj in (initial_trace_obj, trace_obj): # <<<<<<<<<<<<<< + * filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1] + * + */ + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":356 + * + * + * if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: # <<<<<<<<<<<<<< + * for check_trace_obj in (initial_trace_obj, trace_obj): + * filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1] + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":418 + * + * + * thread = self._args[3] # <<<<<<<<<<<<<< + * + * try: + */ + if (unlikely(__pyx_v_self->_args == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 418, __pyx_L4_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_self->_args, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 418, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_thread = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":420 + * thread = self._args[3] + * + * try: # <<<<<<<<<<<<<< + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_13, &__pyx_t_12, &__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_11); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":421 + * + * try: + * frame_id_to_frame = {} # <<<<<<<<<<<<<< + * frame_id_to_frame[id(frame)] = frame + * f = trace_obj.tb_frame + */ + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 421, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_v_frame_id_to_frame = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":422 + * try: + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame # <<<<<<<<<<<<<< + * f = trace_obj.tb_frame + * while f is not None: + */ + __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 422, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_frame); + __pyx_t_14 = __Pyx_PyObject_Call(__pyx_builtin_id, __pyx_t_5, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 422, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(PyObject_SetItem(__pyx_v_frame_id_to_frame, __pyx_t_14, __pyx_v_frame) < 0)) __PYX_ERR(0, 422, __pyx_L52_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":423 + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + * f = trace_obj.tb_frame # <<<<<<<<<<<<<< + * while f is not None: + * frame_id_to_frame[id(f)] = f + */ + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_trace_obj, __pyx_n_s_tb_frame); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 423, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_v_f = __pyx_t_14; + __pyx_t_14 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":424 + * frame_id_to_frame[id(frame)] = frame + * f = trace_obj.tb_frame + * while f is not None: # <<<<<<<<<<<<<< + * frame_id_to_frame[id(f)] = f + * f = f.f_back + */ + while (1) { + __pyx_t_3 = (__pyx_v_f != Py_None); + __pyx_t_2 = (__pyx_t_3 != 0); + if 
(!__pyx_t_2) break; + + /* "_pydevd_bundle/pydevd_cython.pyx":425 + * f = trace_obj.tb_frame + * while f is not None: + * frame_id_to_frame[id(f)] = f # <<<<<<<<<<<<<< + * f = f.f_back + * f = None + */ + __pyx_t_14 = PyTuple_New(1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 425, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_INCREF(__pyx_v_f); + __Pyx_GIVEREF(__pyx_v_f); + PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_v_f); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_builtin_id, __pyx_t_14, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 425, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + if (unlikely(PyObject_SetItem(__pyx_v_frame_id_to_frame, __pyx_t_5, __pyx_v_f) < 0)) __PYX_ERR(0, 425, __pyx_L52_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":426 + * while f is not None: + * frame_id_to_frame[id(f)] = f + * f = f.f_back # <<<<<<<<<<<<<< + * f = None + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_f, __pyx_n_s_f_back); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 426, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF_SET(__pyx_v_f, __pyx_t_5); + __pyx_t_5 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":427 + * frame_id_to_frame[id(f)] = f + * f = f.f_back + * f = None # <<<<<<<<<<<<<< + * + * thread_id = get_thread_id(thread) + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":429 + * f = None + * + * thread_id = get_thread_id(thread) # <<<<<<<<<<<<<< + * pydevd_vars.add_additional_frame_by_id(thread_id, frame_id_to_frame) + * try: + */ + __pyx_t_14 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_thread_id); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 429, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + } + } + if (!__pyx_t_8) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_v_thread); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 429, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_thread}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_14, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 429, __pyx_L52_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_thread}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_14, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 429, __pyx_L52_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 429, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_v_thread); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_10, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 429, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + } + __Pyx_DECREF(__pyx_t_14); 
__pyx_t_14 = 0; + __pyx_v_thread_id = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":430 + * + * thread_id = get_thread_id(thread) + * pydevd_vars.add_additional_frame_by_id(thread_id, frame_id_to_frame) # <<<<<<<<<<<<<< + * try: + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + */ + __pyx_t_14 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydevd_vars); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 430, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_add_additional_frame_by_id); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 430, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + __pyx_t_15 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + __pyx_t_15 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[3] = {__pyx_t_14, __pyx_v_thread_id, __pyx_v_frame_id_to_frame}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 430, __pyx_L52_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[3] = {__pyx_t_14, __pyx_v_thread_id, __pyx_v_frame_id_to_frame}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 430, __pyx_L52_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_15); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 430, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_14) { + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_14); __pyx_t_14 = NULL; + } + __Pyx_INCREF(__pyx_v_thread_id); + __Pyx_GIVEREF(__pyx_v_thread_id); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_15, __pyx_v_thread_id); + __Pyx_INCREF(__pyx_v_frame_id_to_frame); + __Pyx_GIVEREF(__pyx_v_frame_id_to_frame); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_15, __pyx_v_frame_id_to_frame); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_8, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 430, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":431 + * thread_id = get_thread_id(thread) + * pydevd_vars.add_additional_frame_by_id(thread_id, frame_id_to_frame) + * try: # <<<<<<<<<<<<<< + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + * self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":432 + * pydevd_vars.add_additional_frame_by_id(thread_id, frame_id_to_frame) + * try: + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) # <<<<<<<<<<<<<< + * self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) + * self.do_wait_suspend(thread, frame, event, arg) + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_send_caught_exception_stack); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 432, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_10); + 
__pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 432, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_frame); + __pyx_t_14 = __Pyx_PyObject_Call(__pyx_builtin_id, __pyx_t_8, NULL); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 432, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + __pyx_t_15 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + __pyx_t_15 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_v_thread, __pyx_v_arg, __pyx_t_14}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 432, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[4] = {__pyx_t_8, __pyx_v_thread, __pyx_v_arg, __pyx_t_14}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 3+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 432, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else + #endif + { + __pyx_t_1 = PyTuple_New(3+__pyx_t_15); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 432, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_15, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_15, __pyx_v_arg); + __Pyx_GIVEREF(__pyx_t_14); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_15, __pyx_t_14); + __pyx_t_14 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_1, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 432, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":433 + * try: + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + * self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg) + * main_debugger.send_caught_exception_stack_proceeded(thread) + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 433, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 433, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_14 = NULL; + __pyx_t_15 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + __pyx_t_15 
= 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[3] = {__pyx_t_14, __pyx_v_thread, __pyx_t_1}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 433, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[3] = {__pyx_t_14, __pyx_v_thread, __pyx_t_1}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 2+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 433, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_15); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 433, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_14) { + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_14); __pyx_t_14 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_15, __pyx_v_thread); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_15, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_8, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 433, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":434 + * main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + * self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) + * self.do_wait_suspend(thread, frame, event, arg) # <<<<<<<<<<<<<< + * main_debugger.send_caught_exception_stack_proceeded(thread) + * + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 434, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_8 = NULL; + __pyx_t_15 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + __pyx_t_15 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 4+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 434, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-__pyx_t_15, 4+__pyx_t_15); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 434, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_1 = PyTuple_New(4+__pyx_t_15); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 434, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + 
__Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_15, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_15, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_15, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_1, 3+__pyx_t_15, __pyx_v_arg); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_1, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 434, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":435 + * self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) + * self.do_wait_suspend(thread, frame, event, arg) + * main_debugger.send_caught_exception_stack_proceeded(thread) # <<<<<<<<<<<<<< + * + * finally: + */ + __pyx_t_10 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_send_caught_exception_stack_proc); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 435, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_10))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_10); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_10); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_10, function); + } + } + if (!__pyx_t_1) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_10, __pyx_v_thread); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 435, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_thread}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_10, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 435, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_10)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_thread}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_10, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 435, __pyx_L61_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 435, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_v_thread); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_10, __pyx_t_8, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 435, __pyx_L61_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":438 + * + * finally: + * pydevd_vars.remove_additional_frame_by_id(thread_id) # <<<<<<<<<<<<<< + * except: + * traceback.print_exc() + */ + /*finally:*/ { + /*normal exit:*/{ + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydevd_vars); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 438, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_remove_additional_frame_by_id); if (unlikely(!__pyx_t_8)) 
__PYX_ERR(0, 438, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + if (!__pyx_t_10) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_8, __pyx_v_thread_id); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_thread_id}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L52_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_v_thread_id}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L52_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 438, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_10); __pyx_t_10 = NULL; + __Pyx_INCREF(__pyx_v_thread_id); + __Pyx_GIVEREF(__pyx_v_thread_id); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_thread_id); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_1, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L52_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L62; + } + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L61_error:; + __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_22, &__pyx_t_23, &__pyx_t_24); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_19, &__pyx_t_20, &__pyx_t_21) < 0)) __Pyx_ErrFetch(&__pyx_t_19, &__pyx_t_20, &__pyx_t_21); + __Pyx_XGOTREF(__pyx_t_19); + __Pyx_XGOTREF(__pyx_t_20); + __Pyx_XGOTREF(__pyx_t_21); + __Pyx_XGOTREF(__pyx_t_22); + __Pyx_XGOTREF(__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_24); + __pyx_t_15 = __pyx_lineno; __pyx_t_17 = __pyx_clineno; __pyx_t_18 = __pyx_filename; + { + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydevd_vars); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 438, __pyx_L64_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_remove_additional_frame_by_id); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 438, __pyx_L64_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_1); + if 
(likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_8) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_thread_id); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L64_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_thread_id}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L64_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_thread_id}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L64_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 438, __pyx_L64_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_8); __pyx_t_8 = NULL; + __Pyx_INCREF(__pyx_v_thread_id); + __Pyx_GIVEREF(__pyx_v_thread_id); + PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_v_thread_id); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_10, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 438, __pyx_L64_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_22); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_ExceptionReset(__pyx_t_22, __pyx_t_23, __pyx_t_24); + } + __Pyx_XGIVEREF(__pyx_t_19); + __Pyx_XGIVEREF(__pyx_t_20); + __Pyx_XGIVEREF(__pyx_t_21); + __Pyx_ErrRestore(__pyx_t_19, __pyx_t_20, __pyx_t_21); + __pyx_t_19 = 0; __pyx_t_20 = 0; __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; + __pyx_lineno = __pyx_t_15; __pyx_clineno = __pyx_t_17; __pyx_filename = __pyx_t_18; + goto __pyx_L52_error; + __pyx_L64_error:; + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_22); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_ExceptionReset(__pyx_t_22, __pyx_t_23, __pyx_t_24); + } + __Pyx_XDECREF(__pyx_t_19); __pyx_t_19 = 0; + __Pyx_XDECREF(__pyx_t_20); __pyx_t_20 = 0; + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; + goto __pyx_L52_error; + } + __pyx_L62:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":420 + * thread = self._args[3] + * + * try: # <<<<<<<<<<<<<< + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + */ + } + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + goto __pyx_L57_try_end; + __pyx_L52_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":439 + * finally: + * 
pydevd_vars.remove_additional_frame_by_id(thread_id) + * except: # <<<<<<<<<<<<<< + * traceback.print_exc() + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_10) < 0) __PYX_ERR(0, 439, __pyx_L54_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_10); + + /* "_pydevd_bundle/pydevd_cython.pyx":440 + * pydevd_vars.remove_additional_frame_by_id(thread_id) + * except: + * traceback.print_exc() # <<<<<<<<<<<<<< + * + * main_debugger.set_trace_for_frame_and_parents(frame) + */ + __pyx_t_14 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 440, __pyx_L54_except_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 440, __pyx_L54_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_9); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_9, function); + } + } + if (__pyx_t_14) { + __pyx_t_8 = __Pyx_PyObject_CallOneArg(__pyx_t_9, __pyx_t_14); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 440, __pyx_L54_except_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else { + __pyx_t_8 = __Pyx_PyObject_CallNoArg(__pyx_t_9); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 440, __pyx_L54_except_error) + } + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + goto __pyx_L53_exception_handled; + } + __pyx_L54_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":420 + * thread = self._args[3] + * + * try: # <<<<<<<<<<<<<< + * frame_id_to_frame = {} + * frame_id_to_frame[id(frame)] = frame + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_12, __pyx_t_11); + goto __pyx_L4_error; + __pyx_L53_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_12, __pyx_t_11); + __pyx_L57_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":442 + * traceback.print_exc() + * + * main_debugger.set_trace_for_frame_and_parents(frame) # <<<<<<<<<<<<<< + * finally: + * #Clear some local variables... 
+ */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_set_trace_for_frame_and_parents); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 442, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_5) { + __pyx_t_10 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_frame); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 442, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_frame}; + __pyx_t_10 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 442, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_10); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_frame}; + __pyx_t_10 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 442, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_10); + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 442, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_v_frame); + __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 442, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":445 + * finally: + * #Clear some local variables... + * trace_obj = None # <<<<<<<<<<<<<< + * initial_trace_obj = None + * check_trace_obj = None + */ + /*finally:*/ { + /*normal exit:*/{ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":446 + * #Clear some local variables... 
+ * trace_obj = None + * initial_trace_obj = None # <<<<<<<<<<<<<< + * check_trace_obj = None + * f = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_initial_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":447 + * trace_obj = None + * initial_trace_obj = None + * check_trace_obj = None # <<<<<<<<<<<<<< + * f = None + * frame_id_to_frame = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":448 + * initial_trace_obj = None + * check_trace_obj = None + * f = None # <<<<<<<<<<<<<< + * frame_id_to_frame = None + * main_debugger = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":449 + * check_trace_obj = None + * f = None + * frame_id_to_frame = None # <<<<<<<<<<<<<< + * main_debugger = None + * thread = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_frame_id_to_frame, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":450 + * f = None + * frame_id_to_frame = None + * main_debugger = None # <<<<<<<<<<<<<< + * thread = None + * + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_main_debugger, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":451 + * frame_id_to_frame = None + * main_debugger = None + * thread = None # <<<<<<<<<<<<<< + * + * def get_func_name(self, frame): + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_thread, Py_None); + goto __pyx_L5; + } + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L4_error:; + __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_24 = 0; __pyx_t_23 = 0; __pyx_t_22 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_24, &__pyx_t_23, &__pyx_t_22); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13) < 0)) __Pyx_ErrFetch(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_11); + __Pyx_XGOTREF(__pyx_t_12); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_22); + __pyx_t_17 = __pyx_lineno; __pyx_t_15 = __pyx_clineno; __pyx_t_25 = __pyx_filename; + { + + /* "_pydevd_bundle/pydevd_cython.pyx":445 + * finally: + * #Clear some local variables... + * trace_obj = None # <<<<<<<<<<<<<< + * initial_trace_obj = None + * check_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":446 + * #Clear some local variables... 
+ * trace_obj = None + * initial_trace_obj = None # <<<<<<<<<<<<<< + * check_trace_obj = None + * f = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_initial_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":447 + * trace_obj = None + * initial_trace_obj = None + * check_trace_obj = None # <<<<<<<<<<<<<< + * f = None + * frame_id_to_frame = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":448 + * initial_trace_obj = None + * check_trace_obj = None + * f = None # <<<<<<<<<<<<<< + * frame_id_to_frame = None + * main_debugger = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":449 + * check_trace_obj = None + * f = None + * frame_id_to_frame = None # <<<<<<<<<<<<<< + * main_debugger = None + * thread = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_frame_id_to_frame, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":450 + * f = None + * frame_id_to_frame = None + * main_debugger = None # <<<<<<<<<<<<<< + * thread = None + * + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_main_debugger, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":451 + * frame_id_to_frame = None + * main_debugger = None + * thread = None # <<<<<<<<<<<<<< + * + * def get_func_name(self, frame): + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_thread, Py_None); + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_XGIVEREF(__pyx_t_22); + __Pyx_ExceptionReset(__pyx_t_24, __pyx_t_23, __pyx_t_22); + } + __Pyx_XGIVEREF(__pyx_t_11); + __Pyx_XGIVEREF(__pyx_t_12); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ErrRestore(__pyx_t_11, __pyx_t_12, __pyx_t_13); + __pyx_t_11 = 0; __pyx_t_12 = 0; __pyx_t_13 = 0; __pyx_t_24 = 0; __pyx_t_23 = 0; __pyx_t_22 = 0; + __pyx_lineno = __pyx_t_17; __pyx_clineno = __pyx_t_15; __pyx_filename = __pyx_t_25; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_22 = __pyx_r; + __pyx_r = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":445 + * finally: + * #Clear some local variables... + * trace_obj = None # <<<<<<<<<<<<<< + * initial_trace_obj = None + * check_trace_obj = None + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":446 + * #Clear some local variables... 
+ * trace_obj = None + * initial_trace_obj = None # <<<<<<<<<<<<<< + * check_trace_obj = None + * f = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_initial_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":447 + * trace_obj = None + * initial_trace_obj = None + * check_trace_obj = None # <<<<<<<<<<<<<< + * f = None + * frame_id_to_frame = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_check_trace_obj, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":448 + * initial_trace_obj = None + * check_trace_obj = None + * f = None # <<<<<<<<<<<<<< + * frame_id_to_frame = None + * main_debugger = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":449 + * check_trace_obj = None + * f = None + * frame_id_to_frame = None # <<<<<<<<<<<<<< + * main_debugger = None + * thread = None + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_frame_id_to_frame, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":450 + * f = None + * frame_id_to_frame = None + * main_debugger = None # <<<<<<<<<<<<<< + * thread = None + * + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_main_debugger, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":451 + * frame_id_to_frame = None + * main_debugger = None + * thread = None # <<<<<<<<<<<<<< + * + * def get_func_name(self, frame): + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_thread, Py_None); + __pyx_r = __pyx_t_22; + __pyx_t_22 = 0; + goto __pyx_L0; + } + __pyx_L5:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":332 + * return flag, frame + * + * def handle_exception(self, frame, event, arg): # <<<<<<<<<<<<<< + * try: + * # print 'handle_exception', frame.f_lineno, frame.f_code.co_name + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.handle_exception", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_trace_obj); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_initial_trace_obj); + __Pyx_XDECREF(__pyx_v_check_trace_obj); + __Pyx_XDECREF(__pyx_v_filename); + __Pyx_XDECREF(__pyx_v_filename_to_lines_where_exceptions_are_ignored); + __Pyx_XDECREF(__pyx_v_lines_ignored); + __Pyx_XDECREF(__pyx_v_curr_stat); + __Pyx_XDECREF(__pyx_v_last_stat); + __Pyx_XDECREF(__pyx_v_from_user_input); + __Pyx_XDECREF(__pyx_v_merged); + __Pyx_XDECREF(__pyx_v_exc_lineno); + __Pyx_XDECREF(__pyx_v_line); + __Pyx_XDECREF(__pyx_v_thread); + __Pyx_XDECREF(__pyx_v_frame_id_to_frame); + __Pyx_XDECREF(__pyx_v_f); + __Pyx_XDECREF(__pyx_v_thread_id); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":453 + * thread = None + * + * def get_func_name(self, frame): # <<<<<<<<<<<<<< + * code_obj = frame.f_code + * func_name = code_obj.co_name + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_15get_func_name(PyObject *__pyx_v_self, PyObject *__pyx_v_frame); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_15get_func_name(PyObject *__pyx_v_self, PyObject *__pyx_v_frame) { + PyObject *__pyx_r = 0; + 
__Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("get_func_name (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_14get_func_name(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), ((PyObject *)__pyx_v_frame)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_14get_func_name(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame) { + PyObject *__pyx_v_code_obj = NULL; + PyObject *__pyx_v_func_name = NULL; + PyObject *__pyx_v_cls_name = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + __Pyx_RefNannySetupContext("get_func_name", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":454 + * + * def get_func_name(self, frame): + * code_obj = frame.f_code # <<<<<<<<<<<<<< + * func_name = code_obj.co_name + * try: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 454, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_code_obj = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":455 + * def get_func_name(self, frame): + * code_obj = frame.f_code + * func_name = code_obj.co_name # <<<<<<<<<<<<<< + * try: + * cls_name = get_clsname_for_code(code_obj, frame) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_code_obj, __pyx_n_s_co_name); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 455, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_func_name = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":456 + * code_obj = frame.f_code + * func_name = code_obj.co_name + * try: # <<<<<<<<<<<<<< + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_2, &__pyx_t_3, &__pyx_t_4); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_4); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":457 + * func_name = code_obj.co_name + * try: + * cls_name = get_clsname_for_code(code_obj, frame) # <<<<<<<<<<<<<< + * if cls_name is not None: + * return "%s.%s" % (cls_name, func_name) + */ + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_clsname_for_code); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 457, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_code_obj, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 457, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if 
(__Pyx_PyFastCFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_code_obj, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-__pyx_t_7, 2+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 457, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 457, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_code_obj); + __Pyx_GIVEREF(__pyx_v_code_obj); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_7, __pyx_v_code_obj); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_7, __pyx_v_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 457, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_v_cls_name = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":458 + * try: + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: # <<<<<<<<<<<<<< + * return "%s.%s" % (cls_name, func_name) + * else: + */ + __pyx_t_9 = (__pyx_v_cls_name != Py_None); + __pyx_t_10 = (__pyx_t_9 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":459 + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + * return "%s.%s" % (cls_name, func_name) # <<<<<<<<<<<<<< + * else: + * return func_name + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 459, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_cls_name); + __Pyx_GIVEREF(__pyx_v_cls_name); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_cls_name); + __Pyx_INCREF(__pyx_v_func_name); + __Pyx_GIVEREF(__pyx_v_func_name); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_func_name); + __pyx_t_5 = __Pyx_PyString_Format(__pyx_kp_s_s_s, __pyx_t_1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 459, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":458 + * try: + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: # <<<<<<<<<<<<<< + * return "%s.%s" % (cls_name, func_name) + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":461 + * return "%s.%s" % (cls_name, func_name) + * else: + * return func_name # <<<<<<<<<<<<<< + * except: + * traceback.print_exc() + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_func_name); + __pyx_r = __pyx_v_func_name; + goto __pyx_L7_try_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":456 + * code_obj = frame.f_code + * func_name = code_obj.co_name + * try: # <<<<<<<<<<<<<< + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + */ + } + __pyx_L3_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":462 + * else: + * return func_name + * except: # <<<<<<<<<<<<<< + * traceback.print_exc() + * return func_name + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.get_func_name", 
__pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_1, &__pyx_t_8) < 0) __PYX_ERR(0, 462, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_8); + + /* "_pydevd_bundle/pydevd_cython.pyx":463 + * return func_name + * except: + * traceback.print_exc() # <<<<<<<<<<<<<< + * return func_name + * + */ + __pyx_t_11 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 463, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_11); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 463, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_11 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_11)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_11); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + if (__pyx_t_11) { + __pyx_t_6 = __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_11); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 463, __pyx_L5_except_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + } else { + __pyx_t_6 = __Pyx_PyObject_CallNoArg(__pyx_t_12); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 463, __pyx_L5_except_error) + } + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":464 + * except: + * traceback.print_exc() + * return func_name # <<<<<<<<<<<<<< + * + * def show_return_values(self, frame, arg): + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_func_name); + __pyx_r = __pyx_v_func_name; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L6_except_return; + } + __pyx_L5_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":456 + * code_obj = frame.f_code + * func_name = code_obj.co_name + * try: # <<<<<<<<<<<<<< + * cls_name = get_clsname_for_code(code_obj, frame) + * if cls_name is not None: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L1_error; + __pyx_L7_try_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + __pyx_L6_except_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_4); + __Pyx_ExceptionReset(__pyx_t_2, __pyx_t_3, __pyx_t_4); + goto __pyx_L0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":453 + * thread = None + * + * def get_func_name(self, frame): # <<<<<<<<<<<<<< + * code_obj = frame.f_code + * func_name = code_obj.co_name + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.get_func_name", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_code_obj); + __Pyx_XDECREF(__pyx_v_func_name); + __Pyx_XDECREF(__pyx_v_cls_name); + __Pyx_XGIVEREF(__pyx_r); + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":466 + * return func_name + * + * def show_return_values(self, frame, arg): # <<<<<<<<<<<<<< + * try: + * try: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_17show_return_values(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_17show_return_values(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("show_return_values (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_arg,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("show_return_values", 1, 2, 2, 1); __PYX_ERR(0, 466, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "show_return_values") < 0)) __PYX_ERR(0, 466, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_frame = values[0]; + __pyx_v_arg = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("show_return_values", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 466, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.show_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_16show_return_values(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_16show_return_values(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_f_locals_back = NULL; + PyObject *__pyx_v_return_values_dict = NULL; + PyObject *__pyx_v_name = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + int __pyx_t_6; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + PyObject *__pyx_t_11 = NULL; + PyObject *__pyx_t_12 = NULL; + int __pyx_t_13; + char const *__pyx_t_14; + 
PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + __Pyx_RefNannySetupContext("show_return_values", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":467 + * + * def show_return_values(self, frame, arg): + * try: # <<<<<<<<<<<<<< + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":468 + * def show_return_values(self, frame, arg): + * try: + * try: # <<<<<<<<<<<<<< + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":469 + * try: + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) # <<<<<<<<<<<<<< + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 469, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_GetAttr3(__pyx_t_4, __pyx_n_s_f_locals, Py_None); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 469, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_f_locals_back = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":470 + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: + */ + __pyx_t_6 = (__pyx_v_f_locals_back != Py_None); + __pyx_t_7 = (__pyx_t_6 != 0); + if (__pyx_t_7) { + + /* "_pydevd_bundle/pydevd_cython.pyx":471 + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) # <<<<<<<<<<<<<< + * if return_values_dict is None: + * return_values_dict = {} + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_locals_back, __pyx_n_s_get); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 471, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 471, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_t_8, Py_None}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 471, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_9, __pyx_t_8, Py_None}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_10, 2+__pyx_t_10); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 471, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + 
__Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else + #endif + { + __pyx_t_11 = PyTuple_New(2+__pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 471, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_11); + if (__pyx_t_9) { + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_9); __pyx_t_9 = NULL; + } + __Pyx_GIVEREF(__pyx_t_8); + PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_10, __pyx_t_8); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_10, Py_None); + __pyx_t_8 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_11, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 471, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_return_values_dict = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":472 + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: # <<<<<<<<<<<<<< + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + */ + __pyx_t_7 = (__pyx_v_return_values_dict == Py_None); + __pyx_t_6 = (__pyx_t_7 != 0); + if (__pyx_t_6) { + + /* "_pydevd_bundle/pydevd_cython.pyx":473 + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: + * return_values_dict = {} # <<<<<<<<<<<<<< + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + * name = self.get_func_name(frame) + */ + __pyx_t_5 = PyDict_New(); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 473, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF_SET(__pyx_v_return_values_dict, __pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":474 + * if return_values_dict is None: + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict # <<<<<<<<<<<<<< + * name = self.get_func_name(frame) + * return_values_dict[name] = arg + */ + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 474, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + if (unlikely(PyObject_SetItem(__pyx_v_f_locals_back, __pyx_t_5, __pyx_v_return_values_dict) < 0)) __PYX_ERR(0, 474, __pyx_L6_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":472 + * if f_locals_back is not None: + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: # <<<<<<<<<<<<<< + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":475 + * return_values_dict = {} + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + * name = self.get_func_name(frame) # <<<<<<<<<<<<<< + * return_values_dict[name] = arg + * except: + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_get_func_name); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 475, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_11 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_11 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_11)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_11); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (!__pyx_t_11) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_frame); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 475, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if 
CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_11, __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 475, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_11, __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 475, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 475, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_11); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_11); __pyx_t_11 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_v_frame); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_8, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 475, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_name = __pyx_t_5; + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":476 + * f_locals_back[RETURN_VALUES_DICT] = return_values_dict + * name = self.get_func_name(frame) + * return_values_dict[name] = arg # <<<<<<<<<<<<<< + * except: + * traceback.print_exc() + */ + if (unlikely(PyObject_SetItem(__pyx_v_return_values_dict, __pyx_v_name, __pyx_v_arg) < 0)) __PYX_ERR(0, 476, __pyx_L6_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":470 + * try: + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + * if return_values_dict is None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":468 + * def show_return_values(self, frame, arg): + * try: + * try: # <<<<<<<<<<<<<< + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L11_try_end; + __pyx_L6_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":477 + * name = self.get_func_name(frame) + * return_values_dict[name] = arg + * except: # <<<<<<<<<<<<<< + * traceback.print_exc() + * finally: + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.show_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_4, &__pyx_t_8) < 0) __PYX_ERR(0, 477, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_8); + + /* "_pydevd_bundle/pydevd_cython.pyx":478 + * return_values_dict[name] = arg + * except: + * traceback.print_exc() # <<<<<<<<<<<<<< + * finally: + * f_locals_back = None + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 478, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_print_exc); if 
(unlikely(!__pyx_t_12)) __PYX_ERR(0, 478, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + if (__pyx_t_9) { + __pyx_t_11 = __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_9); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 478, __pyx_L8_except_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else { + __pyx_t_11 = __Pyx_PyObject_CallNoArg(__pyx_t_12); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 478, __pyx_L8_except_error) + } + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L7_exception_handled; + } + __pyx_L8_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":468 + * def show_return_values(self, frame, arg): + * try: + * try: # <<<<<<<<<<<<<< + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L4_error; + __pyx_L7_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L11_try_end:; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":480 + * traceback.print_exc() + * finally: + * f_locals_back = None # <<<<<<<<<<<<<< + * + * def remove_return_values(self, main_debugger, frame): + */ + /*finally:*/ { + /*normal exit:*/{ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + goto __pyx_L5; + } + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L4_error:; + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1) < 0)) __Pyx_ErrFetch(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_10 = __pyx_lineno; __pyx_t_13 = __pyx_clineno; __pyx_t_14 = __pyx_filename; + { + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_ErrRestore(__pyx_t_3, __pyx_t_2, __pyx_t_1); + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 
= 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_10; __pyx_clineno = __pyx_t_13; __pyx_filename = __pyx_t_14; + goto __pyx_L1_error; + } + __pyx_L5:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":466 + * return func_name + * + * def show_return_values(self, frame, arg): # <<<<<<<<<<<<<< + * try: + * try: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_11); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.show_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_f_locals_back); + __Pyx_XDECREF(__pyx_v_return_values_dict); + __Pyx_XDECREF(__pyx_v_name); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":482 + * f_locals_back = None + * + * def remove_return_values(self, main_debugger, frame): # <<<<<<<<<<<<<< + * try: + * try: + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_19remove_return_values(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_19remove_return_values(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED PyObject *__pyx_v_main_debugger = 0; + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("remove_return_values (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_main_debugger,&__pyx_n_s_frame,0}; + PyObject* values[2] = {0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_main_debugger)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("remove_return_values", 1, 2, 2, 1); __PYX_ERR(0, 482, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "remove_return_values") < 0)) __PYX_ERR(0, 482, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 2) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + } + __pyx_v_main_debugger = values[0]; + __pyx_v_frame = values[1]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("remove_return_values", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 482, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.remove_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = 
__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_18remove_return_values(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_main_debugger, __pyx_v_frame); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_18remove_return_values(CYTHON_UNUSED struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v_main_debugger, PyObject *__pyx_v_frame) { + PyObject *__pyx_v_f_locals_back = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + int __pyx_t_11; + PyObject *__pyx_t_12 = NULL; + int __pyx_t_13; + char const *__pyx_t_14; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + __Pyx_RefNannySetupContext("remove_return_values", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":483 + * + * def remove_return_values(self, main_debugger, frame): + * try: # <<<<<<<<<<<<<< + * try: + * # Showing return values was turned off, we should remove them from locals dict. + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":484 + * def remove_return_values(self, main_debugger, frame): + * try: + * try: # <<<<<<<<<<<<<< + * # Showing return values was turned off, we should remove them from locals dict. + * # The values can be in the current frame or in the back one + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_3); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":487 + * # Showing return values was turned off, we should remove them from locals dict. 
+ * # The values can be in the current frame or in the back one + * frame.f_locals.pop(RETURN_VALUES_DICT, None) # <<<<<<<<<<<<<< + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_locals); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 487, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_pop); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 487, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 487, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_5, Py_None}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 487, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_7, __pyx_t_5, Py_None}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 487, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_9 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 487, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_9); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_9, 0+__pyx_t_8, __pyx_t_5); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_9, 1+__pyx_t_8, Py_None); + __pyx_t_5 = 0; + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_9, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 487, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":489 + * frame.f_locals.pop(RETURN_VALUES_DICT, None) + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) # <<<<<<<<<<<<<< + * if f_locals_back is not None: + * f_locals_back.pop(RETURN_VALUES_DICT, None) + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 489, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = __Pyx_GetAttr3(__pyx_t_4, __pyx_n_s_f_locals, Py_None); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 489, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_v_f_locals_back = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":490 + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: + */ + __pyx_t_10 = (__pyx_v_f_locals_back 
!= Py_None); + __pyx_t_11 = (__pyx_t_10 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":491 + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: + * f_locals_back.pop(RETURN_VALUES_DICT, None) # <<<<<<<<<<<<<< + * except: + * traceback.print_exc() + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_locals_back, __pyx_n_s_pop); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 491, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 491, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_5 = NULL; + __pyx_t_8 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_8 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_9, Py_None}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 491, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[3] = {__pyx_t_5, __pyx_t_9, Py_None}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_8, 2+__pyx_t_8); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 491, __pyx_L6_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 491, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_GIVEREF(__pyx_t_9); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_8, __pyx_t_9); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_8, Py_None); + __pyx_t_9 = 0; + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 491, __pyx_L6_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":490 + * + * f_locals_back = getattr(frame.f_back, "f_locals", None) + * if f_locals_back is not None: # <<<<<<<<<<<<<< + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":484 + * def remove_return_values(self, main_debugger, frame): + * try: + * try: # <<<<<<<<<<<<<< + * # Showing return values was turned off, we should remove them from locals dict. 
+ * # The values can be in the current frame or in the back one + */ + } + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L11_try_end; + __pyx_L6_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":492 + * if f_locals_back is not None: + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: # <<<<<<<<<<<<<< + * traceback.print_exc() + * finally: + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.remove_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_4, &__pyx_t_7) < 0) __PYX_ERR(0, 492, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":493 + * f_locals_back.pop(RETURN_VALUES_DICT, None) + * except: + * traceback.print_exc() # <<<<<<<<<<<<<< + * finally: + * f_locals_back = None + */ + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 493, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_12 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 493, __pyx_L8_except_error) + __Pyx_GOTREF(__pyx_t_12); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_12))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_12); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_12); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_12, function); + } + } + if (__pyx_t_5) { + __pyx_t_9 = __Pyx_PyObject_CallOneArg(__pyx_t_12, __pyx_t_5); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 493, __pyx_L8_except_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else { + __pyx_t_9 = __Pyx_PyObject_CallNoArg(__pyx_t_12); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 493, __pyx_L8_except_error) + } + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L7_exception_handled; + } + __pyx_L8_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":484 + * def remove_return_values(self, main_debugger, frame): + * try: + * try: # <<<<<<<<<<<<<< + * # Showing return values was turned off, we should remove them from locals dict. 
+ * # The values can be in the current frame or in the back one + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + goto __pyx_L4_error; + __pyx_L7_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3); + __pyx_L11_try_end:; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":495 + * traceback.print_exc() + * finally: + * f_locals_back = None # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + /*finally:*/ { + /*normal exit:*/{ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + goto __pyx_L5; + } + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L4_error:; + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_12); __pyx_t_12 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_15, &__pyx_t_16, &__pyx_t_17); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1) < 0)) __Pyx_ErrFetch(&__pyx_t_3, &__pyx_t_2, &__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_3); + __Pyx_XGOTREF(__pyx_t_2); + __Pyx_XGOTREF(__pyx_t_1); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __pyx_t_8 = __pyx_lineno; __pyx_t_13 = __pyx_clineno; __pyx_t_14 = __pyx_filename; + { + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_f_locals_back, Py_None); + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_16, __pyx_t_17); + } + __Pyx_XGIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_2); + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_ErrRestore(__pyx_t_3, __pyx_t_2, __pyx_t_1); + __pyx_t_3 = 0; __pyx_t_2 = 0; __pyx_t_1 = 0; __pyx_t_15 = 0; __pyx_t_16 = 0; __pyx_t_17 = 0; + __pyx_lineno = __pyx_t_8; __pyx_clineno = __pyx_t_13; __pyx_filename = __pyx_t_14; + goto __pyx_L1_error; + } + __pyx_L5:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":482 + * f_locals_back = None + * + * def remove_return_values(self, main_debugger, frame): # <<<<<<<<<<<<<< + * try: + * try: + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_12); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.remove_return_values", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_f_locals_back); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":498 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cpdef trace_dispatch(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef str filename; + * cdef bint is_exception_event; + */ + +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_21trace_dispatch(PyObject 
*__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg, int __pyx_skip_dispatch) { + PyObject *__pyx_v_filename = 0; + int __pyx_v_is_exception_event; + int __pyx_v_has_exception_breakpoints; + int __pyx_v_can_skip; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_info = 0; + int __pyx_v_step_cmd; + int __pyx_v_line; + int __pyx_v_is_line; + int __pyx_v_is_call; + int __pyx_v_is_return; + PyObject *__pyx_v_curr_func_name = 0; + int __pyx_v_exist_result; + PyObject *__pyx_v_frame_skips_cache = 0; + PyObject *__pyx_v_frame_cache_key = 0; + PyObject *__pyx_v_line_cache_key = 0; + int __pyx_v_breakpoints_in_line_cache; + int __pyx_v_breakpoints_in_frame_cache; + int __pyx_v_has_breakpoint_in_frame; + PyObject *__pyx_v_main_debugger = NULL; + PyObject *__pyx_v_thread = NULL; + PyObject *__pyx_v_plugin_manager = NULL; + PyObject *__pyx_v_flag = NULL; + PyObject *__pyx_v_need_trace_return = NULL; + PyObject *__pyx_v_stop_frame = NULL; + PyObject *__pyx_v_breakpoints_for_file = NULL; + PyObject *__pyx_v_breakpoint = NULL; + PyObject *__pyx_v_stop_info = NULL; + PyObject *__pyx_v_stop = NULL; + PyObject *__pyx_v_bp_type = NULL; + PyObject *__pyx_v_new_frame = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_v_condition = NULL; + PyObject *__pyx_v_back = NULL; + CYTHON_UNUSED PyObject *__pyx_v__ = NULL; + PyObject *__pyx_v_back_filename = NULL; + PyObject *__pyx_v_base = NULL; + long __pyx_v_should_skip; + PyObject *__pyx_v_plugin_stop = NULL; + PyObject *__pyx_v_f_code = NULL; + PyObject *__pyx_v_file_type = NULL; + CYTHON_UNUSED PyObject *__pyx_v_stopped_on_plugin = NULL; + PyObject *__pyx_v_retVal = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + int __pyx_t_9; + int __pyx_t_10; + PyObject *(*__pyx_t_11)(PyObject *); + int __pyx_t_12; + PyObject *__pyx_t_13 = NULL; + Py_ssize_t __pyx_t_14; + PyObject *(*__pyx_t_15)(PyObject *); + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = NULL; + PyObject *__pyx_t_18 = NULL; + int __pyx_t_19; + char const *__pyx_t_20; + PyObject *__pyx_t_21 = NULL; + PyObject *__pyx_t_22 = NULL; + PyObject *__pyx_t_23 = NULL; + PyObject *__pyx_t_24 = NULL; + PyObject *__pyx_t_25 = NULL; + PyObject *__pyx_t_26 = NULL; + int __pyx_t_27; + char const *__pyx_t_28; + __Pyx_RefNannySetupContext("trace_dispatch", 0); + __Pyx_INCREF(__pyx_v_frame); + /* Check if called by wrapper */ + if (unlikely(__pyx_skip_dispatch)) ; + /* Check if overridden in Python */ + else if (unlikely(Py_TYPE(((PyObject *)__pyx_v_self))->tp_dictoffset != 0)) { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 498, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!PyCFunction_Check(__pyx_t_1) || (PyCFunction_GET_FUNCTION(__pyx_t_1) != (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_21trace_dispatch)) { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_t_1); + __pyx_t_3 = __pyx_t_1; __pyx_t_4 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && 
unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 498, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 498, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_6 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 498, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, __pyx_v_arg); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 498, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":521 + * # ENDIF + * + * main_debugger, filename, info, thread, frame_skips_cache, frame_cache_key = self._args # <<<<<<<<<<<<<< + * # print('frame trace_dispatch', frame.f_lineno, frame.f_code.co_name, event, info.pydev_step_cmd) + * try: + */ + __pyx_t_1 = __pyx_v_self->_args; + __Pyx_INCREF(__pyx_t_1); + if (likely(__pyx_t_1 != Py_None)) { + PyObject* sequence = __pyx_t_1; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 6)) { + if (size > 6) __Pyx_RaiseTooManyValuesError(6); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 521, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 2); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 3); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 4); + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 5); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + #else + { + Py_ssize_t i; + PyObject** temps[6] = {&__pyx_t_2,&__pyx_t_3,&__pyx_t_6,&__pyx_t_4,&__pyx_t_7,&__pyx_t_8}; + for (i=0; i < 6; i++) { + PyObject* item = PySequence_ITEM(sequence, i); if (unlikely(!item)) __PYX_ERR(0, 521, __pyx_L1_error) + 
__Pyx_GOTREF(item); + *(temps[i]) = item; + } + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(0, 521, __pyx_L1_error) + } + if (!(likely(PyString_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(0, 521, __pyx_L1_error) + if (!(likely(((__pyx_t_6) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_6, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 521, __pyx_L1_error) + if (!(likely(PyDict_CheckExact(__pyx_t_7))||((__pyx_t_7) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_7)->tp_name), 0))) __PYX_ERR(0, 521, __pyx_L1_error) + if (!(likely(PyTuple_CheckExact(__pyx_t_8))||((__pyx_t_8) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_8)->tp_name), 0))) __PYX_ERR(0, 521, __pyx_L1_error) + __pyx_v_main_debugger = __pyx_t_2; + __pyx_t_2 = 0; + __pyx_v_filename = ((PyObject*)__pyx_t_3); + __pyx_t_3 = 0; + __pyx_v_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_6); + __pyx_t_6 = 0; + __pyx_v_thread = __pyx_t_4; + __pyx_t_4 = 0; + __pyx_v_frame_skips_cache = ((PyObject*)__pyx_t_7); + __pyx_t_7 = 0; + __pyx_v_frame_cache_key = ((PyObject*)__pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":523 + * main_debugger, filename, info, thread, frame_skips_cache, frame_cache_key = self._args + * # print('frame trace_dispatch', frame.f_lineno, frame.f_code.co_name, event, info.pydev_step_cmd) + * try: # <<<<<<<<<<<<<< + * info.is_tracing = True + * line = frame.f_lineno + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":524 + * # print('frame trace_dispatch', frame.f_lineno, frame.f_code.co_name, event, info.pydev_step_cmd) + * try: + * info.is_tracing = True # <<<<<<<<<<<<<< + * line = frame.f_lineno + * line_cache_key = (frame_cache_key, line) + */ + __pyx_v_info->is_tracing = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":525 + * try: + * info.is_tracing = True + * line = frame.f_lineno # <<<<<<<<<<<<<< + * line_cache_key = (frame_cache_key, line) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 525, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 525, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_line = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":526 + * info.is_tracing = True + * line = frame.f_lineno + * line_cache_key = (frame_cache_key, line) # <<<<<<<<<<<<<< + * + * if main_debugger._finish_debugging_session: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 526, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 526, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_INCREF(__pyx_v_frame_cache_key); + __Pyx_GIVEREF(__pyx_v_frame_cache_key); + PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_v_frame_cache_key); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_v_line_cache_key = ((PyObject*)__pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":528 + * line_cache_key = (frame_cache_key, line) + * + * if main_debugger._finish_debugging_session: # 
<<<<<<<<<<<<<< + * return None + * + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_finish_debugging_session); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 528, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 528, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":529 + * + * if main_debugger._finish_debugging_session: + * return None # <<<<<<<<<<<<<< + * + * plugin_manager = main_debugger.plugin + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":528 + * line_cache_key = (frame_cache_key, line) + * + * if main_debugger._finish_debugging_session: # <<<<<<<<<<<<<< + * return None + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":531 + * return None + * + * plugin_manager = main_debugger.plugin # <<<<<<<<<<<<<< + * + * is_exception_event = event == 'exception' + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 531, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_v_plugin_manager = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":533 + * plugin_manager = main_debugger.plugin + * + * is_exception_event = event == 'exception' # <<<<<<<<<<<<<< + * has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks + * + */ + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_exception, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 533, __pyx_L4_error) + __pyx_v_is_exception_event = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":534 + * + * is_exception_event = event == 'exception' + * has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks # <<<<<<<<<<<<<< + * + * if is_exception_event: + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_break_on_caught_exceptions); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 534, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 534, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (!__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L7_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_exception_breaks); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 534, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 534, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __pyx_t_10; + __pyx_L7_bool_binop_done:; + __pyx_v_has_exception_breakpoints = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":536 + * has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks + * + * if is_exception_event: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * flag, frame = self.should_stop_on_exception(frame, event, arg) + */ + __pyx_t_9 = (__pyx_v_is_exception_event != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":537 + * + * if is_exception_event: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * if flag: + */ + __pyx_t_9 = (__pyx_v_has_exception_breakpoints != 0); + if 
(__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":538 + * if is_exception_event: + * if has_exception_breakpoints: + * flag, frame = self.should_stop_on_exception(frame, event, arg) # <<<<<<<<<<<<<< + * if flag: + * self.handle_exception(frame, event, arg) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_should_stop_on_exception); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_4 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_5, __pyx_v_arg); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_8))) || (PyList_CheckExact(__pyx_t_8))) { + PyObject* sequence = __pyx_t_8; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 538, __pyx_L4_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_1 = PyList_GET_ITEM(sequence, 0); + __pyx_t_4 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_4); + #else + __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + #endif + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } else { + 
Py_ssize_t index = -1; + __pyx_t_7 = PyObject_GetIter(__pyx_t_8); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 538, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = Py_TYPE(__pyx_t_7)->tp_iternext; + index = 0; __pyx_t_1 = __pyx_t_11(__pyx_t_7); if (unlikely(!__pyx_t_1)) goto __pyx_L11_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + index = 1; __pyx_t_4 = __pyx_t_11(__pyx_t_7); if (unlikely(!__pyx_t_4)) goto __pyx_L11_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_7), 2) < 0) __PYX_ERR(0, 538, __pyx_L4_error) + __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L12_unpacking_done; + __pyx_L11_unpacking_failed:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_11 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 538, __pyx_L4_error) + __pyx_L12_unpacking_done:; + } + __pyx_v_flag = __pyx_t_1; + __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":539 + * if has_exception_breakpoints: + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * if flag: # <<<<<<<<<<<<<< + * self.handle_exception(frame, event, arg) + * return self.trace_dispatch + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_flag); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 539, __pyx_L4_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":540 + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * if flag: + * self.handle_exception(frame, event, arg) # <<<<<<<<<<<<<< + * return self.trace_dispatch + * is_line = False + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_handle_exception); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 540, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 540, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 540, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_7 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 540, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_5, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + 
PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_5, __pyx_v_arg); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 540, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":541 + * if flag: + * self.handle_exception(frame, event, arg) + * return self.trace_dispatch # <<<<<<<<<<<<<< + * is_line = False + * is_return = False + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 541, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":539 + * if has_exception_breakpoints: + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * if flag: # <<<<<<<<<<<<<< + * self.handle_exception(frame, event, arg) + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":537 + * + * if is_exception_event: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * flag, frame = self.should_stop_on_exception(frame, event, arg) + * if flag: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":542 + * self.handle_exception(frame, event, arg) + * return self.trace_dispatch + * is_line = False # <<<<<<<<<<<<<< + * is_return = False + * is_call = False + */ + __pyx_v_is_line = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":543 + * return self.trace_dispatch + * is_line = False + * is_return = False # <<<<<<<<<<<<<< + * is_call = False + * else: + */ + __pyx_v_is_return = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":544 + * is_line = False + * is_return = False + * is_call = False # <<<<<<<<<<<<<< + * else: + * is_line = event == 'line' + */ + __pyx_v_is_call = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":536 + * has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks + * + * if is_exception_event: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * flag, frame = self.should_stop_on_exception(frame, event, arg) + */ + goto __pyx_L9; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":546 + * is_call = False + * else: + * is_line = event == 'line' # <<<<<<<<<<<<<< + * is_return = event == 'return' + * is_call = event == 'call' + */ + /*else*/ { + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_line, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 546, __pyx_L4_error) + __pyx_v_is_line = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":547 + * else: + * is_line = event == 'line' + * is_return = event == 'return' # <<<<<<<<<<<<<< + * is_call = event == 'call' + * if not is_line and not is_return and not is_call: + */ + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_return, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 547, __pyx_L4_error) + __pyx_v_is_return = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":548 + * is_line = event == 'line' + * is_return = event == 'return' + * is_call = event == 'call' # <<<<<<<<<<<<<< + * if not is_line and not is_return and not is_call: + * # I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace. 
+ */ + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 548, __pyx_L4_error) + __pyx_v_is_call = __pyx_t_9; + + /* "_pydevd_bundle/pydevd_cython.pyx":549 + * is_return = event == 'return' + * is_call = event == 'call' + * if not is_line and not is_return and not is_call: # <<<<<<<<<<<<<< + * # I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace. + * return None + */ + __pyx_t_10 = ((!(__pyx_v_is_line != 0)) != 0); + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L15_bool_binop_done; + } + __pyx_t_10 = ((!(__pyx_v_is_return != 0)) != 0); + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L15_bool_binop_done; + } + __pyx_t_10 = ((!(__pyx_v_is_call != 0)) != 0); + __pyx_t_9 = __pyx_t_10; + __pyx_L15_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":551 + * if not is_line and not is_return and not is_call: + * # I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace. + * return None # <<<<<<<<<<<<<< + * + * need_trace_return = False + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":549 + * is_return = event == 'return' + * is_call = event == 'call' + * if not is_line and not is_return and not is_call: # <<<<<<<<<<<<<< + * # I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace. + * return None + */ + } + } + __pyx_L9:; + + /* "_pydevd_bundle/pydevd_cython.pyx":553 + * return None + * + * need_trace_return = False # <<<<<<<<<<<<<< + * if is_call and main_debugger.signature_factory: + * need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + */ + __Pyx_INCREF(Py_False); + __pyx_v_need_trace_return = Py_False; + + /* "_pydevd_bundle/pydevd_cython.pyx":554 + * + * need_trace_return = False + * if is_call and main_debugger.signature_factory: # <<<<<<<<<<<<<< + * need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + * if is_return and main_debugger.signature_factory: + */ + __pyx_t_10 = (__pyx_v_is_call != 0); + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L19_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_signature_factory); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 554, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 554, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __pyx_t_10; + __pyx_L19_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":555 + * need_trace_return = False + * if is_call and main_debugger.signature_factory: + * need_trace_return = send_signature_call_trace(main_debugger, frame, filename) # <<<<<<<<<<<<<< + * if is_return and main_debugger.signature_factory: + * send_signature_return_trace(main_debugger, frame, filename, arg) + */ + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_send_signature_call_trace); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 555, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + 
__Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_filename}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 555, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_7, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_filename}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 555, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_1 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 555, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_5, __pyx_v_filename); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_1, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 555, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_need_trace_return, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":554 + * + * need_trace_return = False + * if is_call and main_debugger.signature_factory: # <<<<<<<<<<<<<< + * need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + * if is_return and main_debugger.signature_factory: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":556 + * if is_call and main_debugger.signature_factory: + * need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + * if is_return and main_debugger.signature_factory: # <<<<<<<<<<<<<< + * send_signature_return_trace(main_debugger, frame, filename, arg) + * + */ + __pyx_t_10 = (__pyx_v_is_return != 0); + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L22_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_signature_factory); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 556, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 556, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __pyx_t_10; + __pyx_L22_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":557 + * need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + * if is_return and main_debugger.signature_factory: + * send_signature_return_trace(main_debugger, frame, filename, arg) # <<<<<<<<<<<<<< + * + * stop_frame = info.pydev_step_stop + */ + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_send_signature_return_trace); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 557, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = NULL; + 
__pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[5] = {__pyx_t_1, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_filename, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 4+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 557, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[5] = {__pyx_t_1, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_filename, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 4+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 557, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_7 = PyTuple_New(4+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 557, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_5, __pyx_v_filename); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_5, __pyx_v_arg); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 557, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":556 + * if is_call and main_debugger.signature_factory: + * need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + * if is_return and main_debugger.signature_factory: # <<<<<<<<<<<<<< + * send_signature_return_trace(main_debugger, frame, filename, arg) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":559 + * send_signature_return_trace(main_debugger, frame, filename, arg) + * + * stop_frame = info.pydev_step_stop # <<<<<<<<<<<<<< + * step_cmd = info.pydev_step_cmd + * + */ + __pyx_t_8 = __pyx_v_info->pydev_step_stop; + __Pyx_INCREF(__pyx_t_8); + __pyx_v_stop_frame = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":560 + * + * stop_frame = info.pydev_step_stop + * step_cmd = info.pydev_step_cmd # <<<<<<<<<<<<<< + * + * if is_exception_event: + */ + __pyx_t_5 = __pyx_v_info->pydev_step_cmd; + __pyx_v_step_cmd = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":562 + * step_cmd = info.pydev_step_cmd + * + * if is_exception_event: # <<<<<<<<<<<<<< + * breakpoints_for_file = None + * # CMD_STEP_OVER = 108 + */ + __pyx_t_9 = (__pyx_v_is_exception_event != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":563 + * + * if is_exception_event: + * breakpoints_for_file = None # <<<<<<<<<<<<<< + * # CMD_STEP_OVER = 108 + * if 
stop_frame and stop_frame is not frame and step_cmd == 108 and \ + */ + __Pyx_INCREF(Py_None); + __pyx_v_breakpoints_for_file = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":565 + * breakpoints_for_file = None + * # CMD_STEP_OVER = 108 + * if stop_frame and stop_frame is not frame and step_cmd == 108 and \ # <<<<<<<<<<<<<< + * arg[0] in (StopIteration, GeneratorExit) and arg[2] is None: + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_stop_frame); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 565, __pyx_L4_error) + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L26_bool_binop_done; + } + __pyx_t_10 = (__pyx_v_stop_frame != __pyx_v_frame); + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + } else { + __pyx_t_9 = __pyx_t_12; + goto __pyx_L26_bool_binop_done; + } + __pyx_t_12 = ((__pyx_v_step_cmd == 0x6C) != 0); + if (__pyx_t_12) { + } else { + __pyx_t_9 = __pyx_t_12; + goto __pyx_L26_bool_binop_done; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":566 + * # CMD_STEP_OVER = 108 + * if stop_frame and stop_frame is not frame and step_cmd == 108 and \ + * arg[0] in (StopIteration, GeneratorExit) and arg[2] is None: # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + * info.pydev_step_stop = None + */ + __pyx_t_8 = __Pyx_GetItemInt(__pyx_v_arg, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 566, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_4 = PyObject_RichCompare(__pyx_t_8, __pyx_builtin_StopIteration, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 566, __pyx_L4_error) + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 566, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L31_bool_binop_done; + } + __pyx_t_4 = PyObject_RichCompare(__pyx_t_8, __pyx_builtin_GeneratorExit, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 566, __pyx_L4_error) + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 566, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_12 = __pyx_t_10; + __pyx_L31_bool_binop_done:; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L26_bool_binop_done; + } + __pyx_t_8 = __Pyx_GetItemInt(__pyx_v_arg, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 566, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_10 = (__pyx_t_8 == Py_None); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = (__pyx_t_10 != 0); + __pyx_t_9 = __pyx_t_12; + __pyx_L26_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":565 + * breakpoints_for_file = None + * # CMD_STEP_OVER = 108 + * if stop_frame and stop_frame is not frame and step_cmd == 108 and \ # <<<<<<<<<<<<<< + * arg[0] in (StopIteration, GeneratorExit) and arg[2] is None: + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + */ + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":567 + * if stop_frame and stop_frame is not frame and step_cmd == 108 and \ + * arg[0] in (StopIteration, GeneratorExit) and arg[2] is None: + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 # <<<<<<<<<<<<<< + * info.pydev_step_stop = None + * else: + */ + __pyx_v_info->pydev_step_cmd = 0x6B; + + /* "_pydevd_bundle/pydevd_cython.pyx":568 + * arg[0] in 
(StopIteration, GeneratorExit) and arg[2] is None: + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * else: + * # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":565 + * breakpoints_for_file = None + * # CMD_STEP_OVER = 108 + * if stop_frame and stop_frame is not frame and step_cmd == 108 and \ # <<<<<<<<<<<<<< + * arg[0] in (StopIteration, GeneratorExit) and arg[2] is None: + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":562 + * step_cmd = info.pydev_step_cmd + * + * if is_exception_event: # <<<<<<<<<<<<<< + * breakpoints_for_file = None + * # CMD_STEP_OVER = 108 + */ + goto __pyx_L24; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":576 + * # Note: this is especially troublesome when we're skipping code with the + * # @DontTrace comment. + * if stop_frame is frame and is_return and step_cmd in (109, 108): # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 # <<<<<<<<<<<<<< + * if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + */ + /*else*/ { + __pyx_t_12 = (__pyx_v_stop_frame == __pyx_v_frame); + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L34_bool_binop_done; + } + __pyx_t_10 = (__pyx_v_is_return != 0); + if (__pyx_t_10) { + } else { + __pyx_t_9 = __pyx_t_10; + goto __pyx_L34_bool_binop_done; + } + switch (__pyx_v_step_cmd) { + case 0x6D: + case 0x6C: + __pyx_t_10 = 1; + break; + default: + __pyx_t_10 = 0; + break; + } + __pyx_t_12 = (__pyx_t_10 != 0); + __pyx_t_9 = __pyx_t_12; + __pyx_L34_bool_binop_done:; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":577 + * # @DontTrace comment. 
+ * if stop_frame is frame and is_return and step_cmd in (109, 108): # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + * if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + * info.pydev_step_stop = None + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 577, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_co_flags); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 577, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = __Pyx_PyInt_AndObjC(__pyx_t_4, __pyx_int_32, 0x20, 0); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 577, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 577, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = ((!__pyx_t_9) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":578 + * if stop_frame is frame and is_return and step_cmd in (109, 108): # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + * if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 # <<<<<<<<<<<<<< + * info.pydev_step_stop = None + * + */ + __pyx_v_info->pydev_step_cmd = 0x6B; + + /* "_pydevd_bundle/pydevd_cython.pyx":579 + * if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * + * breakpoints_for_file = main_debugger.breakpoints.get(filename) + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":577 + * # @DontTrace comment. + * if stop_frame is frame and is_return and step_cmd in (109, 108): # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + * if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) # <<<<<<<<<<<<<< + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + * info.pydev_step_stop = None + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":576 + * # Note: this is especially troublesome when we're skipping code with the + * # @DontTrace comment. 
+ * if stop_frame is frame and is_return and step_cmd in (109, 108): # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 # <<<<<<<<<<<<<< + * if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) + * info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":581 + * info.pydev_step_stop = None + * + * breakpoints_for_file = main_debugger.breakpoints.get(filename) # <<<<<<<<<<<<<< + * + * can_skip = False + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_breakpoints); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 581, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_get); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 581, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_4) { + __pyx_t_8 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_filename); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 581, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_filename}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 581, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_filename}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 581, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 581, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_filename); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_1, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 581, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_v_breakpoints_for_file = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":583 + * breakpoints_for_file = main_debugger.breakpoints.get(filename) + * + * can_skip = False # <<<<<<<<<<<<<< + * + * if info.pydev_state == 1: # STATE_RUN = 1 + */ + __pyx_v_can_skip = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":585 + * can_skip = False + * + * if info.pydev_state == 1: # STATE_RUN = 1 # <<<<<<<<<<<<<< + * #we can skip if: + * #- we have no stop marked + */ + __pyx_t_12 = ((__pyx_v_info->pydev_state == 1) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":590 + * #- we should make a step return/step over and we're not in the current frame + * # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + * can_skip = (step_cmd == -1 and stop_frame is None)\ # <<<<<<<<<<<<<< + * or (step_cmd in (109, 108) and stop_frame is not frame) + * + */ + __pyx_t_9 = ((__pyx_v_step_cmd == -1L) != 0); + 
if (!__pyx_t_9) { + goto __pyx_L40_next_or; + } else { + } + __pyx_t_9 = (__pyx_v_stop_frame == Py_None); + __pyx_t_10 = (__pyx_t_9 != 0); + if (!__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L39_bool_binop_done; + } + __pyx_L40_next_or:; + + /* "_pydevd_bundle/pydevd_cython.pyx":591 + * # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + * can_skip = (step_cmd == -1 and stop_frame is None)\ + * or (step_cmd in (109, 108) and stop_frame is not frame) # <<<<<<<<<<<<<< + * + * if can_skip: + */ + switch (__pyx_v_step_cmd) { + case 0x6D: + case 0x6C: + __pyx_t_10 = 1; + break; + default: + __pyx_t_10 = 0; + break; + } + __pyx_t_9 = (__pyx_t_10 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L39_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_stop_frame != __pyx_v_frame); + __pyx_t_10 = (__pyx_t_9 != 0); + __pyx_t_12 = __pyx_t_10; + __pyx_L39_bool_binop_done:; + __pyx_v_can_skip = __pyx_t_12; + + /* "_pydevd_bundle/pydevd_cython.pyx":593 + * or (step_cmd in (109, 108) and stop_frame is not frame) + * + * if can_skip: # <<<<<<<<<<<<<< + * if plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) + */ + __pyx_t_12 = (__pyx_v_can_skip != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":594 + * + * if can_skip: + * if plugin_manager is not None and main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) + * + */ + __pyx_t_10 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_9 = (__pyx_t_10 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L45_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 594, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 594, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = __pyx_t_9; + __pyx_L45_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":595 + * if can_skip: + * if plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) # <<<<<<<<<<<<<< + * + * # CMD_STEP_OVER = 108 + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_can_not_skip); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 595, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 595, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame}; + __pyx_t_8 = 
__Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 595, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_4 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 595, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, ((PyObject *)__pyx_v_self)); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_5, __pyx_v_frame); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_4, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 595, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 595, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_can_skip = (!__pyx_t_12); + + /* "_pydevd_bundle/pydevd_cython.pyx":594 + * + * if can_skip: + * if plugin_manager is not None and main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":598 + * + * # CMD_STEP_OVER = 108 + * if can_skip and is_return and main_debugger.show_return_values and info.pydev_step_cmd == 108 and frame.f_back is info.pydev_step_stop: # <<<<<<<<<<<<<< + * # trace function for showing return values after step over + * can_skip = False + */ + __pyx_t_9 = (__pyx_v_can_skip != 0); + if (__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L48_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_is_return != 0); + if (__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L48_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_show_return_values); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 598, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 598, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L48_bool_binop_done; + } + __pyx_t_9 = ((__pyx_v_info->pydev_step_cmd == 0x6C) != 0); + if (__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L48_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 598, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = (__pyx_t_8 == __pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_10 = (__pyx_t_9 != 0); + __pyx_t_12 = __pyx_t_10; + __pyx_L48_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":600 + * if can_skip and is_return and main_debugger.show_return_values and info.pydev_step_cmd == 108 and frame.f_back is info.pydev_step_stop: + * # trace function for showing return values after step over + * can_skip = False # <<<<<<<<<<<<<< + * + * # Let's check to see if we are in a function that has a breakpoint. 
If we don't have a breakpoint, + */ + __pyx_v_can_skip = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":598 + * + * # CMD_STEP_OVER = 108 + * if can_skip and is_return and main_debugger.show_return_values and info.pydev_step_cmd == 108 and frame.f_back is info.pydev_step_stop: # <<<<<<<<<<<<<< + * # trace function for showing return values after step over + * can_skip = False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":593 + * or (step_cmd in (109, 108) and stop_frame is not frame) + * + * if can_skip: # <<<<<<<<<<<<<< + * if plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":585 + * can_skip = False + * + * if info.pydev_state == 1: # STATE_RUN = 1 # <<<<<<<<<<<<<< + * #we can skip if: + * #- we have no stop marked + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":606 + * # also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway, + * # so, that's why the additional checks are there. + * if not breakpoints_for_file: # <<<<<<<<<<<<<< + * if can_skip: + * if has_exception_breakpoints: + */ + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoints_for_file); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 606, __pyx_L4_error) + __pyx_t_10 = ((!__pyx_t_12) != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":607 + * # so, that's why the additional checks are there. + * if not breakpoints_for_file: + * if can_skip: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + __pyx_t_10 = (__pyx_v_can_skip != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":608 + * if not breakpoints_for_file: + * if can_skip: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + __pyx_t_10 = (__pyx_v_has_exception_breakpoints != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":609 + * if can_skip: + * if has_exception_breakpoints: + * return self.trace_exception # <<<<<<<<<<<<<< + * else: + * if need_trace_return: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_exception); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 609, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":608 + * if not breakpoints_for_file: + * if can_skip: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":611 + * return self.trace_exception + * else: + * if need_trace_return: # <<<<<<<<<<<<<< + * return self.trace_return + * else: + */ + /*else*/ { + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_need_trace_return); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 611, __pyx_L4_error) + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":612 + * else: + * if need_trace_return: + * return self.trace_return # <<<<<<<<<<<<<< + * else: + * return None + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_return); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 612, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":611 + * return self.trace_exception + * else: + * if need_trace_return: # <<<<<<<<<<<<<< + * return self.trace_return + * 
else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":614 + * return self.trace_return + * else: + * return None # <<<<<<<<<<<<<< + * + * else: + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L3_return; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":607 + * # so, that's why the additional checks are there. + * if not breakpoints_for_file: + * if can_skip: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":606 + * # also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway, + * # so, that's why the additional checks are there. + * if not breakpoints_for_file: # <<<<<<<<<<<<<< + * if can_skip: + * if has_exception_breakpoints: + */ + goto __pyx_L53; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":618 + * else: + * # When cached, 0 means we don't have a breakpoint and 1 means we have. + * if can_skip: # <<<<<<<<<<<<<< + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: + */ + /*else*/ { + __pyx_t_10 = (__pyx_v_can_skip != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":619 + * # When cached, 0 means we don't have a breakpoint and 1 means we have. + * if can_skip: + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) # <<<<<<<<<<<<<< + * if breakpoints_in_line_cache == 0: + * return self.trace_dispatch + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "get"); + __PYX_ERR(0, 619, __pyx_L4_error) + } + __pyx_t_8 = __Pyx_PyDict_GetItemDefault(__pyx_v_frame_skips_cache, __pyx_v_line_cache_key, __pyx_int_neg_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 619, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_8); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 619, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_breakpoints_in_line_cache = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":620 + * if can_skip: + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + __pyx_t_10 = ((__pyx_v_breakpoints_in_line_cache == 0) != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":621 + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 621, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":620 + * if can_skip: + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: # <<<<<<<<<<<<<< + * return self.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":618 + * else: + * # When cached, 0 means we don't have a breakpoint and 1 means we have. 
+ * if can_skip: # <<<<<<<<<<<<<< + * breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + * if breakpoints_in_line_cache == 0: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":623 + * return self.trace_dispatch + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) # <<<<<<<<<<<<<< + * if breakpoints_in_frame_cache != -1: + * # Gotten from cache. + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "get"); + __PYX_ERR(0, 623, __pyx_L4_error) + } + __pyx_t_8 = __Pyx_PyDict_GetItemDefault(__pyx_v_frame_skips_cache, __pyx_v_frame_cache_key, __pyx_int_neg_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 623, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_8); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 623, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_breakpoints_in_frame_cache = __pyx_t_5; + + /* "_pydevd_bundle/pydevd_cython.pyx":624 + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + * if breakpoints_in_frame_cache != -1: # <<<<<<<<<<<<<< + * # Gotten from cache. + * has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + */ + __pyx_t_10 = ((__pyx_v_breakpoints_in_frame_cache != -1L) != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":626 + * if breakpoints_in_frame_cache != -1: + * # Gotten from cache. + * has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 # <<<<<<<<<<<<<< + * + * else: + */ + __pyx_v_has_breakpoint_in_frame = (__pyx_v_breakpoints_in_frame_cache == 1); + + /* "_pydevd_bundle/pydevd_cython.pyx":624 + * + * breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + * if breakpoints_in_frame_cache != -1: # <<<<<<<<<<<<<< + * # Gotten from cache. 
+ * has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + */ + goto __pyx_L59; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":629 + * + * else: + * has_breakpoint_in_frame = False # <<<<<<<<<<<<<< + * # Checks the breakpoint to see if there is a context match in some function + * curr_func_name = frame.f_code.co_name + */ + /*else*/ { + __pyx_v_has_breakpoint_in_frame = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":631 + * has_breakpoint_in_frame = False + * # Checks the breakpoint to see if there is a context match in some function + * curr_func_name = frame.f_code.co_name # <<<<<<<<<<<<<< + * + * #global context is set with an empty name + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 631, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_co_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 631, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_7))||((__pyx_t_7) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_7)->tp_name), 0))) __PYX_ERR(0, 631, __pyx_L4_error) + __pyx_v_curr_func_name = ((PyObject*)__pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":634 + * + * #global context is set with an empty name + * if curr_func_name in ('?', ''): # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + __Pyx_INCREF(__pyx_v_curr_func_name); + __pyx_t_13 = __pyx_v_curr_func_name; + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_t_13, __pyx_kp_s__5, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 634, __pyx_L4_error) + __pyx_t_9 = (__pyx_t_12 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L61_bool_binop_done; + } + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_t_13, __pyx_kp_s_module, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 634, __pyx_L4_error) + __pyx_t_12 = (__pyx_t_9 != 0); + __pyx_t_10 = __pyx_t_12; + __pyx_L61_bool_binop_done:; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":635 + * #global context is set with an empty name + * if curr_func_name in ('?', ''): + * curr_func_name = '' # <<<<<<<<<<<<<< + * + * for breakpoint in dict_iter_values(breakpoints_for_file): #jython does not support itervalues() + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_DECREF_SET(__pyx_v_curr_func_name, __pyx_kp_s_); + + /* "_pydevd_bundle/pydevd_cython.pyx":634 + * + * #global context is set with an empty name + * if curr_func_name in ('?', ''): # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":637 + * curr_func_name = '' + * + * for breakpoint in dict_iter_values(breakpoints_for_file): #jython does not support itervalues() # <<<<<<<<<<<<<< + * #will match either global or some function + * if breakpoint.func_name in ('None', curr_func_name): + */ + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_dict_iter_values); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + } + } + if (!__pyx_t_4) { + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_8, 
__pyx_v_breakpoints_for_file); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_breakpoints_for_file}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_breakpoints_for_file}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_INCREF(__pyx_v_breakpoints_for_file); + __Pyx_GIVEREF(__pyx_v_breakpoints_for_file); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_breakpoints_for_file); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (likely(PyList_CheckExact(__pyx_t_7)) || PyTuple_CheckExact(__pyx_t_7)) { + __pyx_t_8 = __pyx_t_7; __Pyx_INCREF(__pyx_t_8); __pyx_t_14 = 0; + __pyx_t_15 = NULL; + } else { + __pyx_t_14 = -1; __pyx_t_8 = PyObject_GetIter(__pyx_t_7); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_15 = Py_TYPE(__pyx_t_8)->tp_iternext; if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 637, __pyx_L4_error) + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + for (;;) { + if (likely(!__pyx_t_15)) { + if (likely(PyList_CheckExact(__pyx_t_8))) { + if (__pyx_t_14 >= PyList_GET_SIZE(__pyx_t_8)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_7 = PyList_GET_ITEM(__pyx_t_8, __pyx_t_14); __Pyx_INCREF(__pyx_t_7); __pyx_t_14++; if (unlikely(0 < 0)) __PYX_ERR(0, 637, __pyx_L4_error) + #else + __pyx_t_7 = PySequence_ITEM(__pyx_t_8, __pyx_t_14); __pyx_t_14++; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + } else { + if (__pyx_t_14 >= PyTuple_GET_SIZE(__pyx_t_8)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_7 = PyTuple_GET_ITEM(__pyx_t_8, __pyx_t_14); __Pyx_INCREF(__pyx_t_7); __pyx_t_14++; if (unlikely(0 < 0)) __PYX_ERR(0, 637, __pyx_L4_error) + #else + __pyx_t_7 = PySequence_ITEM(__pyx_t_8, __pyx_t_14); __pyx_t_14++; if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 637, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + } + } else { + __pyx_t_7 = __pyx_t_15(__pyx_t_8); + if (unlikely(!__pyx_t_7)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 637, __pyx_L4_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_7); + } + __Pyx_XDECREF_SET(__pyx_v_breakpoint, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":639 + * for breakpoint in dict_iter_values(breakpoints_for_file): #jython does not support itervalues() + * #will match either global or some function + * if 
breakpoint.func_name in ('None', curr_func_name): # <<<<<<<<<<<<<< + * has_breakpoint_in_frame = True + * break + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_func_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 639, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_10 = (__Pyx_PyString_Equals(__pyx_t_7, __pyx_n_s_None, Py_EQ)); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 639, __pyx_L4_error) + if (!__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L66_bool_binop_done; + } + __pyx_t_10 = (__Pyx_PyString_Equals(__pyx_t_7, __pyx_v_curr_func_name, Py_EQ)); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 639, __pyx_L4_error) + __pyx_t_12 = __pyx_t_10; + __pyx_L66_bool_binop_done:; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":640 + * #will match either global or some function + * if breakpoint.func_name in ('None', curr_func_name): + * has_breakpoint_in_frame = True # <<<<<<<<<<<<<< + * break + * + */ + __pyx_v_has_breakpoint_in_frame = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":641 + * if breakpoint.func_name in ('None', curr_func_name): + * has_breakpoint_in_frame = True + * break # <<<<<<<<<<<<<< + * + * # Cache the value (1 or 0 or -1 for default because of cython). + */ + goto __pyx_L64_break; + + /* "_pydevd_bundle/pydevd_cython.pyx":639 + * for breakpoint in dict_iter_values(breakpoints_for_file): #jython does not support itervalues() + * #will match either global or some function + * if breakpoint.func_name in ('None', curr_func_name): # <<<<<<<<<<<<<< + * has_breakpoint_in_frame = True + * break + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":637 + * curr_func_name = '' + * + * for breakpoint in dict_iter_values(breakpoints_for_file): #jython does not support itervalues() # <<<<<<<<<<<<<< + * #will match either global or some function + * if breakpoint.func_name in ('None', curr_func_name): + */ + } + __pyx_L64_break:; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":644 + * + * # Cache the value (1 or 0 or -1 for default because of cython). + * if has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * frame_skips_cache[frame_cache_key] = 1 + * else: + */ + __pyx_t_10 = (__pyx_v_has_breakpoint_in_frame != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":645 + * # Cache the value (1 or 0 or -1 for default because of cython). + * if has_breakpoint_in_frame: + * frame_skips_cache[frame_cache_key] = 1 # <<<<<<<<<<<<<< + * else: + * frame_skips_cache[frame_cache_key] = 0 + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 645, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_frame_skips_cache, __pyx_v_frame_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 645, __pyx_L4_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":644 + * + * # Cache the value (1 or 0 or -1 for default because of cython). 
+ * if has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * frame_skips_cache[frame_cache_key] = 1 + * else: + */ + goto __pyx_L68; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":647 + * frame_skips_cache[frame_cache_key] = 1 + * else: + * frame_skips_cache[frame_cache_key] = 0 # <<<<<<<<<<<<<< + * + * + */ + /*else*/ { + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 647, __pyx_L4_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_frame_skips_cache, __pyx_v_frame_cache_key, __pyx_int_0) < 0)) __PYX_ERR(0, 647, __pyx_L4_error) + } + __pyx_L68:; + } + __pyx_L59:; + + /* "_pydevd_bundle/pydevd_cython.pyx":650 + * + * + * if can_skip and not has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + __pyx_t_12 = (__pyx_v_can_skip != 0); + if (__pyx_t_12) { + } else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L70_bool_binop_done; + } + __pyx_t_12 = ((!(__pyx_v_has_breakpoint_in_frame != 0)) != 0); + __pyx_t_10 = __pyx_t_12; + __pyx_L70_bool_binop_done:; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":651 + * + * if can_skip and not has_breakpoint_in_frame: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + __pyx_t_10 = (__pyx_v_has_exception_breakpoints != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":652 + * if can_skip and not has_breakpoint_in_frame: + * if has_exception_breakpoints: + * return self.trace_exception # <<<<<<<<<<<<<< + * else: + * if need_trace_return: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_exception); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 652, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":651 + * + * if can_skip and not has_breakpoint_in_frame: + * if has_exception_breakpoints: # <<<<<<<<<<<<<< + * return self.trace_exception + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":654 + * return self.trace_exception + * else: + * if need_trace_return: # <<<<<<<<<<<<<< + * return self.trace_return + * else: + */ + /*else*/ { + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_need_trace_return); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 654, __pyx_L4_error) + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":655 + * else: + * if need_trace_return: + * return self.trace_return # <<<<<<<<<<<<<< + * else: + * return None + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_return); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 655, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L3_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":654 + * return self.trace_exception + * else: + * if need_trace_return: # <<<<<<<<<<<<<< + * return self.trace_return + * else: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":657 + * return self.trace_return + * else: + * return None # <<<<<<<<<<<<<< + * + * #We may have hit a breakpoint or we are already in step mode. 
Either way, let's check what we should do in this frame + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L3_return; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":650 + * + * + * if can_skip and not has_breakpoint_in_frame: # <<<<<<<<<<<<<< + * if has_exception_breakpoints: + * return self.trace_exception + */ + } + } + __pyx_L53:; + } + __pyx_L24:; + + /* "_pydevd_bundle/pydevd_cython.pyx":662 + * # print('NOT skipped', frame.f_lineno, frame.f_code.co_name, event) + * + * try: # <<<<<<<<<<<<<< + * flag = False + * #return is not taken into account for breakpoint hit because we'd have a double-hit in this case + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_18); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":663 + * + * try: + * flag = False # <<<<<<<<<<<<<< + * #return is not taken into account for breakpoint hit because we'd have a double-hit in this case + * #(one for the line and the other for the return). + */ + __Pyx_INCREF(Py_False); + __Pyx_XDECREF_SET(__pyx_v_flag, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":667 + * #(one for the line and the other for the return). + * + * stop_info = {} # <<<<<<<<<<<<<< + * breakpoint = None + * exist_result = False + */ + __pyx_t_8 = PyDict_New(); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 667, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_v_stop_info = ((PyObject*)__pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":668 + * + * stop_info = {} + * breakpoint = None # <<<<<<<<<<<<<< + * exist_result = False + * stop = False + */ + __Pyx_INCREF(Py_None); + __Pyx_XDECREF_SET(__pyx_v_breakpoint, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":669 + * stop_info = {} + * breakpoint = None + * exist_result = False # <<<<<<<<<<<<<< + * stop = False + * bp_type = None + */ + __pyx_v_exist_result = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":670 + * breakpoint = None + * exist_result = False + * stop = False # <<<<<<<<<<<<<< + * bp_type = None + * if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: + */ + __Pyx_INCREF(Py_False); + __pyx_v_stop = Py_False; + + /* "_pydevd_bundle/pydevd_cython.pyx":671 + * exist_result = False + * stop = False + * bp_type = None # <<<<<<<<<<<<<< + * if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: + * breakpoint = breakpoints_for_file[line] + */ + __Pyx_INCREF(Py_None); + __pyx_v_bp_type = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":672 + * stop = False + * bp_type = None + * if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: # <<<<<<<<<<<<<< + * breakpoint = breakpoints_for_file[line] + * new_frame = frame + */ + __pyx_t_12 = ((!(__pyx_v_is_return != 0)) != 0); + if (__pyx_t_12) { + } else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L81_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_info->pydev_state); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 672, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_STATE_SUSPEND); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 672, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_8, __pyx_t_7, Py_NE); 
__Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 672, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 672, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_12) { + } else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L81_bool_binop_done; + } + __pyx_t_12 = (__pyx_v_breakpoints_for_file != Py_None); + __pyx_t_9 = (__pyx_t_12 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L81_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 672, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_9 = (__Pyx_PySequence_ContainsTF(__pyx_t_1, __pyx_v_breakpoints_for_file, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 672, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = (__pyx_t_9 != 0); + __pyx_t_10 = __pyx_t_12; + __pyx_L81_bool_binop_done:; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":673 + * bp_type = None + * if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: + * breakpoint = breakpoints_for_file[line] # <<<<<<<<<<<<<< + * new_frame = frame + * stop = True + */ + __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_breakpoints_for_file, __pyx_v_line, int, 1, __Pyx_PyInt_From_int, 0, 1, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 673, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_breakpoint, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":674 + * if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: + * breakpoint = breakpoints_for_file[line] + * new_frame = frame # <<<<<<<<<<<<<< + * stop = True + * if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): + */ + __Pyx_INCREF(__pyx_v_frame); + __pyx_v_new_frame = __pyx_v_frame; + + /* "_pydevd_bundle/pydevd_cython.pyx":675 + * breakpoint = breakpoints_for_file[line] + * new_frame = frame + * stop = True # <<<<<<<<<<<<<< + * if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): + * stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + */ + __Pyx_INCREF(Py_True); + __Pyx_DECREF_SET(__pyx_v_stop, Py_True); + + /* "_pydevd_bundle/pydevd_cython.pyx":676 + * new_frame = frame + * stop = True + * if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): # <<<<<<<<<<<<<< + * stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 676, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_STEP_OVER); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 676, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PyObject_RichCompare(__pyx_t_1, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 676, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 676, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_12) { + 
} else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L86_bool_binop_done; + } + __pyx_t_12 = (__pyx_v_stop_frame == __pyx_v_frame); + __pyx_t_9 = (__pyx_t_12 != 0); + if (__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L86_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_is_line != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L86_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_is_return != 0); + __pyx_t_10 = __pyx_t_9; + __pyx_L86_bool_binop_done:; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":677 + * stop = True + * if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): + * stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) # <<<<<<<<<<<<<< + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + */ + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":676 + * new_frame = frame + * stop = True + * if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): # <<<<<<<<<<<<<< + * stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":672 + * stop = False + * bp_type = None + * if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: # <<<<<<<<<<<<<< + * breakpoint = breakpoints_for_file[line] + * new_frame = frame + */ + goto __pyx_L80; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":678 + * if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): + * stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: + */ + __pyx_t_9 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_12 = (__pyx_t_9 != 0); + if (__pyx_t_12) { + } else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L90_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 678, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 678, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_10 = __pyx_t_12; + __pyx_L90_bool_binop_done:; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":679 + * stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) # <<<<<<<<<<<<<< + * if result: + * exist_result = True + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_get_breakpoint); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 679, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_1)) { + PyObject* function = 
PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[6] = {__pyx_t_1, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_5, 5+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 679, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[6] = {__pyx_t_1, __pyx_v_main_debugger, ((PyObject *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_5, 5+__pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 679, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_4 = PyTuple_New(5+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 679, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_self)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_self)); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, ((PyObject *)__pyx_v_self)); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_4, 3+__pyx_t_5, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_4, 4+__pyx_t_5, __pyx_v_self->_args); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_4, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 679, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_v_result = __pyx_t_8; + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":680 + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: # <<<<<<<<<<<<<< + * exist_result = True + * flag, breakpoint, new_frame, bp_type = result + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 680, __pyx_L74_error) + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":681 + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: + * exist_result = True # <<<<<<<<<<<<<< + * flag, breakpoint, new_frame, bp_type = result + * + */ + __pyx_v_exist_result = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":682 + * if result: + * exist_result = True + * flag, breakpoint, new_frame, bp_type = result # <<<<<<<<<<<<<< + * + * if breakpoint: + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 4)) { + if (size > 4) __Pyx_RaiseTooManyValuesError(4); + else if 
(size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 682, __pyx_L74_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 2); + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 3); + } else { + __pyx_t_8 = PyList_GET_ITEM(sequence, 0); + __pyx_t_7 = PyList_GET_ITEM(sequence, 1); + __pyx_t_4 = PyList_GET_ITEM(sequence, 2); + __pyx_t_1 = PyList_GET_ITEM(sequence, 3); + } + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + #else + { + Py_ssize_t i; + PyObject** temps[4] = {&__pyx_t_8,&__pyx_t_7,&__pyx_t_4,&__pyx_t_1}; + for (i=0; i < 4; i++) { + PyObject* item = PySequence_ITEM(sequence, i); if (unlikely(!item)) __PYX_ERR(0, 682, __pyx_L74_error) + __Pyx_GOTREF(item); + *(temps[i]) = item; + } + } + #endif + } else { + Py_ssize_t index = -1; + PyObject** temps[4] = {&__pyx_t_8,&__pyx_t_7,&__pyx_t_4,&__pyx_t_1}; + __pyx_t_6 = PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 682, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_11 = Py_TYPE(__pyx_t_6)->tp_iternext; + for (index=0; index < 4; index++) { + PyObject* item = __pyx_t_11(__pyx_t_6); if (unlikely(!item)) goto __pyx_L93_unpacking_failed; + __Pyx_GOTREF(item); + *(temps[index]) = item; + } + if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_6), 4) < 0) __PYX_ERR(0, 682, __pyx_L74_error) + __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L94_unpacking_done; + __pyx_L93_unpacking_failed:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 682, __pyx_L74_error) + __pyx_L94_unpacking_done:; + } + __Pyx_DECREF_SET(__pyx_v_flag, __pyx_t_8); + __pyx_t_8 = 0; + __Pyx_DECREF_SET(__pyx_v_breakpoint, __pyx_t_7); + __pyx_t_7 = 0; + __pyx_v_new_frame = __pyx_t_4; + __pyx_t_4 = 0; + __Pyx_DECREF_SET(__pyx_v_bp_type, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":680 + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: # <<<<<<<<<<<<<< + * exist_result = True + * flag, breakpoint, new_frame, bp_type = result + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":678 + * if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): + * stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + * elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + * if result: + */ + } + __pyx_L80:; + + /* "_pydevd_bundle/pydevd_cython.pyx":684 + * flag, breakpoint, new_frame, bp_type = result + * + * if breakpoint: # <<<<<<<<<<<<<< + * #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + * # lets do the conditional stuff here + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoint); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 684, __pyx_L74_error) + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":687 + * #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + * # lets do the conditional stuff here + * if stop or exist_result: # <<<<<<<<<<<<<< + * condition = 
breakpoint.condition + * if condition is not None: + */ + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_v_stop); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 687, __pyx_L74_error) + if (!__pyx_t_12) { + } else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L97_bool_binop_done; + } + __pyx_t_12 = (__pyx_v_exist_result != 0); + __pyx_t_10 = __pyx_t_12; + __pyx_L97_bool_binop_done:; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":688 + * # lets do the conditional stuff here + * if stop or exist_result: + * condition = breakpoint.condition # <<<<<<<<<<<<<< + * if condition is not None: + * result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_condition); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 688, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_condition = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":689 + * if stop or exist_result: + * condition = breakpoint.condition + * if condition is not None: # <<<<<<<<<<<<<< + * result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + * self.trace_dispatch) + */ + __pyx_t_10 = (__pyx_v_condition != Py_None); + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":690 + * condition = breakpoint.condition + * if condition is not None: + * result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, # <<<<<<<<<<<<<< + * self.trace_dispatch) + * if result is not None: + */ + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_handle_breakpoint_condition); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 690, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_4); + if (unlikely(!__pyx_v_new_frame)) { __Pyx_RaiseUnboundLocalError("new_frame"); __PYX_ERR(0, 690, __pyx_L74_error) } + + /* "_pydevd_bundle/pydevd_cython.pyx":691 + * if condition is not None: + * result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + * self.trace_dispatch) # <<<<<<<<<<<<<< + * if result is not None: + * return result + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 691, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[6] = {__pyx_t_8, __pyx_v_main_debugger, ((PyObject *)__pyx_v_info), __pyx_v_breakpoint, __pyx_v_new_frame, __pyx_t_7}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 5+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 690, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[6] = {__pyx_t_8, __pyx_v_main_debugger, ((PyObject *)__pyx_v_info), __pyx_v_breakpoint, __pyx_v_new_frame, __pyx_t_7}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 5+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 690, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + 
__Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(5+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 690, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(((PyObject *)__pyx_v_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_info)); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, ((PyObject *)__pyx_v_info)); + __Pyx_INCREF(__pyx_v_breakpoint); + __Pyx_GIVEREF(__pyx_v_breakpoint); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, __pyx_v_breakpoint); + __Pyx_INCREF(__pyx_v_new_frame); + __Pyx_GIVEREF(__pyx_v_new_frame); + PyTuple_SET_ITEM(__pyx_t_6, 3+__pyx_t_5, __pyx_v_new_frame); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_6, 4+__pyx_t_5, __pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 690, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF_SET(__pyx_v_result, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":692 + * result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + * self.trace_dispatch) + * if result is not None: # <<<<<<<<<<<<<< + * return result + * + */ + __pyx_t_12 = (__pyx_v_result != Py_None); + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":693 + * self.trace_dispatch) + * if result is not None: + * return result # <<<<<<<<<<<<<< + * + * if breakpoint.expression is not None: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L78_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":692 + * result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + * self.trace_dispatch) + * if result is not None: # <<<<<<<<<<<<<< + * return result + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":689 + * if stop or exist_result: + * condition = breakpoint.condition + * if condition is not None: # <<<<<<<<<<<<<< + * result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + * self.trace_dispatch) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":695 + * return result + * + * if breakpoint.expression is not None: # <<<<<<<<<<<<<< + * handle_breakpoint_expression(breakpoint, info, new_frame) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_expression); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 695, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = (__pyx_t_1 != Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":696 + * + * if breakpoint.expression is not None: + * handle_breakpoint_expression(breakpoint, info, new_frame) # <<<<<<<<<<<<<< + * + * if not main_debugger.first_breakpoint_reached: + */ + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_handle_breakpoint_expression); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 696, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_4); + if (unlikely(!__pyx_v_new_frame)) { __Pyx_RaiseUnboundLocalError("new_frame"); __PYX_ERR(0, 696, __pyx_L74_error) } + __pyx_t_6 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + 
__pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_v_breakpoint, ((PyObject *)__pyx_v_info), __pyx_v_new_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 696, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_v_breakpoint, ((PyObject *)__pyx_v_info), __pyx_v_new_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 696, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_7 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 696, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_breakpoint); + __Pyx_GIVEREF(__pyx_v_breakpoint); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_5, __pyx_v_breakpoint); + __Pyx_INCREF(((PyObject *)__pyx_v_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_info)); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_5, ((PyObject *)__pyx_v_info)); + __Pyx_INCREF(__pyx_v_new_frame); + __Pyx_GIVEREF(__pyx_v_new_frame); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_5, __pyx_v_new_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 696, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":695 + * return result + * + * if breakpoint.expression is not None: # <<<<<<<<<<<<<< + * handle_breakpoint_expression(breakpoint, info, new_frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":698 + * handle_breakpoint_expression(breakpoint, info, new_frame) + * + * if not main_debugger.first_breakpoint_reached: # <<<<<<<<<<<<<< + * if is_call: + * back = frame.f_back + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_first_breakpoint_reached); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 698, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 698, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_10 = ((!__pyx_t_12) != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":699 + * + * if not main_debugger.first_breakpoint_reached: + * if is_call: # <<<<<<<<<<<<<< + * back = frame.f_back + * if back is not None: + */ + __pyx_t_10 = (__pyx_v_is_call != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":700 + * if not main_debugger.first_breakpoint_reached: + * if is_call: + * back = frame.f_back # <<<<<<<<<<<<<< + * if back is not None: + * # When we start debug session, we call execfile in pydevd run function. 
It produces an additional + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 700, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_back = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":701 + * if is_call: + * back = frame.f_back + * if back is not None: # <<<<<<<<<<<<<< + * # When we start debug session, we call execfile in pydevd run function. It produces an additional + * # 'call' event for tracing and we stop on the first line of code twice. + */ + __pyx_t_10 = (__pyx_v_back != Py_None); + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":704 + * # When we start debug session, we call execfile in pydevd run function. It produces an additional + * # 'call' event for tracing and we stop on the first line of code twice. + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) # <<<<<<<<<<<<<< + * if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ + * (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + */ + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (!__pyx_t_7) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_v_back}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_v_back}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_7); __pyx_t_7 = NULL; + __Pyx_INCREF(__pyx_v_back); + __Pyx_GIVEREF(__pyx_v_back); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_back); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 704, __pyx_L74_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && 
!CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 2); + } else { + __pyx_t_4 = PyList_GET_ITEM(sequence, 0); + __pyx_t_6 = PyList_GET_ITEM(sequence, 1); + __pyx_t_7 = PyList_GET_ITEM(sequence, 2); + } + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + #else + __pyx_t_4 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_8 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 704, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_11 = Py_TYPE(__pyx_t_8)->tp_iternext; + index = 0; __pyx_t_4 = __pyx_t_11(__pyx_t_8); if (unlikely(!__pyx_t_4)) goto __pyx_L105_unpacking_failed; + __Pyx_GOTREF(__pyx_t_4); + index = 1; __pyx_t_6 = __pyx_t_11(__pyx_t_8); if (unlikely(!__pyx_t_6)) goto __pyx_L105_unpacking_failed; + __Pyx_GOTREF(__pyx_t_6); + index = 2; __pyx_t_7 = __pyx_t_11(__pyx_t_8); if (unlikely(!__pyx_t_7)) goto __pyx_L105_unpacking_failed; + __Pyx_GOTREF(__pyx_t_7); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_8), 3) < 0) __PYX_ERR(0, 704, __pyx_L74_error) + __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L106_unpacking_done; + __pyx_L105_unpacking_failed:; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_11 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 704, __pyx_L74_error) + __pyx_L106_unpacking_done:; + } + __pyx_v__ = __pyx_t_4; + __pyx_t_4 = 0; + __pyx_v_back_filename = __pyx_t_6; + __pyx_t_6 = 0; + __pyx_v_base = __pyx_t_7; + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":705 + * # 'call' event for tracing and we stop on the first line of code twice. 
+ * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + * if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ # <<<<<<<<<<<<<< + * (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + * stop = False + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DEBUG_START); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyObject_RichCompare(__pyx_v_base, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!__pyx_t_10) { + goto __pyx_L109_next_or; + } else { + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_back, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_co_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DEBUG_START); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_GetItemInt(__pyx_t_1, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyObject_RichCompare(__pyx_t_7, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 705, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L108_bool_binop_done; + } + __pyx_L109_next_or:; + + /* "_pydevd_bundle/pydevd_cython.pyx":706 + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + * if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ + * (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): # <<<<<<<<<<<<<< + * stop = False + * main_debugger.first_breakpoint_reached = True + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DEBUG_START_PY3K); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyObject_RichCompare(__pyx_v_base, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L108_bool_binop_done; + } + __pyx_t_1 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_back, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_co_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DEBUG_START_PY3K); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_GetItemInt(__pyx_t_1, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyObject_RichCompare(__pyx_t_6, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 706, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = __pyx_t_10; + __pyx_L108_bool_binop_done:; + + /* "_pydevd_bundle/pydevd_cython.pyx":705 + * # 'call' event for tracing and we stop on the first line of code twice. + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + * if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ # <<<<<<<<<<<<<< + * (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + * stop = False + */ + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":707 + * if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ + * (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + * stop = False # <<<<<<<<<<<<<< + * main_debugger.first_breakpoint_reached = True + * else: + */ + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":708 + * (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + * stop = False + * main_debugger.first_breakpoint_reached = True # <<<<<<<<<<<<<< + * else: + * # if the frame is traced after breakpoint stop, + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_main_debugger, __pyx_n_s_first_breakpoint_reached, Py_True) < 0) __PYX_ERR(0, 708, __pyx_L74_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":705 + * # 'call' event for tracing and we stop on the first line of code twice. + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + * if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ # <<<<<<<<<<<<<< + * (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + * stop = False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":701 + * if is_call: + * back = frame.f_back + * if back is not None: # <<<<<<<<<<<<<< + * # When we start debug session, we call execfile in pydevd run function. It produces an additional + * # 'call' event for tracing and we stop on the first line of code twice. 
+ */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":699 + * + * if not main_debugger.first_breakpoint_reached: + * if is_call: # <<<<<<<<<<<<<< + * back = frame.f_back + * if back is not None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":698 + * handle_breakpoint_expression(breakpoint, info, new_frame) + * + * if not main_debugger.first_breakpoint_reached: # <<<<<<<<<<<<<< + * if is_call: + * back = frame.f_back + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":687 + * #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + * # lets do the conditional stuff here + * if stop or exist_result: # <<<<<<<<<<<<<< + * condition = breakpoint.condition + * if condition is not None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":684 + * flag, breakpoint, new_frame, bp_type = result + * + * if breakpoint: # <<<<<<<<<<<<<< + * #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + * # lets do the conditional stuff here + */ + goto __pyx_L95; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":712 + * # if the frame is traced after breakpoint stop, + * # but the file should be ignored while stepping because of filters + * if step_cmd != -1: # <<<<<<<<<<<<<< + * if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename): + * # ignore files matching stepping filters + */ + /*else*/ { + __pyx_t_12 = ((__pyx_v_step_cmd != -1L) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":713 + * # but the file should be ignored while stepping because of filters + * if step_cmd != -1: + * if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename): # <<<<<<<<<<<<<< + * # ignore files matching stepping filters + * return self.trace_dispatch + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_is_filter_enabled); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L114_bool_binop_done; + } + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_is_ignored_by_filters); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_6) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_filename); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v_filename}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v_filename}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + 
__Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_filename); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 713, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = __pyx_t_10; + __pyx_L114_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":715 + * if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename): + * # ignore files matching stepping filters + * return self.trace_dispatch # <<<<<<<<<<<<<< + * if main_debugger.is_filter_libraries and main_debugger.not_in_scope(filename): + * # ignore library files while stepping + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 715, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L78_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":713 + * # but the file should be ignored while stepping because of filters + * if step_cmd != -1: + * if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename): # <<<<<<<<<<<<<< + * # ignore files matching stepping filters + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":716 + * # ignore files matching stepping filters + * return self.trace_dispatch + * if main_debugger.is_filter_libraries and main_debugger.not_in_scope(filename): # <<<<<<<<<<<<<< + * # ignore library files while stepping + * return self.trace_dispatch + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_is_filter_libraries); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L117_bool_binop_done; + } + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_not_in_scope); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_4) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_v_filename); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_filename}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + 
__Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_v_filename}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_filename); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 716, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = __pyx_t_10; + __pyx_L117_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":718 + * if main_debugger.is_filter_libraries and main_debugger.not_in_scope(filename): + * # ignore library files while stepping + * return self.trace_dispatch # <<<<<<<<<<<<<< + * + * if main_debugger.show_return_values: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 718, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L78_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":716 + * # ignore files matching stepping filters + * return self.trace_dispatch + * if main_debugger.is_filter_libraries and main_debugger.not_in_scope(filename): # <<<<<<<<<<<<<< + * # ignore library files while stepping + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":712 + * # if the frame is traced after breakpoint stop, + * # but the file should be ignored while stepping because of filters + * if step_cmd != -1: # <<<<<<<<<<<<<< + * if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename): + * # ignore files matching stepping filters + */ + } + } + __pyx_L95:; + + /* "_pydevd_bundle/pydevd_cython.pyx":720 + * return self.trace_dispatch + * + * if main_debugger.show_return_values: # <<<<<<<<<<<<<< + * if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop: + * self.show_return_values(frame, arg) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_show_return_values); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 720, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 720, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":721 + * + * if main_debugger.show_return_values: + * if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop: # <<<<<<<<<<<<<< + * self.show_return_values(frame, arg) + * + */ + __pyx_t_10 = (__pyx_v_is_return != 0); + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L121_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_info->pydev_step_cmd); if 
(unlikely(!__pyx_t_1)) __PYX_ERR(0, 721, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_STEP_OVER); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 721, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_1, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 721, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 721, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L121_bool_binop_done; + } + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 721, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = PyObject_RichCompare(__pyx_t_6, __pyx_v_info->pydev_step_stop, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 721, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 721, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_12 = __pyx_t_10; + __pyx_L121_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":722 + * if main_debugger.show_return_values: + * if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop: + * self.show_return_values(frame, arg) # <<<<<<<<<<<<<< + * + * elif main_debugger.remove_return_values_flag: + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_show_return_values); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 722, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_frame, __pyx_v_arg}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 722, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_frame, __pyx_v_arg}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 722, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_4 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 722, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_4); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, __pyx_v_arg); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_4, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 722, __pyx_L74_error) + 
__Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":721 + * + * if main_debugger.show_return_values: + * if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop: # <<<<<<<<<<<<<< + * self.show_return_values(frame, arg) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":720 + * return self.trace_dispatch + * + * if main_debugger.show_return_values: # <<<<<<<<<<<<<< + * if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop: + * self.show_return_values(frame, arg) + */ + goto __pyx_L119; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":724 + * self.show_return_values(frame, arg) + * + * elif main_debugger.remove_return_values_flag: # <<<<<<<<<<<<<< + * try: + * self.remove_return_values(main_debugger, frame) + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_remove_return_values_flag); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 724, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 724, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":725 + * + * elif main_debugger.remove_return_values_flag: + * try: # <<<<<<<<<<<<<< + * self.remove_return_values(main_debugger, frame) + * finally: + */ + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":726 + * elif main_debugger.remove_return_values_flag: + * try: + * self.remove_return_values(main_debugger, frame) # <<<<<<<<<<<<<< + * finally: + * main_debugger.remove_return_values_flag = False + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_remove_return_values); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 726, __pyx_L125_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = NULL; + __pyx_t_5 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_5 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 726, __pyx_L125_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_main_debugger, __pyx_v_frame}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_5, 2+__pyx_t_5); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 726, __pyx_L125_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_1 = PyTuple_New(2+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 726, __pyx_L125_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_5, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + 
PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_5, __pyx_v_frame); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 726, __pyx_L125_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":728 + * self.remove_return_values(main_debugger, frame) + * finally: + * main_debugger.remove_return_values_flag = False # <<<<<<<<<<<<<< + * + * if stop: + */ + /*finally:*/ { + /*normal exit:*/{ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_main_debugger, __pyx_n_s_remove_return_values_flag, Py_False) < 0) __PYX_ERR(0, 728, __pyx_L74_error) + goto __pyx_L126; + } + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L125_error:; + __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_24, &__pyx_t_25, &__pyx_t_26); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23) < 0)) __Pyx_ErrFetch(&__pyx_t_21, &__pyx_t_22, &__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_21); + __Pyx_XGOTREF(__pyx_t_22); + __Pyx_XGOTREF(__pyx_t_23); + __Pyx_XGOTREF(__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_25); + __Pyx_XGOTREF(__pyx_t_26); + __pyx_t_5 = __pyx_lineno; __pyx_t_19 = __pyx_clineno; __pyx_t_20 = __pyx_filename; + { + if (__Pyx_PyObject_SetAttrStr(__pyx_v_main_debugger, __pyx_n_s_remove_return_values_flag, Py_False) < 0) __PYX_ERR(0, 728, __pyx_L128_error) + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_ExceptionReset(__pyx_t_24, __pyx_t_25, __pyx_t_26); + } + __Pyx_XGIVEREF(__pyx_t_21); + __Pyx_XGIVEREF(__pyx_t_22); + __Pyx_XGIVEREF(__pyx_t_23); + __Pyx_ErrRestore(__pyx_t_21, __pyx_t_22, __pyx_t_23); + __pyx_t_21 = 0; __pyx_t_22 = 0; __pyx_t_23 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; + __pyx_lineno = __pyx_t_5; __pyx_clineno = __pyx_t_19; __pyx_filename = __pyx_t_20; + goto __pyx_L74_error; + __pyx_L128_error:; + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_ExceptionReset(__pyx_t_24, __pyx_t_25, __pyx_t_26); + } + __Pyx_XDECREF(__pyx_t_21); __pyx_t_21 = 0; + __Pyx_XDECREF(__pyx_t_22); __pyx_t_22 = 0; + __Pyx_XDECREF(__pyx_t_23); __pyx_t_23 = 0; + __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; + goto __pyx_L74_error; + } + __pyx_L126:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":724 + * self.show_return_values(frame, arg) + * + * elif main_debugger.remove_return_values_flag: # <<<<<<<<<<<<<< + * try: + * self.remove_return_values(main_debugger, frame) + */ + } + __pyx_L119:; + + /* "_pydevd_bundle/pydevd_cython.pyx":730 + * main_debugger.remove_return_values_flag = False + * + * if stop: # <<<<<<<<<<<<<< + * self.set_suspend(thread, CMD_SET_BREAK) + * if breakpoint and breakpoint.suspend_policy == "ALL": + */ + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_v_stop); if (unlikely(__pyx_t_12 < 0)) 
__PYX_ERR(0, 730, __pyx_L74_error) + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":731 + * + * if stop: + * self.set_suspend(thread, CMD_SET_BREAK) # <<<<<<<<<<<<<< + * if breakpoint and breakpoint.suspend_policy == "ALL": + * main_debugger.suspend_all_other_threads(thread) + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 731, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_SET_BREAK); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 731, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_thread, __pyx_t_1}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 731, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[3] = {__pyx_t_4, __pyx_v_thread, __pyx_t_1}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 731, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 731, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_4) { + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_4); __pyx_t_4 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_19, __pyx_v_thread); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_19, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 731, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":732 + * if stop: + * self.set_suspend(thread, CMD_SET_BREAK) + * if breakpoint and breakpoint.suspend_policy == "ALL": # <<<<<<<<<<<<<< + * main_debugger.suspend_all_other_threads(thread) + * elif flag and plugin_manager is not None: + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoint); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 732, __pyx_L74_error) + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L131_bool_binop_done; + } + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_suspend_policy); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 732, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_10 = (__Pyx_PyString_Equals(__pyx_t_7, __pyx_n_s_ALL, Py_EQ)); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 732, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_12 = __pyx_t_10; + __pyx_L131_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":733 + 
* self.set_suspend(thread, CMD_SET_BREAK) + * if breakpoint and breakpoint.suspend_policy == "ALL": + * main_debugger.suspend_all_other_threads(thread) # <<<<<<<<<<<<<< + * elif flag and plugin_manager is not None: + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_suspend_all_other_threads); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 733, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (!__pyx_t_8) { + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_v_thread); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 733, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_thread}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 733, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_thread}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 733, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 733, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_8); __pyx_t_8 = NULL; + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_thread); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 733, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":732 + * if stop: + * self.set_suspend(thread, CMD_SET_BREAK) + * if breakpoint and breakpoint.suspend_policy == "ALL": # <<<<<<<<<<<<<< + * main_debugger.suspend_all_other_threads(thread) + * elif flag and plugin_manager is not None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":730 + * main_debugger.remove_return_values_flag = False + * + * if stop: # <<<<<<<<<<<<<< + * self.set_suspend(thread, CMD_SET_BREAK) + * if breakpoint and breakpoint.suspend_policy == "ALL": + */ + goto __pyx_L129; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":734 + * if breakpoint and breakpoint.suspend_policy == "ALL": + * main_debugger.suspend_all_other_threads(thread) + * elif flag and plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_flag); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 734, __pyx_L74_error) + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L133_bool_binop_done; + } + __pyx_t_10 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_9 = (__pyx_t_10 != 0); + __pyx_t_12 = __pyx_t_9; + __pyx_L133_bool_binop_done:; + if (__pyx_t_12) { + + /* 
"_pydevd_bundle/pydevd_cython.pyx":735 + * main_debugger.suspend_all_other_threads(thread) + * elif flag and plugin_manager is not None: + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) # <<<<<<<<<<<<<< + * if result: + * frame = result + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_suspend); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 735, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[5] = {__pyx_t_1, __pyx_v_main_debugger, __pyx_v_thread, __pyx_v_frame, __pyx_v_bp_type}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 735, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[5] = {__pyx_t_1, __pyx_v_main_debugger, __pyx_v_thread, __pyx_v_frame, __pyx_v_bp_type}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 735, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_8 = PyTuple_New(4+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 735, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_19, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_19, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_8, 2+__pyx_t_19, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_bp_type); + __Pyx_GIVEREF(__pyx_v_bp_type); + PyTuple_SET_ITEM(__pyx_t_8, 3+__pyx_t_19, __pyx_v_bp_type); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 735, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF_SET(__pyx_v_result, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":736 + * elif flag and plugin_manager is not None: + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: # <<<<<<<<<<<<<< + * frame = result + * + */ + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 736, __pyx_L74_error) + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":737 + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: + * frame = result # <<<<<<<<<<<<<< + * + * # if thread has a suspend flag, we suspend with a busy wait + */ + __Pyx_INCREF(__pyx_v_result); + __Pyx_DECREF_SET(__pyx_v_frame, __pyx_v_result); + + /* "_pydevd_bundle/pydevd_cython.pyx":736 + * elif flag and plugin_manager is not None: + * result = plugin_manager.suspend(main_debugger, thread, 
frame, bp_type) + * if result: # <<<<<<<<<<<<<< + * frame = result + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":734 + * if breakpoint and breakpoint.suspend_policy == "ALL": + * main_debugger.suspend_all_other_threads(thread) + * elif flag and plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + * if result: + */ + } + __pyx_L129:; + + /* "_pydevd_bundle/pydevd_cython.pyx":740 + * + * # if thread has a suspend flag, we suspend with a busy wait + * if info.pydev_state == STATE_SUSPEND: # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg) + * return self.trace_dispatch + */ + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_info->pydev_state); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 740, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_STATE_SUSPEND); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 740, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = PyObject_RichCompare(__pyx_t_7, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 740, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 740, __pyx_L74_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":741 + * # if thread has a suspend flag, we suspend with a busy wait + * if info.pydev_state == STATE_SUSPEND: + * self.do_wait_suspend(thread, frame, event, arg) # <<<<<<<<<<<<<< + * return self.trace_dispatch + * else: + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 741, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 741, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[5] = {__pyx_t_7, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 741, __pyx_L74_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_1 = PyTuple_New(4+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 741, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_19, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_19, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + 
__Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_19, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_1, 3+__pyx_t_19, __pyx_v_arg); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_1, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 741, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":742 + * if info.pydev_state == STATE_SUSPEND: + * self.do_wait_suspend(thread, frame, event, arg) + * return self.trace_dispatch # <<<<<<<<<<<<<< + * else: + * if not breakpoint and not is_return: + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 742, __pyx_L74_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_r = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L78_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":740 + * + * # if thread has a suspend flag, we suspend with a busy wait + * if info.pydev_state == STATE_SUSPEND: # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg) + * return self.trace_dispatch + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":744 + * return self.trace_dispatch + * else: + * if not breakpoint and not is_return: # <<<<<<<<<<<<<< + * # No stop from anyone and no breakpoint found in line (cache that). + * frame_skips_cache[line_cache_key] = 0 + */ + /*else*/ { + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoint); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 744, __pyx_L74_error) + __pyx_t_10 = ((!__pyx_t_9) != 0); + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L138_bool_binop_done; + } + __pyx_t_10 = ((!(__pyx_v_is_return != 0)) != 0); + __pyx_t_12 = __pyx_t_10; + __pyx_L138_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":746 + * if not breakpoint and not is_return: + * # No stop from anyone and no breakpoint found in line (cache that). + * frame_skips_cache[line_cache_key] = 0 # <<<<<<<<<<<<<< + * + * except: + */ + if (unlikely(__pyx_v_frame_skips_cache == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 746, __pyx_L74_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_frame_skips_cache, __pyx_v_line_cache_key, __pyx_int_0) < 0)) __PYX_ERR(0, 746, __pyx_L74_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":744 + * return self.trace_dispatch + * else: + * if not breakpoint and not is_return: # <<<<<<<<<<<<<< + * # No stop from anyone and no breakpoint found in line (cache that). 
+ * frame_skips_cache[line_cache_key] = 0 + */ + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":662 + * # print('NOT skipped', frame.f_lineno, frame.f_code.co_name, event) + * + * try: # <<<<<<<<<<<<<< + * flag = False + * #return is not taken into account for breakpoint hit because we'd have a double-hit in this case + */ + } + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_18); __pyx_t_18 = 0; + goto __pyx_L79_try_end; + __pyx_L74_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":748 + * frame_skips_cache[line_cache_key] = 0 + * + * except: # <<<<<<<<<<<<<< + * traceback.print_exc() + * raise + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_6, &__pyx_t_1) < 0) __PYX_ERR(0, 748, __pyx_L76_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":749 + * + * except: + * traceback.print_exc() # <<<<<<<<<<<<<< + * raise + * + */ + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 749, __pyx_L76_except_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 749, __pyx_L76_except_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (__pyx_t_4) { + __pyx_t_7 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_4); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 749, __pyx_L76_except_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else { + __pyx_t_7 = __Pyx_PyObject_CallNoArg(__pyx_t_3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 749, __pyx_L76_except_error) + } + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":750 + * except: + * traceback.print_exc() + * raise # <<<<<<<<<<<<<< + * + * #step handling. 
We stop when we hit the right frame + */ + __Pyx_GIVEREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_1); + __Pyx_ErrRestoreWithState(__pyx_t_8, __pyx_t_6, __pyx_t_1); + __pyx_t_8 = 0; __pyx_t_6 = 0; __pyx_t_1 = 0; + __PYX_ERR(0, 750, __pyx_L76_except_error) + } + __pyx_L76_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":662 + * # print('NOT skipped', frame.f_lineno, frame.f_code.co_name, event) + * + * try: # <<<<<<<<<<<<<< + * flag = False + * #return is not taken into account for breakpoint hit because we'd have a double-hit in this case + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + goto __pyx_L4_error; + __pyx_L78_try_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ExceptionReset(__pyx_t_16, __pyx_t_17, __pyx_t_18); + goto __pyx_L3_return; + __pyx_L79_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":753 + * + * #step handling. We stop when we hit the right frame + * try: # <<<<<<<<<<<<<< + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_18, &__pyx_t_17, &__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_16); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":754 + * #step handling. We stop when we hit the right frame + * try: + * should_skip = 0 # <<<<<<<<<<<<<< + * if pydevd_dont_trace.should_trace_hook is not None: + * if self.should_skip == -1: + */ + __pyx_v_should_skip = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":755 + * try: + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if self.should_skip == -1: + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 755, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 755, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = (__pyx_t_6 != Py_None); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":756 + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + * if self.should_skip == -1: # <<<<<<<<<<<<<< + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + * # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + */ + __pyx_t_10 = ((__pyx_v_self->should_skip == -1L) != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":760 + * # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + * # Which will be handled by this frame is read-only, so, we can cache it safely. 
+ * if not pydevd_dont_trace.should_trace_hook(frame, filename): # <<<<<<<<<<<<<< + * # -1, 0, 1 to be Cython-friendly + * should_skip = self.should_skip = 1 + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 760, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 760, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_frame, __pyx_v_filename}; + __pyx_t_6 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 760, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_frame, __pyx_v_filename}; + __pyx_t_6 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 760, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_6); + } else + #endif + { + __pyx_t_7 = PyTuple_New(2+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 760, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_19, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_19, __pyx_v_filename); + __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_7, NULL); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 760, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 760, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_12 = ((!__pyx_t_10) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":762 + * if not pydevd_dont_trace.should_trace_hook(frame, filename): + * # -1, 0, 1 to be Cython-friendly + * should_skip = self.should_skip = 1 # <<<<<<<<<<<<<< + * else: + * should_skip = self.should_skip = 0 + */ + __pyx_v_should_skip = 1; + __pyx_v_self->should_skip = 1; + + /* "_pydevd_bundle/pydevd_cython.pyx":760 + * # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + * # Which will be handled by this frame is read-only, so, we can cache it safely. 
+ * if not pydevd_dont_trace.should_trace_hook(frame, filename): # <<<<<<<<<<<<<< + * # -1, 0, 1 to be Cython-friendly + * should_skip = self.should_skip = 1 + */ + goto __pyx_L150; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":764 + * should_skip = self.should_skip = 1 + * else: + * should_skip = self.should_skip = 0 # <<<<<<<<<<<<<< + * else: + * should_skip = self.should_skip + */ + /*else*/ { + __pyx_v_should_skip = 0; + __pyx_v_self->should_skip = 0; + } + __pyx_L150:; + + /* "_pydevd_bundle/pydevd_cython.pyx":756 + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + * if self.should_skip == -1: # <<<<<<<<<<<<<< + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + * # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + */ + goto __pyx_L149; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":766 + * should_skip = self.should_skip = 0 + * else: + * should_skip = self.should_skip # <<<<<<<<<<<<<< + * + * plugin_stop = False + */ + /*else*/ { + __pyx_t_19 = __pyx_v_self->should_skip; + __pyx_v_should_skip = __pyx_t_19; + } + __pyx_L149:; + + /* "_pydevd_bundle/pydevd_cython.pyx":755 + * try: + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if self.should_skip == -1: + * # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":768 + * should_skip = self.should_skip + * + * plugin_stop = False # <<<<<<<<<<<<<< + * if should_skip: + * stop = False + */ + __Pyx_INCREF(Py_False); + __pyx_v_plugin_stop = Py_False; + + /* "_pydevd_bundle/pydevd_cython.pyx":769 + * + * plugin_stop = False + * if should_skip: # <<<<<<<<<<<<<< + * stop = False + * + */ + __pyx_t_12 = (__pyx_v_should_skip != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":770 + * plugin_stop = False + * if should_skip: + * stop = False # <<<<<<<<<<<<<< + * + * elif step_cmd == CMD_STEP_INTO: + */ + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":769 + * + * plugin_stop = False + * if should_skip: # <<<<<<<<<<<<<< + * stop = False + * + */ + goto __pyx_L151; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":772 + * stop = False + * + * elif step_cmd == CMD_STEP_INTO: # <<<<<<<<<<<<<< + * stop = is_line or is_return + * if plugin_manager is not None: + */ + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 772, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_STEP_INTO); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 772, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = PyObject_RichCompare(__pyx_t_6, __pyx_t_8, Py_EQ); __Pyx_XGOTREF(__pyx_t_7); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 772, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 772, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":773 + * + * elif step_cmd == CMD_STEP_INTO: + * stop = is_line or is_return # <<<<<<<<<<<<<< + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + */ + if (!__pyx_v_is_line) { + } else { + __pyx_t_8 = __Pyx_PyBool_FromLong(__pyx_v_is_line); 
if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 773, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __pyx_t_8; + __pyx_t_8 = 0; + goto __pyx_L152_bool_binop_done; + } + __pyx_t_8 = __Pyx_PyBool_FromLong(__pyx_v_is_return); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 773, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __pyx_t_8; + __pyx_t_8 = 0; + __pyx_L152_bool_binop_done:; + __Pyx_DECREF_SET(__pyx_v_stop, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":774 + * elif step_cmd == CMD_STEP_INTO: + * stop = is_line or is_return + * if plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + __pyx_t_12 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":775 + * stop = is_line or is_return + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) # <<<<<<<<<<<<<< + * if result: + * stop, plugin_stop = result + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_cmd_step_into); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 775, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_8))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_8); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_8); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_8, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[7] = {__pyx_t_6, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_stop}; + __pyx_t_7 = __Pyx_PyFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_19, 6+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 775, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_8)) { + PyObject *__pyx_temp[7] = {__pyx_t_6, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_stop}; + __pyx_t_7 = __Pyx_PyCFunction_FastCall(__pyx_t_8, __pyx_temp+1-__pyx_t_19, 6+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 775, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_7); + } else + #endif + { + __pyx_t_1 = PyTuple_New(6+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 775, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_19, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_19, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_19, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_1, 3+__pyx_t_19, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_v_stop_info); + PyTuple_SET_ITEM(__pyx_t_1, 4+__pyx_t_19, __pyx_v_stop_info); + __Pyx_INCREF(__pyx_v_stop); + __Pyx_GIVEREF(__pyx_v_stop); + 
PyTuple_SET_ITEM(__pyx_t_1, 5+__pyx_t_19, __pyx_v_stop); + __pyx_t_7 = __Pyx_PyObject_Call(__pyx_t_8, __pyx_t_1, NULL); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 775, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_result, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":776 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 776, __pyx_L142_error) + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":777 + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + * stop, plugin_stop = result # <<<<<<<<<<<<<< + * + * elif step_cmd == CMD_STEP_INTO_MY_CODE: + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 777, __pyx_L142_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_7 = PyList_GET_ITEM(sequence, 0); + __pyx_t_8 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + #else + __pyx_t_7 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 777, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 777, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_1 = PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 777, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = Py_TYPE(__pyx_t_1)->tp_iternext; + index = 0; __pyx_t_7 = __pyx_t_11(__pyx_t_1); if (unlikely(!__pyx_t_7)) goto __pyx_L156_unpacking_failed; + __Pyx_GOTREF(__pyx_t_7); + index = 1; __pyx_t_8 = __pyx_t_11(__pyx_t_1); if (unlikely(!__pyx_t_8)) goto __pyx_L156_unpacking_failed; + __Pyx_GOTREF(__pyx_t_8); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_1), 2) < 0) __PYX_ERR(0, 777, __pyx_L142_error) + __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L157_unpacking_done; + __pyx_L156_unpacking_failed:; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_11 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 777, __pyx_L142_error) + __pyx_L157_unpacking_done:; + } + __Pyx_DECREF_SET(__pyx_v_stop, __pyx_t_7); + __pyx_t_7 = 0; + __Pyx_DECREF_SET(__pyx_v_plugin_stop, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":776 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":774 + * elif step_cmd == CMD_STEP_INTO: + * stop = is_line or is_return + * if plugin_manager 
is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":772 + * stop = False + * + * elif step_cmd == CMD_STEP_INTO: # <<<<<<<<<<<<<< + * stop = is_line or is_return + * if plugin_manager is not None: + */ + goto __pyx_L151; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":779 + * stop, plugin_stop = result + * + * elif step_cmd == CMD_STEP_INTO_MY_CODE: # <<<<<<<<<<<<<< + * if not main_debugger.not_in_scope(frame.f_code.co_filename): + * stop = is_line + */ + __pyx_t_8 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 779, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_STEP_INTO_MY_CODE); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 779, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_8, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 779, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 779, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":780 + * + * elif step_cmd == CMD_STEP_INTO_MY_CODE: + * if not main_debugger.not_in_scope(frame.f_code.co_filename): # <<<<<<<<<<<<<< + * stop = is_line + * + */ + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_not_in_scope); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + } + } + if (!__pyx_t_8) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_7, __pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_8); 
PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_8); __pyx_t_8 = NULL; + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_3, 0+1, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 780, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = ((!__pyx_t_10) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":781 + * elif step_cmd == CMD_STEP_INTO_MY_CODE: + * if not main_debugger.not_in_scope(frame.f_code.co_filename): + * stop = is_line # <<<<<<<<<<<<<< + * + * elif step_cmd == CMD_STEP_OVER: + */ + __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_is_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 781, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF_SET(__pyx_v_stop, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":780 + * + * elif step_cmd == CMD_STEP_INTO_MY_CODE: + * if not main_debugger.not_in_scope(frame.f_code.co_filename): # <<<<<<<<<<<<<< + * stop = is_line + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":779 + * stop, plugin_stop = result + * + * elif step_cmd == CMD_STEP_INTO_MY_CODE: # <<<<<<<<<<<<<< + * if not main_debugger.not_in_scope(frame.f_code.co_filename): + * stop = is_line + */ + goto __pyx_L151; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":783 + * stop = is_line + * + * elif step_cmd == CMD_STEP_OVER: # <<<<<<<<<<<<<< + * stop = stop_frame is frame and (is_line or is_return) + * + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 783, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_STEP_OVER); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 783, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 783, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 783, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":784 + * + * elif step_cmd == CMD_STEP_OVER: + * stop = stop_frame is frame and (is_line or is_return) # <<<<<<<<<<<<<< + * + * if frame.f_code.co_flags & CO_GENERATOR: + */ + __pyx_t_12 = (__pyx_v_stop_frame == __pyx_v_frame); + if (__pyx_t_12) { + } else { + __pyx_t_7 = __Pyx_PyBool_FromLong(__pyx_t_12); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 784, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L159_bool_binop_done; + } + if (!__pyx_v_is_line) { + } else { + __pyx_t_7 = __Pyx_PyBool_FromLong(__pyx_v_is_line); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 784, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = __pyx_t_7; + __pyx_t_7 = 0; + goto __pyx_L159_bool_binop_done; + } + __pyx_t_7 = __Pyx_PyBool_FromLong(__pyx_v_is_return); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 784, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_3 = __pyx_t_7; + __pyx_t_7 = 0; + __pyx_L159_bool_binop_done:; + __Pyx_DECREF_SET(__pyx_v_stop, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":786 + * stop = stop_frame is frame 
and (is_line or is_return) + * + * if frame.f_code.co_flags & CO_GENERATOR: # <<<<<<<<<<<<<< + * if is_return: + * stop = False + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 786, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_flags); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 786, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_CO_GENERATOR); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 786, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyNumber_And(__pyx_t_7, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 786, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 786, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":787 + * + * if frame.f_code.co_flags & CO_GENERATOR: + * if is_return: # <<<<<<<<<<<<<< + * stop = False + * + */ + __pyx_t_12 = (__pyx_v_is_return != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":788 + * if frame.f_code.co_flags & CO_GENERATOR: + * if is_return: + * stop = False # <<<<<<<<<<<<<< + * + * if plugin_manager is not None: + */ + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":787 + * + * if frame.f_code.co_flags & CO_GENERATOR: + * if is_return: # <<<<<<<<<<<<<< + * stop = False + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":786 + * stop = stop_frame is frame and (is_line or is_return) + * + * if frame.f_code.co_flags & CO_GENERATOR: # <<<<<<<<<<<<<< + * if is_return: + * stop = False + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":790 + * stop = False + * + * if plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + __pyx_t_12 = (__pyx_v_plugin_manager != Py_None); + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":791 + * + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) # <<<<<<<<<<<<<< + * if result: + * stop, plugin_stop = result + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_cmd_step_over); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 791, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[7] = {__pyx_t_7, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_stop}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 6+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 791, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) 
{ + PyObject *__pyx_temp[7] = {__pyx_t_7, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_stop}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 6+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 791, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_6 = PyTuple_New(6+__pyx_t_19); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 791, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_7) { + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_7); __pyx_t_7 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_19, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_19, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_19, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_6, 3+__pyx_t_19, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_v_stop_info); + PyTuple_SET_ITEM(__pyx_t_6, 4+__pyx_t_19, __pyx_v_stop_info); + __Pyx_INCREF(__pyx_v_stop); + __Pyx_GIVEREF(__pyx_v_stop); + PyTuple_SET_ITEM(__pyx_t_6, 5+__pyx_t_19, __pyx_v_stop); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 791, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_result, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":792 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_result); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 792, __pyx_L142_error) + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":793 + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + * stop, plugin_stop = result # <<<<<<<<<<<<<< + * + * elif step_cmd == CMD_SMART_STEP_INTO: + */ + if ((likely(PyTuple_CheckExact(__pyx_v_result))) || (PyList_CheckExact(__pyx_v_result))) { + PyObject* sequence = __pyx_v_result; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 793, __pyx_L142_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_1 = PyList_GET_ITEM(sequence, 0); + __pyx_t_3 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + #else + __pyx_t_1 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 793, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 793, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + #endif + } else { + Py_ssize_t index = -1; + __pyx_t_6 = 
PyObject_GetIter(__pyx_v_result); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 793, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_11 = Py_TYPE(__pyx_t_6)->tp_iternext; + index = 0; __pyx_t_1 = __pyx_t_11(__pyx_t_6); if (unlikely(!__pyx_t_1)) goto __pyx_L166_unpacking_failed; + __Pyx_GOTREF(__pyx_t_1); + index = 1; __pyx_t_3 = __pyx_t_11(__pyx_t_6); if (unlikely(!__pyx_t_3)) goto __pyx_L166_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_6), 2) < 0) __PYX_ERR(0, 793, __pyx_L142_error) + __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L167_unpacking_done; + __pyx_L166_unpacking_failed:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 793, __pyx_L142_error) + __pyx_L167_unpacking_done:; + } + __Pyx_DECREF_SET(__pyx_v_stop, __pyx_t_1); + __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_plugin_stop, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":792 + * if plugin_manager is not None: + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: # <<<<<<<<<<<<<< + * stop, plugin_stop = result + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":790 + * stop = False + * + * if plugin_manager is not None: # <<<<<<<<<<<<<< + * result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + * if result: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":783 + * stop = is_line + * + * elif step_cmd == CMD_STEP_OVER: # <<<<<<<<<<<<<< + * stop = stop_frame is frame and (is_line or is_return) + * + */ + goto __pyx_L151; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":795 + * stop, plugin_stop = result + * + * elif step_cmd == CMD_SMART_STEP_INTO: # <<<<<<<<<<<<<< + * stop = False + * if info.pydev_smart_step_stop is frame: + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 795, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_SMART_STEP_INTO); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 795, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_3, __pyx_t_1, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 795, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 795, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":796 + * + * elif step_cmd == CMD_SMART_STEP_INTO: + * stop = False # <<<<<<<<<<<<<< + * if info.pydev_smart_step_stop is frame: + * info.pydev_func_name = '.invalid.' # Must match the type in cython + */ + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":797 + * elif step_cmd == CMD_SMART_STEP_INTO: + * stop = False + * if info.pydev_smart_step_stop is frame: # <<<<<<<<<<<<<< + * info.pydev_func_name = '.invalid.' # Must match the type in cython + * info.pydev_smart_step_stop = None + */ + __pyx_t_10 = (__pyx_v_info->pydev_smart_step_stop == __pyx_v_frame); + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":798 + * stop = False + * if info.pydev_smart_step_stop is frame: + * info.pydev_func_name = '.invalid.' 
# Must match the type in cython # <<<<<<<<<<<<<< + * info.pydev_smart_step_stop = None + * + */ + __Pyx_INCREF(__pyx_kp_s_invalid); + __Pyx_GIVEREF(__pyx_kp_s_invalid); + __Pyx_GOTREF(__pyx_v_info->pydev_func_name); + __Pyx_DECREF(__pyx_v_info->pydev_func_name); + __pyx_v_info->pydev_func_name = __pyx_kp_s_invalid; + + /* "_pydevd_bundle/pydevd_cython.pyx":799 + * if info.pydev_smart_step_stop is frame: + * info.pydev_func_name = '.invalid.' # Must match the type in cython + * info.pydev_smart_step_stop = None # <<<<<<<<<<<<<< + * + * if is_line or is_exception_event: + */ + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_smart_step_stop); + __pyx_v_info->pydev_smart_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":797 + * elif step_cmd == CMD_SMART_STEP_INTO: + * stop = False + * if info.pydev_smart_step_stop is frame: # <<<<<<<<<<<<<< + * info.pydev_func_name = '.invalid.' # Must match the type in cython + * info.pydev_smart_step_stop = None + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":801 + * info.pydev_smart_step_stop = None + * + * if is_line or is_exception_event: # <<<<<<<<<<<<<< + * curr_func_name = frame.f_code.co_name + * + */ + __pyx_t_10 = (__pyx_v_is_line != 0); + if (!__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L170_bool_binop_done; + } + __pyx_t_10 = (__pyx_v_is_exception_event != 0); + __pyx_t_12 = __pyx_t_10; + __pyx_L170_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":802 + * + * if is_line or is_exception_event: + * curr_func_name = frame.f_code.co_name # <<<<<<<<<<<<<< + * + * #global context is set with an empty name + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 802, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_co_name); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 802, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(0, 802, __pyx_L142_error) + __Pyx_XDECREF_SET(__pyx_v_curr_func_name, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":805 + * + * #global context is set with an empty name + * if curr_func_name in ('?', '') or curr_func_name is None: # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + __Pyx_INCREF(__pyx_v_curr_func_name); + __pyx_t_13 = __pyx_v_curr_func_name; + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_t_13, __pyx_kp_s__5, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 805, __pyx_L142_error) + __pyx_t_27 = (__pyx_t_9 != 0); + if (!__pyx_t_27) { + } else { + __pyx_t_10 = __pyx_t_27; + goto __pyx_L175_bool_binop_done; + } + __pyx_t_27 = (__Pyx_PyString_Equals(__pyx_t_13, __pyx_kp_s_module, Py_EQ)); if (unlikely(__pyx_t_27 < 0)) __PYX_ERR(0, 805, __pyx_L142_error) + __pyx_t_9 = (__pyx_t_27 != 0); + __pyx_t_10 = __pyx_t_9; + __pyx_L175_bool_binop_done:; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + __pyx_t_9 = (__pyx_t_10 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L173_bool_binop_done; + } + __pyx_t_9 = (__pyx_v_curr_func_name == ((PyObject*)Py_None)); + __pyx_t_10 = (__pyx_t_9 != 0); + __pyx_t_12 = __pyx_t_10; + __pyx_L173_bool_binop_done:; + if (__pyx_t_12) { + + /* 
"_pydevd_bundle/pydevd_cython.pyx":806 + * #global context is set with an empty name + * if curr_func_name in ('?', '') or curr_func_name is None: + * curr_func_name = '' # <<<<<<<<<<<<<< + * + * if curr_func_name == info.pydev_func_name: + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_DECREF_SET(__pyx_v_curr_func_name, __pyx_kp_s_); + + /* "_pydevd_bundle/pydevd_cython.pyx":805 + * + * #global context is set with an empty name + * if curr_func_name in ('?', '') or curr_func_name is None: # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":808 + * curr_func_name = '' + * + * if curr_func_name == info.pydev_func_name: # <<<<<<<<<<<<<< + * stop = True + * + */ + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_v_curr_func_name, __pyx_v_info->pydev_func_name, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 808, __pyx_L142_error) + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":809 + * + * if curr_func_name == info.pydev_func_name: + * stop = True # <<<<<<<<<<<<<< + * + * elif step_cmd == CMD_STEP_RETURN: + */ + __Pyx_INCREF(Py_True); + __Pyx_DECREF_SET(__pyx_v_stop, Py_True); + + /* "_pydevd_bundle/pydevd_cython.pyx":808 + * curr_func_name = '' + * + * if curr_func_name == info.pydev_func_name: # <<<<<<<<<<<<<< + * stop = True + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":801 + * info.pydev_smart_step_stop = None + * + * if is_line or is_exception_event: # <<<<<<<<<<<<<< + * curr_func_name = frame.f_code.co_name + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":795 + * stop, plugin_stop = result + * + * elif step_cmd == CMD_SMART_STEP_INTO: # <<<<<<<<<<<<<< + * stop = False + * if info.pydev_smart_step_stop is frame: + */ + goto __pyx_L151; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":811 + * stop = True + * + * elif step_cmd == CMD_STEP_RETURN: # <<<<<<<<<<<<<< + * stop = is_return and stop_frame is frame + * + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 811, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_STEP_RETURN); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 811, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 811, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 811, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":812 + * + * elif step_cmd == CMD_STEP_RETURN: + * stop = is_return and stop_frame is frame # <<<<<<<<<<<<<< + * + * elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT: + */ + if (__pyx_v_is_return) { + } else { + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_v_is_return); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 812, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __pyx_t_6; + __pyx_t_6 = 0; + goto __pyx_L178_bool_binop_done; + } + __pyx_t_10 = (__pyx_v_stop_frame == __pyx_v_frame); + __pyx_t_6 = __Pyx_PyBool_FromLong(__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 812, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __pyx_t_6; + __pyx_t_6 = 0; + __pyx_L178_bool_binop_done:; + __Pyx_DECREF_SET(__pyx_v_stop, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":811 + * stop = True + * + * 
elif step_cmd == CMD_STEP_RETURN: # <<<<<<<<<<<<<< + * stop = is_return and stop_frame is frame + * + */ + goto __pyx_L151; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":814 + * stop = is_return and stop_frame is frame + * + * elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT: # <<<<<<<<<<<<<< + * stop = False + * + */ + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_RUN_TO_LINE); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!__pyx_t_12) { + } else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L180_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_CMD_SET_NEXT_STATEMENT); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, __pyx_t_6, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 814, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_10 = __pyx_t_12; + __pyx_L180_bool_binop_done:; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":815 + * + * elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT: + * stop = False # <<<<<<<<<<<<<< + * + * if is_line or is_exception_event: + */ + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":817 + * stop = False + * + * if is_line or is_exception_event: # <<<<<<<<<<<<<< + * #Yes, we can only act on line events (weird hum?) 
+ * #Note: This code is duplicated at pydevd.py + */ + __pyx_t_12 = (__pyx_v_is_line != 0); + if (!__pyx_t_12) { + } else { + __pyx_t_10 = __pyx_t_12; + goto __pyx_L183_bool_binop_done; + } + __pyx_t_12 = (__pyx_v_is_exception_event != 0); + __pyx_t_10 = __pyx_t_12; + __pyx_L183_bool_binop_done:; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":821 + * #Note: This code is duplicated at pydevd.py + * #Acting on exception events after debugger breaks with exception + * curr_func_name = frame.f_code.co_name # <<<<<<<<<<<<<< + * + * #global context is set with an empty name + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 821, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_co_name); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 821, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_6))||((__pyx_t_6) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 821, __pyx_L142_error) + __Pyx_XDECREF_SET(__pyx_v_curr_func_name, ((PyObject*)__pyx_t_6)); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":824 + * + * #global context is set with an empty name + * if curr_func_name in ('?', ''): # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + __Pyx_INCREF(__pyx_v_curr_func_name); + __pyx_t_13 = __pyx_v_curr_func_name; + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_t_13, __pyx_kp_s__5, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 824, __pyx_L142_error) + __pyx_t_9 = (__pyx_t_12 != 0); + if (!__pyx_t_9) { + } else { + __pyx_t_10 = __pyx_t_9; + goto __pyx_L186_bool_binop_done; + } + __pyx_t_9 = (__Pyx_PyString_Equals(__pyx_t_13, __pyx_kp_s_module, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 824, __pyx_L142_error) + __pyx_t_12 = (__pyx_t_9 != 0); + __pyx_t_10 = __pyx_t_12; + __pyx_L186_bool_binop_done:; + __Pyx_DECREF(__pyx_t_13); __pyx_t_13 = 0; + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":825 + * #global context is set with an empty name + * if curr_func_name in ('?', ''): + * curr_func_name = '' # <<<<<<<<<<<<<< + * + * if curr_func_name == info.pydev_func_name: + */ + __Pyx_INCREF(__pyx_kp_s_); + __Pyx_DECREF_SET(__pyx_v_curr_func_name, __pyx_kp_s_); + + /* "_pydevd_bundle/pydevd_cython.pyx":824 + * + * #global context is set with an empty name + * if curr_func_name in ('?', ''): # <<<<<<<<<<<<<< + * curr_func_name = '' + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":827 + * curr_func_name = '' + * + * if curr_func_name == info.pydev_func_name: # <<<<<<<<<<<<<< + * line = info.pydev_next_line + * if frame.f_lineno == line: + */ + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_v_curr_func_name, __pyx_v_info->pydev_func_name, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 827, __pyx_L142_error) + __pyx_t_10 = (__pyx_t_12 != 0); + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":828 + * + * if curr_func_name == info.pydev_func_name: + * line = info.pydev_next_line # <<<<<<<<<<<<<< + * if frame.f_lineno == line: + * stop = True + */ + __pyx_t_19 = __pyx_v_info->pydev_next_line; + __pyx_v_line = __pyx_t_19; + + /* "_pydevd_bundle/pydevd_cython.pyx":829 + * if curr_func_name == info.pydev_func_name: + * line = info.pydev_next_line + * if frame.f_lineno == line: # <<<<<<<<<<<<<< + * stop = True + * else: + */ + __pyx_t_6 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 829, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 829, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyObject_RichCompare(__pyx_t_6, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 829, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 829, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_10) { + + /* "_pydevd_bundle/pydevd_cython.pyx":830 + * line = info.pydev_next_line + * if frame.f_lineno == line: + * stop = True # <<<<<<<<<<<<<< + * else: + * if frame.f_trace is None: + */ + __Pyx_INCREF(Py_True); + __Pyx_DECREF_SET(__pyx_v_stop, Py_True); + + /* "_pydevd_bundle/pydevd_cython.pyx":829 + * if curr_func_name == info.pydev_func_name: + * line = info.pydev_next_line + * if frame.f_lineno == line: # <<<<<<<<<<<<<< + * stop = True + * else: + */ + goto __pyx_L189; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":832 + * stop = True + * else: + * if frame.f_trace is None: # <<<<<<<<<<<<<< + * frame.f_trace = self.trace_dispatch + * frame.f_lineno = line + */ + /*else*/ { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 832, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = (__pyx_t_1 == Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = (__pyx_t_10 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":833 + * else: + * if frame.f_trace is None: + * frame.f_trace = self.trace_dispatch # <<<<<<<<<<<<<< + * frame.f_lineno = line + * frame.f_trace = None + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 833, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, __pyx_t_1) < 0) __PYX_ERR(0, 833, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":832 + * stop = True + * else: + * if frame.f_trace is None: # <<<<<<<<<<<<<< + * frame.f_trace = self.trace_dispatch + * frame.f_lineno = line + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":834 + * if frame.f_trace is None: + * frame.f_trace = self.trace_dispatch + * frame.f_lineno = line # <<<<<<<<<<<<<< + * frame.f_trace = None + * stop = True + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 834, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno, __pyx_t_1) < 0) __PYX_ERR(0, 834, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":835 + * frame.f_trace = self.trace_dispatch + * frame.f_lineno = line + * frame.f_trace = None # <<<<<<<<<<<<<< + * stop = True + * + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_frame, __pyx_n_s_f_trace, Py_None) < 0) __PYX_ERR(0, 835, __pyx_L142_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":836 + * frame.f_lineno = line + * frame.f_trace = None + * stop = True # <<<<<<<<<<<<<< + * + * else: + */ + __Pyx_INCREF(Py_True); + __Pyx_DECREF_SET(__pyx_v_stop, Py_True); + } + __pyx_L189:; + + /* "_pydevd_bundle/pydevd_cython.pyx":827 + * curr_func_name = '' + * + * if 
curr_func_name == info.pydev_func_name: # <<<<<<<<<<<<<< + * line = info.pydev_next_line + * if frame.f_lineno == line: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":817 + * stop = False + * + * if is_line or is_exception_event: # <<<<<<<<<<<<<< + * #Yes, we can only act on line events (weird hum?) + * #Note: This code is duplicated at pydevd.py + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":814 + * stop = is_return and stop_frame is frame + * + * elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT: # <<<<<<<<<<<<<< + * stop = False + * + */ + goto __pyx_L151; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":839 + * + * else: + * stop = False # <<<<<<<<<<<<<< + * + * if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): + */ + /*else*/ { + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + } + __pyx_L151:; + + /* "_pydevd_bundle/pydevd_cython.pyx":841 + * stop = False + * + * if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): # <<<<<<<<<<<<<< + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: + */ + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_v_stop); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 841, __pyx_L142_error) + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L192_bool_binop_done; + } + __pyx_t_10 = ((__pyx_v_step_cmd != -1L) != 0); + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L192_bool_binop_done; + } + __pyx_t_10 = (__pyx_v_is_return != 0); + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L192_bool_binop_done; + } + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_IS_PY3K); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 841, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 841, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_10) { + } else { + __pyx_t_12 = __pyx_t_10; + goto __pyx_L192_bool_binop_done; + } + __pyx_t_10 = __Pyx_HasAttr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(__pyx_t_10 == -1)) __PYX_ERR(0, 841, __pyx_L142_error) + __pyx_t_9 = (__pyx_t_10 != 0); + __pyx_t_12 = __pyx_t_9; + __pyx_L192_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":842 + * + * if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): + * f_code = getattr(frame.f_back, 'f_code', None) # <<<<<<<<<<<<<< + * if f_code is not None: + * back_filename = os.path.basename(f_code.co_filename) + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 842, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_GetAttr3(__pyx_t_1, __pyx_n_s_f_code, Py_None); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 842, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_f_code = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":843 + * if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: # <<<<<<<<<<<<<< + * back_filename = os.path.basename(f_code.co_filename) + * file_type = get_file_type(back_filename) + */ + __pyx_t_12 = (__pyx_v_f_code != Py_None); + __pyx_t_9 = (__pyx_t_12 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":844 + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: + * 
back_filename = os.path.basename(f_code.co_filename) # <<<<<<<<<<<<<< + * file_type = get_file_type(back_filename) + * if file_type == PYDEV_FILE: + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_os); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_path); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_basename); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_f_code, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_7) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_6); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_6}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_6}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL; + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_8, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 844, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF_SET(__pyx_v_back_filename, __pyx_t_3); + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":845 + * if f_code is not None: + * back_filename = os.path.basename(f_code.co_filename) + * file_type = get_file_type(back_filename) # <<<<<<<<<<<<<< + * if file_type == PYDEV_FILE: + * stop = False + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_file_type); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 845, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_8) { + 
__pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_back_filename); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 845, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_back_filename}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 845, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_8, __pyx_v_back_filename}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 845, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 845, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_8); __pyx_t_8 = NULL; + __Pyx_INCREF(__pyx_v_back_filename); + __Pyx_GIVEREF(__pyx_v_back_filename); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_back_filename); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 845, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_file_type = __pyx_t_3; + __pyx_t_3 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":846 + * back_filename = os.path.basename(f_code.co_filename) + * file_type = get_file_type(back_filename) + * if file_type == PYDEV_FILE: # <<<<<<<<<<<<<< + * stop = False + * + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_PYDEV_FILE); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 846, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = PyObject_RichCompare(__pyx_v_file_type, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 846, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 846, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":847 + * file_type = get_file_type(back_filename) + * if file_type == PYDEV_FILE: + * stop = False # <<<<<<<<<<<<<< + * + * if plugin_stop: + */ + __Pyx_INCREF(Py_False); + __Pyx_DECREF_SET(__pyx_v_stop, Py_False); + + /* "_pydevd_bundle/pydevd_cython.pyx":846 + * back_filename = os.path.basename(f_code.co_filename) + * file_type = get_file_type(back_filename) + * if file_type == PYDEV_FILE: # <<<<<<<<<<<<<< + * stop = False + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":843 + * if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: # <<<<<<<<<<<<<< + * back_filename = os.path.basename(f_code.co_filename) + * file_type = get_file_type(back_filename) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":841 + * stop = False + * + * if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): # <<<<<<<<<<<<<< + * f_code = getattr(frame.f_back, 'f_code', None) + * if f_code is not None: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":849 + * stop = False + * + * if plugin_stop: # <<<<<<<<<<<<<< + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, 
stop_info, arg, step_cmd) + * elif stop: + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_plugin_stop); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 849, __pyx_L142_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":850 + * + * if plugin_stop: + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) # <<<<<<<<<<<<<< + * elif stop: + * if is_line: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_plugin_manager, __pyx_n_s_stop); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 850, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 850, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_8 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[8] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_arg, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 7+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 850, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[8] = {__pyx_t_8, __pyx_v_main_debugger, __pyx_v_frame, __pyx_v_event, __pyx_v_self->_args, __pyx_v_stop_info, __pyx_v_arg, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 7+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 850, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(7+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 850, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_19, __pyx_v_main_debugger); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_19, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_19, __pyx_v_event); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_19, __pyx_v_self->_args); + __Pyx_INCREF(__pyx_v_stop_info); + __Pyx_GIVEREF(__pyx_v_stop_info); + PyTuple_SET_ITEM(__pyx_t_7, 4+__pyx_t_19, __pyx_v_stop_info); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 5+__pyx_t_19, __pyx_v_arg); + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_7, 6+__pyx_t_19, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 850, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_stopped_on_plugin = __pyx_t_1; + __pyx_t_1 = 0; + + /* 
"_pydevd_bundle/pydevd_cython.pyx":849 + * stop = False + * + * if plugin_stop: # <<<<<<<<<<<<<< + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: + */ + goto __pyx_L199; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":851 + * if plugin_stop: + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: # <<<<<<<<<<<<<< + * if is_line: + * self.set_suspend(thread, step_cmd) + */ + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_v_stop); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 851, __pyx_L142_error) + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":852 + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: + * if is_line: # <<<<<<<<<<<<<< + * self.set_suspend(thread, step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) + */ + __pyx_t_9 = (__pyx_v_is_line != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":853 + * elif stop: + * if is_line: + * self.set_suspend(thread, step_cmd) # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, frame, event, arg) + * else: #return event + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 853, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 853, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_6 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_thread, __pyx_t_7}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 853, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_6, __pyx_v_thread, __pyx_t_7}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 853, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(2+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 853, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_8, 0+__pyx_t_19, __pyx_v_thread); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_8, 1+__pyx_t_19, __pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 853, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":854 + * if 
is_line: + * self.set_suspend(thread, step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) # <<<<<<<<<<<<<< + * else: #return event + * back = frame.f_back + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 854, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 854, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[5] = {__pyx_t_8, __pyx_v_thread, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 854, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_7 = PyTuple_New(4+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 854, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_19, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_19, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_19, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_19, __pyx_v_arg); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 854, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":852 + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: + * if is_line: # <<<<<<<<<<<<<< + * self.set_suspend(thread, step_cmd) + * self.do_wait_suspend(thread, frame, event, arg) + */ + goto __pyx_L200; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":856 + * self.do_wait_suspend(thread, frame, event, arg) + * else: #return event + * back = frame.f_back # <<<<<<<<<<<<<< + * if back is not None: + * #When we get to the pydevd run function, the debugging has actually finished for the main thread + */ + /*else*/ { + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 856, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XDECREF_SET(__pyx_v_back, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":857 + * else: #return event + * back = frame.f_back + * if back is not None: # <<<<<<<<<<<<<< + * #When we get 
to the pydevd run function, the debugging has actually finished for the main thread + * #(note that it can still go on for other threads, but for this one, we just make it finish) + */ + __pyx_t_9 = (__pyx_v_back != Py_None); + __pyx_t_12 = (__pyx_t_9 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":861 + * #(note that it can still go on for other threads, but for this one, we just make it finish) + * #So, just setting it to None should be OK + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) # <<<<<<<<<<<<<< + * if base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]: + * back = None + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_7) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_back); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_v_back}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_v_back}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL; + __Pyx_INCREF(__pyx_v_back); + __Pyx_GIVEREF(__pyx_v_back); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_v_back); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 3)) { + if (size > 3) __Pyx_RaiseTooManyValuesError(3); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 861, __pyx_L142_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_8 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_7 = PyTuple_GET_ITEM(sequence, 2); + } else { + __pyx_t_3 = PyList_GET_ITEM(sequence, 0); + __pyx_t_8 = PyList_GET_ITEM(sequence, 1); + __pyx_t_7 = PyList_GET_ITEM(sequence, 2); + } + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(__pyx_t_7); + #else + __pyx_t_3 = PySequence_ITEM(sequence, 0); 
if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_8 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = PySequence_ITEM(sequence, 2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_6 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 861, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_11 = Py_TYPE(__pyx_t_6)->tp_iternext; + index = 0; __pyx_t_3 = __pyx_t_11(__pyx_t_6); if (unlikely(!__pyx_t_3)) goto __pyx_L202_unpacking_failed; + __Pyx_GOTREF(__pyx_t_3); + index = 1; __pyx_t_8 = __pyx_t_11(__pyx_t_6); if (unlikely(!__pyx_t_8)) goto __pyx_L202_unpacking_failed; + __Pyx_GOTREF(__pyx_t_8); + index = 2; __pyx_t_7 = __pyx_t_11(__pyx_t_6); if (unlikely(!__pyx_t_7)) goto __pyx_L202_unpacking_failed; + __Pyx_GOTREF(__pyx_t_7); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_11(__pyx_t_6), 3) < 0) __PYX_ERR(0, 861, __pyx_L142_error) + __pyx_t_11 = NULL; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L203_unpacking_done; + __pyx_L202_unpacking_failed:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 861, __pyx_L142_error) + __pyx_L203_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v__, __pyx_t_3); + __pyx_t_3 = 0; + __Pyx_XDECREF_SET(__pyx_v_back_filename, __pyx_t_8); + __pyx_t_8 = 0; + __Pyx_XDECREF_SET(__pyx_v_base, __pyx_t_7); + __pyx_t_7 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":862 + * #So, just setting it to None should be OK + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + * if base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]: # <<<<<<<<<<<<<< + * back = None + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DEBUG_START); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = PyObject_RichCompare(__pyx_v_base, __pyx_t_7, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_9) { + } else { + __pyx_t_12 = __pyx_t_9; + goto __pyx_L205_bool_binop_done; + } + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_back, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_co_name); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_DEBUG_START); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = __Pyx_GetItemInt(__pyx_t_1, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = 
PyObject_RichCompare(__pyx_t_7, __pyx_t_8, Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 862, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = __pyx_t_9; + __pyx_L205_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":863 + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + * if base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]: + * back = None # <<<<<<<<<<<<<< + * + * elif base == TRACE_PROPERTY: + */ + __Pyx_INCREF(Py_None); + __Pyx_DECREF_SET(__pyx_v_back, Py_None); + + /* "_pydevd_bundle/pydevd_cython.pyx":862 + * #So, just setting it to None should be OK + * _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + * if base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]: # <<<<<<<<<<<<<< + * back = None + * + */ + goto __pyx_L204; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":865 + * back = None + * + * elif base == TRACE_PROPERTY: # <<<<<<<<<<<<<< + * # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + * #if we're in a return, we want it to appear to the user in the previous frame! + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_TRACE_PROPERTY); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 865, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_8 = PyObject_RichCompare(__pyx_v_base, __pyx_t_1, Py_EQ); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 865, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 865, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":868 + * # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + * #if we're in a return, we want it to appear to the user in the previous frame! + * return None # <<<<<<<<<<<<<< + * + * elif pydevd_dont_trace.should_trace_hook is not None: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L146_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":865 + * back = None + * + * elif base == TRACE_PROPERTY: # <<<<<<<<<<<<<< + * # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + * #if we're in a return, we want it to appear to the user in the previous frame! + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":870 + * return None + * + * elif pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if not pydevd_dont_trace.should_trace_hook(back, back_filename): + * # In this case, we'll have to skip the previous one because it shouldn't be traced. 
+ */ + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 870, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 870, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = (__pyx_t_1 != Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_9 = (__pyx_t_12 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":871 + * + * elif pydevd_dont_trace.should_trace_hook is not None: + * if not pydevd_dont_trace.should_trace_hook(back, back_filename): # <<<<<<<<<<<<<< + * # In this case, we'll have to skip the previous one because it shouldn't be traced. + * # Also, we have to reset the tracing, because if the parent's parent (or some + */ + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 871, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_7 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_should_trace_hook); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 871, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_8 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_7))) { + __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_7); + if (likely(__pyx_t_8)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_7); + __Pyx_INCREF(__pyx_t_8); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_7, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_v_back, __pyx_v_back_filename}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 871, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_7)) { + PyObject *__pyx_temp[3] = {__pyx_t_8, __pyx_v_back, __pyx_v_back_filename}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_7, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 871, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_3 = PyTuple_New(2+__pyx_t_19); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 871, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_8) { + __Pyx_GIVEREF(__pyx_t_8); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_8); __pyx_t_8 = NULL; + } + __Pyx_INCREF(__pyx_v_back); + __Pyx_GIVEREF(__pyx_v_back); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_19, __pyx_v_back); + __Pyx_INCREF(__pyx_v_back_filename); + __Pyx_GIVEREF(__pyx_v_back_filename); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_19, __pyx_v_back_filename); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_7, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 871, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 871, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = ((!__pyx_t_9) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":877 + * # we should anymore (so, a step in/over/return may not stop anywhere if no parent is traced). 
+ * # Related test: _debugger_case17a.py + * main_debugger.set_trace_for_frame_and_parents(back, overwrite_prev_trace=True) # <<<<<<<<<<<<<< + * return None + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_set_trace_for_frame_and_parents); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 877, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_7 = PyTuple_New(1); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 877, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_INCREF(__pyx_v_back); + __Pyx_GIVEREF(__pyx_v_back); + PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_v_back); + __pyx_t_3 = PyDict_New(); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 877, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_overwrite_prev_trace, Py_True) < 0) __PYX_ERR(0, 877, __pyx_L142_error) + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_7, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 877, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":878 + * # Related test: _debugger_case17a.py + * main_debugger.set_trace_for_frame_and_parents(back, overwrite_prev_trace=True) + * return None # <<<<<<<<<<<<<< + * + * if back is not None: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L146_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":871 + * + * elif pydevd_dont_trace.should_trace_hook is not None: + * if not pydevd_dont_trace.should_trace_hook(back, back_filename): # <<<<<<<<<<<<<< + * # In this case, we'll have to skip the previous one because it shouldn't be traced. + * # Also, we have to reset the tracing, because if the parent's parent (or some + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":870 + * return None + * + * elif pydevd_dont_trace.should_trace_hook is not None: # <<<<<<<<<<<<<< + * if not pydevd_dont_trace.should_trace_hook(back, back_filename): + * # In this case, we'll have to skip the previous one because it shouldn't be traced. + */ + } + __pyx_L204:; + + /* "_pydevd_bundle/pydevd_cython.pyx":857 + * else: #return event + * back = frame.f_back + * if back is not None: # <<<<<<<<<<<<<< + * #When we get to the pydevd run function, the debugging has actually finished for the main thread + * #(note that it can still go on for other threads, but for this one, we just make it finish) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":880 + * return None + * + * if back is not None: # <<<<<<<<<<<<<< + * #if we're in a return, we want it to appear to the user in the previous frame! + * self.set_suspend(thread, step_cmd) + */ + __pyx_t_12 = (__pyx_v_back != Py_None); + __pyx_t_9 = (__pyx_t_12 != 0); + if (__pyx_t_9) { + + /* "_pydevd_bundle/pydevd_cython.pyx":882 + * if back is not None: + * #if we're in a return, we want it to appear to the user in the previous frame! 
+ * self.set_suspend(thread, step_cmd) # <<<<<<<<<<<<<< + * self.do_wait_suspend(thread, back, event, arg) + * else: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_set_suspend); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 882, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_7 = __Pyx_PyInt_From_int(__pyx_v_step_cmd); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 882, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + __pyx_t_1 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_thread, __pyx_t_7}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 882, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[3] = {__pyx_t_1, __pyx_v_thread, __pyx_t_7}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 2+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 882, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(2+__pyx_t_19); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 882, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_19, __pyx_v_thread); + __Pyx_GIVEREF(__pyx_t_7); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_19, __pyx_t_7); + __pyx_t_7 = 0; + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 882, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":883 + * #if we're in a return, we want it to appear to the user in the previous frame! 
+ * self.set_suspend(thread, step_cmd) + * self.do_wait_suspend(thread, back, event, arg) # <<<<<<<<<<<<<< + * else: + * #in jython we may not have a back frame + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_do_wait_suspend); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 883, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = NULL; + __pyx_t_19 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_19 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[5] = {__pyx_t_6, __pyx_v_thread, __pyx_v_back, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 883, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[5] = {__pyx_t_6, __pyx_v_thread, __pyx_v_back, __pyx_v_event, __pyx_v_arg}; + __pyx_t_8 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_19, 4+__pyx_t_19); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 883, __pyx_L142_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_8); + } else + #endif + { + __pyx_t_7 = PyTuple_New(4+__pyx_t_19); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 883, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_7); + if (__pyx_t_6) { + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL; + } + __Pyx_INCREF(__pyx_v_thread); + __Pyx_GIVEREF(__pyx_v_thread); + PyTuple_SET_ITEM(__pyx_t_7, 0+__pyx_t_19, __pyx_v_thread); + __Pyx_INCREF(__pyx_v_back); + __Pyx_GIVEREF(__pyx_v_back); + PyTuple_SET_ITEM(__pyx_t_7, 1+__pyx_t_19, __pyx_v_back); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_7, 2+__pyx_t_19, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_7, 3+__pyx_t_19, __pyx_v_arg); + __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_7, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 883, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":880 + * return None + * + * if back is not None: # <<<<<<<<<<<<<< + * #if we're in a return, we want it to appear to the user in the previous frame! 
+ * self.set_suspend(thread, step_cmd) + */ + goto __pyx_L208; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":886 + * else: + * #in jython we may not have a back frame + * info.pydev_step_stop = None # <<<<<<<<<<<<<< + * info.pydev_step_cmd = -1 + * info.pydev_state = STATE_RUN + */ + /*else*/ { + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_info->pydev_step_stop); + __Pyx_DECREF(__pyx_v_info->pydev_step_stop); + __pyx_v_info->pydev_step_stop = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":887 + * #in jython we may not have a back frame + * info.pydev_step_stop = None + * info.pydev_step_cmd = -1 # <<<<<<<<<<<<<< + * info.pydev_state = STATE_RUN + * + */ + __pyx_v_info->pydev_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":888 + * info.pydev_step_stop = None + * info.pydev_step_cmd = -1 + * info.pydev_state = STATE_RUN # <<<<<<<<<<<<<< + * + * except KeyboardInterrupt: + */ + __pyx_t_8 = __Pyx_GetModuleGlobalName(__pyx_n_s_STATE_RUN); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 888, __pyx_L142_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_19 = __Pyx_PyInt_As_int(__pyx_t_8); if (unlikely((__pyx_t_19 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 888, __pyx_L142_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_v_info->pydev_state = __pyx_t_19; + } + __pyx_L208:; + } + __pyx_L200:; + + /* "_pydevd_bundle/pydevd_cython.pyx":851 + * if plugin_stop: + * stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + * elif stop: # <<<<<<<<<<<<<< + * if is_line: + * self.set_suspend(thread, step_cmd) + */ + } + __pyx_L199:; + + /* "_pydevd_bundle/pydevd_cython.pyx":753 + * + * #step handling. We stop when we hit the right frame + * try: # <<<<<<<<<<<<<< + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + */ + } + __Pyx_XDECREF(__pyx_t_18); __pyx_t_18 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + goto __pyx_L147_try_end; + __pyx_L142_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":890 + * info.pydev_state = STATE_RUN + * + * except KeyboardInterrupt: # <<<<<<<<<<<<<< + * raise + * except: + */ + __pyx_t_19 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_KeyboardInterrupt); + if (__pyx_t_19) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_8, &__pyx_t_3, &__pyx_t_7) < 0) __PYX_ERR(0, 890, __pyx_L144_except_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_7); + + /* "_pydevd_bundle/pydevd_cython.pyx":891 + * + * except KeyboardInterrupt: + * raise # <<<<<<<<<<<<<< + * except: + * try: + */ + __Pyx_GIVEREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_3); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ErrRestoreWithState(__pyx_t_8, __pyx_t_3, __pyx_t_7); + __pyx_t_8 = 0; __pyx_t_3 = 0; __pyx_t_7 = 0; + __PYX_ERR(0, 891, __pyx_L144_except_error) + } + + /* "_pydevd_bundle/pydevd_cython.pyx":892 + * except KeyboardInterrupt: + * raise + * except: # <<<<<<<<<<<<<< + * try: + * traceback.print_exc() + */ + /*except:*/ { + 
__Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_7, &__pyx_t_3, &__pyx_t_8) < 0) __PYX_ERR(0, 892, __pyx_L144_except_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_8); + + /* "_pydevd_bundle/pydevd_cython.pyx":893 + * raise + * except: + * try: # <<<<<<<<<<<<<< + * traceback.print_exc() + * info.pydev_step_cmd = -1 + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_26, &__pyx_t_25, &__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_26); + __Pyx_XGOTREF(__pyx_t_25); + __Pyx_XGOTREF(__pyx_t_24); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":894 + * except: + * try: + * traceback.print_exc() # <<<<<<<<<<<<<< + * info.pydev_step_cmd = -1 + * except: + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 894, __pyx_L213_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 894, __pyx_L213_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_1) { + __pyx_t_6 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 894, __pyx_L213_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + __pyx_t_6 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 894, __pyx_L213_error) + } + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":895 + * try: + * traceback.print_exc() + * info.pydev_step_cmd = -1 # <<<<<<<<<<<<<< + * except: + * return None + */ + __pyx_v_info->pydev_step_cmd = -1; + + /* "_pydevd_bundle/pydevd_cython.pyx":893 + * raise + * except: + * try: # <<<<<<<<<<<<<< + * traceback.print_exc() + * info.pydev_step_cmd = -1 + */ + } + __Pyx_XDECREF(__pyx_t_26); __pyx_t_26 = 0; + __Pyx_XDECREF(__pyx_t_25); __pyx_t_25 = 0; + __Pyx_XDECREF(__pyx_t_24); __pyx_t_24 = 0; + goto __pyx_L220_try_end; + __pyx_L213_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":896 + * traceback.print_exc() + * info.pydev_step_cmd = -1 + * except: # <<<<<<<<<<<<<< + * return None + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_4, &__pyx_t_1) < 0) __PYX_ERR(0, 896, __pyx_L215_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":897 + * info.pydev_step_cmd = -1 + * except: + * return None # <<<<<<<<<<<<<< + * + * #if we are quitting, let's stop the tracing + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + 
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L216_except_return; + } + __pyx_L215_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":893 + * raise + * except: + * try: # <<<<<<<<<<<<<< + * traceback.print_exc() + * info.pydev_step_cmd = -1 + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_ExceptionReset(__pyx_t_26, __pyx_t_25, __pyx_t_24); + goto __pyx_L144_except_error; + __pyx_L216_except_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_ExceptionReset(__pyx_t_26, __pyx_t_25, __pyx_t_24); + goto __pyx_L145_except_return; + __pyx_L220_try_end:; + } + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + goto __pyx_L143_exception_handled; + } + __pyx_L144_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":753 + * + * #step handling. We stop when we hit the right frame + * try: # <<<<<<<<<<<<<< + * should_skip = 0 + * if pydevd_dont_trace.should_trace_hook is not None: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_18, __pyx_t_17, __pyx_t_16); + goto __pyx_L4_error; + __pyx_L146_try_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_18, __pyx_t_17, __pyx_t_16); + goto __pyx_L3_return; + __pyx_L145_except_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_18, __pyx_t_17, __pyx_t_16); + goto __pyx_L3_return; + __pyx_L143_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_ExceptionReset(__pyx_t_18, __pyx_t_17, __pyx_t_16); + __pyx_L147_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":900 + * + * #if we are quitting, let's stop the tracing + * retVal = None # <<<<<<<<<<<<<< + * if not main_debugger.quitting: + * retVal = self.trace_dispatch + */ + __Pyx_INCREF(Py_None); + __pyx_v_retVal = Py_None; + + /* "_pydevd_bundle/pydevd_cython.pyx":901 + * #if we are quitting, let's stop the tracing + * retVal = None + * if not main_debugger.quitting: # <<<<<<<<<<<<<< + * retVal = self.trace_dispatch + * + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_quitting); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 901, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 901, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + __pyx_t_12 = ((!__pyx_t_9) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":902 + * retVal = None + * if not main_debugger.quitting: + * retVal = self.trace_dispatch # <<<<<<<<<<<<<< + * + * return retVal + */ + __pyx_t_8 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_self), __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 902, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_DECREF_SET(__pyx_v_retVal, __pyx_t_8); + __pyx_t_8 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":901 + * #if we are quitting, let's stop the tracing + * retVal = None + 
* if not main_debugger.quitting: # <<<<<<<<<<<<<< + * retVal = self.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":904 + * retVal = self.trace_dispatch + * + * return retVal # <<<<<<<<<<<<<< + * finally: + * info.is_tracing = False + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_retVal); + __pyx_r = __pyx_v_retVal; + goto __pyx_L3_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":906 + * return retVal + * finally: + * info.is_tracing = False # <<<<<<<<<<<<<< + * + * #end trace_dispatch + */ + /*finally:*/ { + /*exception exit:*/{ + __Pyx_PyThreadState_declare + __pyx_L4_error:; + __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + if (PY_MAJOR_VERSION >= 3) __Pyx_ExceptionSwap(&__pyx_t_24, &__pyx_t_25, &__pyx_t_26); + if ((PY_MAJOR_VERSION < 3) || unlikely(__Pyx_GetException(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18) < 0)) __Pyx_ErrFetch(&__pyx_t_16, &__pyx_t_17, &__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_16); + __Pyx_XGOTREF(__pyx_t_17); + __Pyx_XGOTREF(__pyx_t_18); + __Pyx_XGOTREF(__pyx_t_24); + __Pyx_XGOTREF(__pyx_t_25); + __Pyx_XGOTREF(__pyx_t_26); + __pyx_t_19 = __pyx_lineno; __pyx_t_5 = __pyx_clineno; __pyx_t_28 = __pyx_filename; + { + __pyx_v_info->is_tracing = 0; + } + __Pyx_PyThreadState_assign + if (PY_MAJOR_VERSION >= 3) { + __Pyx_XGIVEREF(__pyx_t_24); + __Pyx_XGIVEREF(__pyx_t_25); + __Pyx_XGIVEREF(__pyx_t_26); + __Pyx_ExceptionReset(__pyx_t_24, __pyx_t_25, __pyx_t_26); + } + __Pyx_XGIVEREF(__pyx_t_16); + __Pyx_XGIVEREF(__pyx_t_17); + __Pyx_XGIVEREF(__pyx_t_18); + __Pyx_ErrRestore(__pyx_t_16, __pyx_t_17, __pyx_t_18); + __pyx_t_16 = 0; __pyx_t_17 = 0; __pyx_t_18 = 0; __pyx_t_24 = 0; __pyx_t_25 = 0; __pyx_t_26 = 0; + __pyx_lineno = __pyx_t_19; __pyx_clineno = __pyx_t_5; __pyx_filename = __pyx_t_28; + goto __pyx_L1_error; + } + __pyx_L3_return: { + __pyx_t_26 = __pyx_r; + __pyx_r = 0; + __pyx_v_info->is_tracing = 0; + __pyx_r = __pyx_t_26; + __pyx_t_26 = 0; + goto __pyx_L0; + } + } + + /* "_pydevd_bundle/pydevd_cython.pyx":498 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * cpdef trace_dispatch(self, frame, str event, arg): # <<<<<<<<<<<<<< + * cdef str filename; + * cdef bint is_exception_event; + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_XDECREF(__pyx_t_13); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_filename); + __Pyx_XDECREF((PyObject *)__pyx_v_info); + __Pyx_XDECREF(__pyx_v_curr_func_name); + __Pyx_XDECREF(__pyx_v_frame_skips_cache); + __Pyx_XDECREF(__pyx_v_frame_cache_key); + __Pyx_XDECREF(__pyx_v_line_cache_key); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_thread); + __Pyx_XDECREF(__pyx_v_plugin_manager); + __Pyx_XDECREF(__pyx_v_flag); + __Pyx_XDECREF(__pyx_v_need_trace_return); + __Pyx_XDECREF(__pyx_v_stop_frame); + 
__Pyx_XDECREF(__pyx_v_breakpoints_for_file); + __Pyx_XDECREF(__pyx_v_breakpoint); + __Pyx_XDECREF(__pyx_v_stop_info); + __Pyx_XDECREF(__pyx_v_stop); + __Pyx_XDECREF(__pyx_v_bp_type); + __Pyx_XDECREF(__pyx_v_new_frame); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XDECREF(__pyx_v_condition); + __Pyx_XDECREF(__pyx_v_back); + __Pyx_XDECREF(__pyx_v__); + __Pyx_XDECREF(__pyx_v_back_filename); + __Pyx_XDECREF(__pyx_v_base); + __Pyx_XDECREF(__pyx_v_plugin_stop); + __Pyx_XDECREF(__pyx_v_f_code); + __Pyx_XDECREF(__pyx_v_file_type); + __Pyx_XDECREF(__pyx_v_stopped_on_plugin); + __Pyx_XDECREF(__pyx_v_retVal); + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_21trace_dispatch(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_21trace_dispatch(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_dispatch (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 3, 3, 1); __PYX_ERR(0, 498, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 3, 3, 2); __PYX_ERR(0, 498, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_dispatch") < 0)) __PYX_ERR(0, 498, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = ((PyObject*)values[1]); + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 498, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_event), (&PyString_Type), 1, "event", 1))) __PYX_ERR(0, 498, 
__pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_20trace_dispatch(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_20trace_dispatch(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("trace_dispatch", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch(__pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 498, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self._args, self.should_skip) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_23__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_23__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_22__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_22__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self) { + int __pyx_v_use_setstate; + PyObject *__pyx_v_state = NULL; + PyObject *__pyx_v__dict = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * cdef bint use_setstate + * state = (self._args, self.should_skip) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = __Pyx_PyInt_From_int(__pyx_v_self->should_skip); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_v_state = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "(tree fragment)":4 + * cdef bint use_setstate + * state = 
(self._args, self.should_skip) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += _dict, + */ + __pyx_t_2 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_v__dict = __pyx_t_2; + __pyx_t_2 = 0; + + /* "(tree fragment)":5 + * state = (self._args, self.should_skip) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + __pyx_t_3 = (__pyx_v__dict != Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":6 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += _dict, # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v__dict); + __pyx_t_1 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_1)); + __pyx_t_1 = 0; + + /* "(tree fragment)":7 + * if _dict is not None: + * state += _dict, + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._args is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":5 + * state = (self._args, self.should_skip) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":9 + * use_setstate = True + * else: + * use_setstate = self._args is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, None), state + */ + /*else*/ { + __pyx_t_4 = (__pyx_v_self->_args != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_4; + } + __pyx_L3:; + + /* "(tree fragment)":10 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, None), state + * else: + */ + __pyx_t_4 = (__pyx_v_use_setstate != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":11 + * use_setstate = self._args is not None + * if use_setstate: + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, state) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_PyDBFrame); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_262582659); + __Pyx_GIVEREF(__pyx_int_262582659); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_262582659); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_2, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 0, 
__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_2); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_1 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":10 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, None), state + * else: + */ + } + + /* "(tree fragment)":13 + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, None), state + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_PyDBFrame); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_262582659); + __Pyx_GIVEREF(__pyx_int_262582659); + PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_262582659); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_v_state); + __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_2); + __pyx_t_5 = 0; + __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self._args, self.should_skip) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":14 + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_25__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_25__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_24__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject 
*__pyx_pf_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_24__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":15 + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 15, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":14 + * else: + * return __pyx_unpickle_PyDBFrame, (type(self), 0xfa6b183, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBFrame__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.PyDBFrame.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":931 + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): # <<<<<<<<<<<<<< + * pass + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_11send_signature_call_trace(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_11send_signature_call_trace = {"send_signature_call_trace", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_11send_signature_call_trace, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_11send_signature_call_trace(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + CYTHON_UNUSED PyObject *__pyx_v_args = 0; + CYTHON_UNUSED PyObject *__pyx_v_kwargs = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("send_signature_call_trace (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "send_signature_call_trace", 1))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_10send_signature_call_trace(__pyx_self, __pyx_v_args, __pyx_v_kwargs); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + __Pyx_XDECREF(__pyx_v_kwargs); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_10send_signature_call_trace(CYTHON_UNUSED PyObject *__pyx_self, CYTHON_UNUSED PyObject *__pyx_v_args, CYTHON_UNUSED PyObject *__pyx_v_kwargs) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("send_signature_call_trace", 0); + + /* function exit code */ + __pyx_r = Py_None; 
__Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":951 + * global_cache_frame_skips = {} + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * t = threadingCurrentThread() + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_13trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_13trace_dispatch = {"trace_dispatch", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_13trace_dispatch, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_13trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_py_db = 0; + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("trace_dispatch (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_py_db,&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[4] = {0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_py_db)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, 1); __PYX_ERR(0, 951, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, 2); __PYX_ERR(0, 951, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, 3); __PYX_ERR(0, 951, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "trace_dispatch") < 0)) __PYX_ERR(0, 951, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 4) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + } + __pyx_v_py_db = values[0]; + __pyx_v_frame = values[1]; + __pyx_v_event = values[2]; + __pyx_v_arg = values[3]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("trace_dispatch", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 951, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return 
NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12trace_dispatch(__pyx_self, __pyx_v_py_db, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12trace_dispatch(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_py_db, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_additional_info = NULL; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_thread_tracer = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + __Pyx_RefNannySetupContext("trace_dispatch", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":952 + * + * def trace_dispatch(py_db, frame, event, arg): + * t = threadingCurrentThread() # <<<<<<<<<<<<<< + * + * if getattr(t, 'pydev_do_not_trace', None): + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_threadingCurrentThread); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 952, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_3) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 952, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 952, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_t = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":954 + * t = threadingCurrentThread() + * + * if getattr(t, 'pydev_do_not_trace', None): # <<<<<<<<<<<<<< + * return None + * + */ + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_t, __pyx_n_s_pydev_do_not_trace, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 954, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 954, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_4) { + + /* "_pydevd_bundle/pydevd_cython.pyx":955 + * + * if getattr(t, 'pydev_do_not_trace', None): + * return None # <<<<<<<<<<<<<< + * + * try: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":954 + * t = threadingCurrentThread() + * + * if getattr(t, 'pydev_do_not_trace', None): # <<<<<<<<<<<<<< + * return None + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":957 + * return None + * + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":958 + * + * try: + * additional_info 
= t.additional_info # <<<<<<<<<<<<<< + * if additional_info is None: + * raise AttributeError() + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_t, __pyx_n_s_additional_info); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 958, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_additional_info = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":959 + * try: + * additional_info = t.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + __pyx_t_4 = (__pyx_v_additional_info == Py_None); + __pyx_t_8 = (__pyx_t_4 != 0); + if (__pyx_t_8) { + + /* "_pydevd_bundle/pydevd_cython.pyx":960 + * additional_info = t.additional_info + * if additional_info is None: + * raise AttributeError() # <<<<<<<<<<<<<< + * except: + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() + */ + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_builtin_AttributeError); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 960, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_Raise(__pyx_t_1, 0, 0, 0); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __PYX_ERR(0, 960, __pyx_L4_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":959 + * try: + * additional_info = t.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":957 + * return None + * + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + } + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L9_try_end; + __pyx_L4_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":961 + * if additional_info is None: + * raise AttributeError() + * except: # <<<<<<<<<<<<<< + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3) < 0) __PYX_ERR(0, 961, __pyx_L6_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_3); + + /* "_pydevd_bundle/pydevd_cython.pyx":962 + * raise AttributeError() + * except: + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() # <<<<<<<<<<<<<< + * + * thread_tracer = ThreadTracer((py_db, t, additional_info, global_cache_skips, global_cache_frame_skips)) + */ + __pyx_t_9 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo), __pyx_empty_tuple, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 962, __pyx_L6_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_t_9); + __Pyx_XDECREF_SET(__pyx_v_additional_info, __pyx_t_9); + if (__Pyx_PyObject_SetAttrStr(__pyx_v_t, __pyx_n_s_additional_info, __pyx_t_9) < 0) __PYX_ERR(0, 962, __pyx_L6_except_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + goto __pyx_L5_exception_handled; + } + __pyx_L6_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":957 + * return None + * + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + __Pyx_PyThreadState_assign + 
__Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + goto __pyx_L1_error; + __pyx_L5_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + __pyx_L9_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":964 + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() + * + * thread_tracer = ThreadTracer((py_db, t, additional_info, global_cache_skips, global_cache_frame_skips)) # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * t._tracer = thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_global_cache_skips); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 964, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_global_cache_frame_skips); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 964, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = PyTuple_New(5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 964, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_t); + __Pyx_GIVEREF(__pyx_v_t); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_t); + __Pyx_INCREF(__pyx_v_additional_info); + __Pyx_GIVEREF(__pyx_v_additional_info); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_additional_info); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_t_2); + __pyx_t_3 = 0; + __pyx_t_2 = 0; + __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 964, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_t_2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 964, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_thread_tracer = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":966 + * thread_tracer = ThreadTracer((py_db, t, additional_info, global_cache_skips, global_cache_frame_skips)) + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * t._tracer = thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). 
# <<<<<<<<<<<<<< + * # ELSE + * # ENDIF + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_t, __pyx_n_s_tracer, ((PyObject *)__pyx_v_thread_tracer)) < 0) __PYX_ERR(0, 966, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":969 + * # ELSE + * # ENDIF + * SetTrace(thread_tracer.__call__) # <<<<<<<<<<<<<< + * return thread_tracer.__call__(frame, event, arg) + * + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_SetTrace); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 969, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_thread_tracer), __pyx_n_s_call_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 969, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_9 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_9)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_9); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_9) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 969, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 969, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_9, __pyx_t_3}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 969, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else + #endif + { + __pyx_t_10 = PyTuple_New(1+1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 969, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_GIVEREF(__pyx_t_9); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_9); __pyx_t_9 = NULL; + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_10, 0+1, __pyx_t_3); + __pyx_t_3 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_10, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 969, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":970 + * # ENDIF + * SetTrace(thread_tracer.__call__) + * return thread_tracer.__call__(frame, event, arg) # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_thread_tracer), __pyx_n_s_call_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 970, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_10 = NULL; + __pyx_t_11 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + __pyx_t_11 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_v_frame, __pyx_v_event, 
__pyx_v_arg}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 970, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-__pyx_t_11, 3+__pyx_t_11); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 970, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_3 = PyTuple_New(3+__pyx_t_11); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 970, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + if (__pyx_t_10) { + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_10); __pyx_t_10 = NULL; + } + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 0+__pyx_t_11, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_3, 1+__pyx_t_11, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_3, 2+__pyx_t_11, __pyx_v_arg); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 970, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":951 + * global_cache_frame_skips = {} + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * t = threadingCurrentThread() + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_additional_info); + __Pyx_XDECREF((PyObject *)__pyx_v_thread_tracer); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":975 + * cdef class SafeCallWrapper: + * cdef method_object + * def __init__(self, method_object): # <<<<<<<<<<<<<< + * self.method_object = method_object + * def __call__(self, *args): + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_method_object = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_method_object,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_method_object)) != 0)) kw_args--; + else goto 
__pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 975, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_method_object = values[0]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 975, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self), __pyx_v_method_object); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v_method_object) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":976 + * cdef method_object + * def __init__(self, method_object): + * self.method_object = method_object # <<<<<<<<<<<<<< + * def __call__(self, *args): + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + */ + __Pyx_INCREF(__pyx_v_method_object); + __Pyx_GIVEREF(__pyx_v_method_object); + __Pyx_GOTREF(__pyx_v_self->method_object); + __Pyx_DECREF(__pyx_v_self->method_object); + __pyx_v_self->method_object = __pyx_v_method_object; + + /* "_pydevd_bundle/pydevd_cython.pyx":975 + * cdef class SafeCallWrapper: + * cdef method_object + * def __init__(self, method_object): # <<<<<<<<<<<<<< + * self.method_object = method_object + * def __call__(self, *args): + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":977 + * def __init__(self, method_object): + * self.method_object = method_object + * def __call__(self, *args): # <<<<<<<<<<<<<< + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + * #in the frame, and that reference might get destroyed by set trace on frame and parents + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); + if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__call__", 0))) return NULL; + __Pyx_INCREF(__pyx_args); + __pyx_v_args = __pyx_args; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_2__call__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self), __pyx_v_args); + + /* function exit code */ + __Pyx_XDECREF(__pyx_v_args); + 
__Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_2__call__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v_args) { + PyObject *__pyx_v_method_obj; + PyObject *__pyx_v_ret = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannySetupContext("__call__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":980 + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + * #in the frame, and that reference might get destroyed by set trace on frame and parents + * cdef PyObject* method_obj = self.method_object # <<<<<<<<<<<<<< + * Py_INCREF(method_obj) + * ret = (method_obj)(*args) + */ + __pyx_v_method_obj = ((PyObject *)__pyx_v_self->method_object); + + /* "_pydevd_bundle/pydevd_cython.pyx":981 + * #in the frame, and that reference might get destroyed by set trace on frame and parents + * cdef PyObject* method_obj = self.method_object + * Py_INCREF(method_obj) # <<<<<<<<<<<<<< + * ret = (method_obj)(*args) + * Py_XDECREF (method_obj) + */ + Py_INCREF(((PyObject *)__pyx_v_method_obj)); + + /* "_pydevd_bundle/pydevd_cython.pyx":982 + * cdef PyObject* method_obj = self.method_object + * Py_INCREF(method_obj) + * ret = (method_obj)(*args) # <<<<<<<<<<<<<< + * Py_XDECREF (method_obj) + * return SafeCallWrapper(ret) if ret is not None else None + */ + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_v_method_obj), __pyx_v_args, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 982, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_ret = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":983 + * Py_INCREF(method_obj) + * ret = (method_obj)(*args) + * Py_XDECREF (method_obj) # <<<<<<<<<<<<<< + * return SafeCallWrapper(ret) if ret is not None else None + * cdef class ThreadTracer: + */ + Py_XDECREF(__pyx_v_method_obj); + + /* "_pydevd_bundle/pydevd_cython.pyx":984 + * ret = (method_obj)(*args) + * Py_XDECREF (method_obj) + * return SafeCallWrapper(ret) if ret is not None else None # <<<<<<<<<<<<<< + * cdef class ThreadTracer: + * cdef public tuple _args; + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_2 = (__pyx_v_ret != Py_None); + if ((__pyx_t_2 != 0)) { + __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 984, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_INCREF(__pyx_v_ret); + __Pyx_GIVEREF(__pyx_v_ret); + PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_ret); + __pyx_t_4 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 984, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_1 = __pyx_t_4; + __pyx_t_4 = 0; + } else { + __Pyx_INCREF(Py_None); + __pyx_t_1 = Py_None; + } + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":977 + * def __init__(self, method_object): + * self.method_object = method_object + * def __call__(self, *args): # <<<<<<<<<<<<<< + * #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + * #in the frame, and that reference might get destroyed by set trace on frame and parents + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); 
+ __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self.method_object,) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_4__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_4__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self) { + int __pyx_v_use_setstate; + PyObject *__pyx_v_state = NULL; + PyObject *__pyx_v__dict = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * cdef bint use_setstate + * state = (self.method_object,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->method_object); + __Pyx_GIVEREF(__pyx_v_self->method_object); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->method_object); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":4 + * cdef bint use_setstate + * state = (self.method_object,) + * _dict = getattr(self, '__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += _dict, + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":5 + * state = (self.method_object,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":6 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += _dict, # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + 
/* "(tree fragment)":7 + * if _dict is not None: + * state += _dict, + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self.method_object is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":5 + * state = (self.method_object,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":9 + * use_setstate = True + * else: + * use_setstate = self.method_object is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->method_object != Py_None); + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":10 + * else: + * use_setstate = self.method_object is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":11 + * use_setstate = self.method_object is not None + * if use_setstate: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_SafeCallWrapper); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_125568891); + __Pyx_GIVEREF(__pyx_int_125568891); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_125568891); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":10 + * else: + * use_setstate = self.method_object is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + * else: + */ + } + + /* "(tree fragment)":13 + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, None), state + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_SafeCallWrapper); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject 
*)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_125568891); + __Pyx_GIVEREF(__pyx_int_125568891); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_125568891); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_5 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self.method_object,) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":14 + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_6__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_6__setstate_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":15 + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 15, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":14 + * else: + * return __pyx_unpickle_SafeCallWrapper, (type(self), 0x77c077b, state) + * def __setstate_cython__(self, 
__pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.SafeCallWrapper.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":987 + * cdef class ThreadTracer: + * cdef public tuple _args; + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args + * # ELSE + */ + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_1__init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_args = 0; + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_args,0}; + PyObject* values[1] = {0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_args)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__init__") < 0)) __PYX_ERR(0, 987, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + } + __pyx_v_args = ((PyObject*)values[0]); + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 987, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return -1; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_args), (&PyTuple_Type), 1, "args", 1))) __PYX_ERR(0, 987, __pyx_L1_error) + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer___init__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), __pyx_v_args); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer___init__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_args) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__init__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":988 + * cdef public tuple _args; + * def __init__(self, tuple args): + * self._args = args # <<<<<<<<<<<<<< + * # ELSE + * # class ThreadTracer: + */ + __Pyx_INCREF(__pyx_v_args); + __Pyx_GIVEREF(__pyx_v_args); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + 
__pyx_v_self->_args = __pyx_v_args; + + /* "_pydevd_bundle/pydevd_cython.pyx":987 + * cdef class ThreadTracer: + * cdef public tuple _args; + * def __init__(self, tuple args): # <<<<<<<<<<<<<< + * self._args = args + * # ELSE + */ + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":996 + * + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * ''' This is the callback used when we enter some context in the debugger. + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__[] = " This is the callback used when we enter some context in the debugger.\n\n We also decorate the thread we are in with info about the debugging.\n The attributes added are:\n pydev_state\n pydev_step_stop\n pydev_step_cmd\n pydev_notify_kill\n\n :param PyDB py_db:\n This is the global debugger (this method should actually be added as a method to it).\n "; +#if CYTHON_COMPILING_IN_CPYTHON +struct wrapperbase __pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__; +#endif +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_3__call__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 3, 3, 1); __PYX_ERR(0, 996, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 3, 3, 2); __PYX_ERR(0, 996, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__call__") < 0)) __PYX_ERR(0, 996, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = values[1]; + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__call__", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 996, __pyx_L3_error) + 
__pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_v_filename = 0; + int __pyx_v_pydev_step_cmd; + PyObject *__pyx_v_cache_key = 0; + PyObject *__pyx_v_cache_skips = 0; + int __pyx_v_is_stepping; + PyObject *__pyx_v_abs_path_real_path_and_base = 0; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_additional_info = 0; + PyObject *__pyx_v_py_db = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_frame_skips_cache = NULL; + PyObject *__pyx_v_file_type = NULL; + PyObject *__pyx_v_ret = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + int __pyx_t_11; + int __pyx_t_12; + PyObject *__pyx_t_13 = NULL; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + int __pyx_t_17; + __Pyx_RefNannySetupContext("__call__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1020 + * # ENDIF + * # print('ENTER: trace_dispatch', frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name) + * py_db, t, additional_info, cache_skips, frame_skips_cache = self._args # <<<<<<<<<<<<<< + * pydev_step_cmd = additional_info.pydev_step_cmd + * is_stepping = pydev_step_cmd != -1 + */ + __pyx_t_1 = __pyx_v_self->_args; + __Pyx_INCREF(__pyx_t_1); + if (likely(__pyx_t_1 != Py_None)) { + PyObject* sequence = __pyx_t_1; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 5)) { + if (size > 5) __Pyx_RaiseTooManyValuesError(5); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 1020, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_3 = PyTuple_GET_ITEM(sequence, 1); + __pyx_t_4 = PyTuple_GET_ITEM(sequence, 2); + __pyx_t_5 = PyTuple_GET_ITEM(sequence, 3); + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 4); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + #else + { + Py_ssize_t i; + PyObject** temps[5] = {&__pyx_t_2,&__pyx_t_3,&__pyx_t_4,&__pyx_t_5,&__pyx_t_6}; + for (i=0; i < 5; i++) { + PyObject* item = PySequence_ITEM(sequence, i); if (unlikely(!item)) __PYX_ERR(0, 1020, __pyx_L1_error) + __Pyx_GOTREF(item); + *(temps[i]) = item; + } + } + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(0, 1020, __pyx_L1_error) + } + if (!(likely(((__pyx_t_4) == Py_None) || 
likely(__Pyx_TypeTest(__pyx_t_4, __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo))))) __PYX_ERR(0, 1020, __pyx_L1_error) + if (!(likely(PyDict_CheckExact(__pyx_t_5))||((__pyx_t_5) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "dict", Py_TYPE(__pyx_t_5)->tp_name), 0))) __PYX_ERR(0, 1020, __pyx_L1_error) + __pyx_v_py_db = __pyx_t_2; + __pyx_t_2 = 0; + __pyx_v_t = __pyx_t_3; + __pyx_t_3 = 0; + __pyx_v_additional_info = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_t_4); + __pyx_t_4 = 0; + __pyx_v_cache_skips = ((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + __pyx_v_frame_skips_cache = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1021 + * # print('ENTER: trace_dispatch', frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name) + * py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + * pydev_step_cmd = additional_info.pydev_step_cmd # <<<<<<<<<<<<<< + * is_stepping = pydev_step_cmd != -1 + * + */ + __pyx_t_7 = __pyx_v_additional_info->pydev_step_cmd; + __pyx_v_pydev_step_cmd = __pyx_t_7; + + /* "_pydevd_bundle/pydevd_cython.pyx":1022 + * py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + * pydev_step_cmd = additional_info.pydev_step_cmd + * is_stepping = pydev_step_cmd != -1 # <<<<<<<<<<<<<< + * + * try: + */ + __pyx_v_is_stepping = (__pyx_v_pydev_step_cmd != -1L); + + /* "_pydevd_bundle/pydevd_cython.pyx":1024 + * is_stepping = pydev_step_cmd != -1 + * + * try: # <<<<<<<<<<<<<< + * if py_db._finish_debugging_session: + * if not py_db._termination_event_set: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_8, &__pyx_t_9, &__pyx_t_10); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_10); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1025 + * + * try: + * if py_db._finish_debugging_session: # <<<<<<<<<<<<<< + * if not py_db._termination_event_set: + * #that was not working very well because jython gave some socket errors + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_finish_debugging_session); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1025, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1025, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1026 + * try: + * if py_db._finish_debugging_session: + * if not py_db._termination_event_set: # <<<<<<<<<<<<<< + * #that was not working very well because jython gave some socket errors + * try: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_termination_event_set); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1026, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1026, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_12 = ((!__pyx_t_11) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1028 + * if not py_db._termination_event_set: + * #that was not working very well because jython gave some socket errors + * try: # <<<<<<<<<<<<<< + * if py_db.output_checker is None: + * kill_all_pydev_threads() + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + 
__Pyx_XGOTREF(__pyx_t_15); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1029 + * #that was not working very well because jython gave some socket errors + * try: + * if py_db.output_checker is None: # <<<<<<<<<<<<<< + * kill_all_pydev_threads() + * except: + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_output_checker); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1029, __pyx_L11_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_12 = (__pyx_t_1 == Py_None); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_11 = (__pyx_t_12 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1030 + * try: + * if py_db.output_checker is None: + * kill_all_pydev_threads() # <<<<<<<<<<<<<< + * except: + * traceback.print_exc() + */ + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_kill_all_pydev_threads); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1030, __pyx_L11_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (__pyx_t_5) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1030, __pyx_L11_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else { + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1030, __pyx_L11_error) + } + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1029 + * #that was not working very well because jython gave some socket errors + * try: + * if py_db.output_checker is None: # <<<<<<<<<<<<<< + * kill_all_pydev_threads() + * except: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1028 + * if not py_db._termination_event_set: + * #that was not working very well because jython gave some socket errors + * try: # <<<<<<<<<<<<<< + * if py_db.output_checker is None: + * kill_all_pydev_threads() + */ + } + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + goto __pyx_L16_try_end; + __pyx_L11_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1031 + * if py_db.output_checker is None: + * kill_all_pydev_threads() + * except: # <<<<<<<<<<<<<< + * traceback.print_exc() + * py_db._termination_event_set = True + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_6, &__pyx_t_5) < 0) __PYX_ERR(0, 1031, __pyx_L13_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_5); + + /* "_pydevd_bundle/pydevd_cython.pyx":1032 + * kill_all_pydev_threads() + * except: + * traceback.print_exc() # <<<<<<<<<<<<<< + * py_db._termination_event_set = True + * return None + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1032, __pyx_L13_except_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_2 = 
__Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1032, __pyx_L13_except_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_3) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1032, __pyx_L13_except_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_4 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1032, __pyx_L13_except_error) + } + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L12_exception_handled; + } + __pyx_L13_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1028 + * if not py_db._termination_event_set: + * #that was not working very well because jython gave some socket errors + * try: # <<<<<<<<<<<<<< + * if py_db.output_checker is None: + * kill_all_pydev_threads() + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); + goto __pyx_L3_error; + __pyx_L12_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); + __pyx_L16_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1033 + * except: + * traceback.print_exc() + * py_db._termination_event_set = True # <<<<<<<<<<<<<< + * return None + * + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_py_db, __pyx_n_s_termination_event_set, Py_True) < 0) __PYX_ERR(0, 1033, __pyx_L3_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1026 + * try: + * if py_db._finish_debugging_session: + * if not py_db._termination_event_set: # <<<<<<<<<<<<<< + * #that was not working very well because jython gave some socket errors + * try: + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1034 + * traceback.print_exc() + * py_db._termination_event_set = True + * return None # <<<<<<<<<<<<<< + * + * # if thread is not alive, cancel trace_dispatch processing + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1025 + * + * try: + * if py_db._finish_debugging_session: # <<<<<<<<<<<<<< + * if not py_db._termination_event_set: + * #that was not working very well because jython gave some socket errors + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1037 + * + * # if thread is not alive, cancel trace_dispatch processing + * if not is_thread_alive(t): # <<<<<<<<<<<<<< + * py_db._process_thread_not_alive(get_thread_id(t)) + * return None # suspend tracing + */ + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_is_thread_alive); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1037, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_1)) { + PyObject* function = 
PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (!__pyx_t_1) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_v_t); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1037, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_t}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1037, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_t}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1037, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1037, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_INCREF(__pyx_v_t); + __Pyx_GIVEREF(__pyx_v_t); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_t); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_4, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1037, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_11 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1037, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_12 = ((!__pyx_t_11) != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1038 + * # if thread is not alive, cancel trace_dispatch processing + * if not is_thread_alive(t): + * py_db._process_thread_not_alive(get_thread_id(t)) # <<<<<<<<<<<<<< + * return None # suspend tracing + * + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_process_thread_not_alive); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_thread_id); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_2) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_t); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_2, __pyx_v_t}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_2, __pyx_v_t}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { 
+ __pyx_t_3 = PyTuple_New(1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_2); __pyx_t_2 = NULL; + __Pyx_INCREF(__pyx_v_t); + __Pyx_GIVEREF(__pyx_v_t); + PyTuple_SET_ITEM(__pyx_t_3, 0+1, __pyx_v_t); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_3, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (!__pyx_t_1) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_t_4}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_t_4}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_3 = PyTuple_New(1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_3, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1038, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1039 + * if not is_thread_alive(t): + * py_db._process_thread_not_alive(get_thread_id(t)) + * return None # suspend tracing # <<<<<<<<<<<<<< + * + * try: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1037 + * + * # if thread is not alive, cancel trace_dispatch processing + * if not is_thread_alive(t): # <<<<<<<<<<<<<< + * py_db._process_thread_not_alive(get_thread_id(t)) + * return None # suspend tracing + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1041 + * return None # suspend tracing + * + * try: # <<<<<<<<<<<<<< + * # Make fast path faster! 
+ * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_15, &__pyx_t_14, &__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_13); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1043 + * try: + * # Make fast path faster! + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] # <<<<<<<<<<<<<< + * except: + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + */ + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1043, __pyx_L21_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1043, __pyx_L21_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1043, __pyx_L21_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = PyObject_GetItem(__pyx_t_5, __pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1043, __pyx_L21_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (!(likely(PyTuple_CheckExact(__pyx_t_6))||((__pyx_t_6) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 1043, __pyx_L21_error) + __pyx_v_abs_path_real_path_and_base = ((PyObject*)__pyx_t_6); + __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1041 + * return None # suspend tracing + * + * try: # <<<<<<<<<<<<<< + * # Make fast path faster! + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + */ + } + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + goto __pyx_L26_try_end; + __pyx_L21_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1044 + * # Make fast path faster! 
+ * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_3, &__pyx_t_5) < 0) __PYX_ERR(0, 1044, __pyx_L23_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_5); + + /* "_pydevd_bundle/pydevd_cython.pyx":1045 + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) # <<<<<<<<<<<<<< + * + * if py_db.thread_analyser is not None: + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1045, __pyx_L23_except_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_2) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_v_frame); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1045, __pyx_L23_except_error) + __Pyx_GOTREF(__pyx_t_4); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_2, __pyx_v_frame}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1045, __pyx_L23_except_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_2, __pyx_v_frame}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1045, __pyx_L23_except_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_16 = PyTuple_New(1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 1045, __pyx_L23_except_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_2); __pyx_t_2 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_16, 0+1, __pyx_v_frame); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_16, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1045, __pyx_L23_except_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!(likely(PyTuple_CheckExact(__pyx_t_4))||((__pyx_t_4) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_4)->tp_name), 0))) __PYX_ERR(0, 1045, __pyx_L23_except_error) + __Pyx_XDECREF_SET(__pyx_v_abs_path_real_path_and_base, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L22_exception_handled; + } + __pyx_L23_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1041 + * return None # suspend tracing + * + * try: # <<<<<<<<<<<<<< + * # Make fast path faster! 
+ * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_14, __pyx_t_13); + goto __pyx_L3_error; + __pyx_L22_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_ExceptionReset(__pyx_t_15, __pyx_t_14, __pyx_t_13); + __pyx_L26_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1047 + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + * + * if py_db.thread_analyser is not None: # <<<<<<<<<<<<<< + * py_db.thread_analyser.log_event(frame) + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_thread_analyser); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1047, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_12 = (__pyx_t_5 != Py_None); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_11 = (__pyx_t_12 != 0); + if (__pyx_t_11) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1048 + * + * if py_db.thread_analyser is not None: + * py_db.thread_analyser.log_event(frame) # <<<<<<<<<<<<<< + * + * if py_db.asyncio_analyser is not None: + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_thread_analyser); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1048, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_log_event); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1048, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (!__pyx_t_3) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_v_frame); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1048, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1048, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_3, __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1048, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1048, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); __pyx_t_3 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v_frame); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_4, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1048, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1047 + * abs_path_real_path_and_base = 
get_abs_path_real_path_and_base_from_frame(frame) + * + * if py_db.thread_analyser is not None: # <<<<<<<<<<<<<< + * py_db.thread_analyser.log_event(frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1050 + * py_db.thread_analyser.log_event(frame) + * + * if py_db.asyncio_analyser is not None: # <<<<<<<<<<<<<< + * py_db.asyncio_analyser.log_event(frame) + * + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_asyncio_analyser); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1050, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_11 = (__pyx_t_5 != Py_None); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_12 = (__pyx_t_11 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1051 + * + * if py_db.asyncio_analyser is not None: + * py_db.asyncio_analyser.log_event(frame) # <<<<<<<<<<<<<< + * + * filename = abs_path_real_path_and_base[1] + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_asyncio_analyser); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1051, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_log_event); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1051, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (!__pyx_t_6) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_v_frame); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1051, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1051, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_4)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v_frame}; + __pyx_t_5 = __Pyx_PyCFunction_FastCall(__pyx_t_4, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1051, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_5); + } else + #endif + { + __pyx_t_3 = PyTuple_New(1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1051, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_3, 0+1, __pyx_v_frame); + __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1051, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } + } + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1050 + * py_db.thread_analyser.log_event(frame) + * + * if py_db.asyncio_analyser is not None: # <<<<<<<<<<<<<< + * py_db.asyncio_analyser.log_event(frame) + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1053 + * py_db.asyncio_analyser.log_event(frame) + * + * filename = abs_path_real_path_and_base[1] # <<<<<<<<<<<<<< + * # Note: it's important that the context name is also given because we may hit something once + * # in 
the global context and another in the local context. + */ + if (unlikely(__pyx_v_abs_path_real_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1053, __pyx_L3_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_path_real_path_and_base, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1053, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + if (!(likely(PyString_CheckExact(__pyx_t_5))||((__pyx_t_5) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_5)->tp_name), 0))) __PYX_ERR(0, 1053, __pyx_L3_error) + __pyx_v_filename = ((PyObject*)__pyx_t_5); + __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1056 + * # Note: it's important that the context name is also given because we may hit something once + * # in the global context and another in the local context. + * cache_key = (frame.f_lineno, frame.f_code.co_name, filename) # <<<<<<<<<<<<<< + * if not is_stepping and cache_key in cache_skips: + * # print('skipped: trace_dispatch (cache hit)', cache_key, frame.f_lineno, event, frame.f_code.co_name) + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_lineno); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1056, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1056, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_co_name); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1056, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1056, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_filename); + __pyx_t_5 = 0; + __pyx_t_3 = 0; + __pyx_v_cache_key = ((PyObject*)__pyx_t_4); + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1057 + * # in the global context and another in the local context. 
+ * cache_key = (frame.f_lineno, frame.f_code.co_name, filename) + * if not is_stepping and cache_key in cache_skips: # <<<<<<<<<<<<<< + * # print('skipped: trace_dispatch (cache hit)', cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None + */ + __pyx_t_11 = ((!(__pyx_v_is_stepping != 0)) != 0); + if (__pyx_t_11) { + } else { + __pyx_t_12 = __pyx_t_11; + goto __pyx_L32_bool_binop_done; + } + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); + __PYX_ERR(0, 1057, __pyx_L3_error) + } + __pyx_t_11 = (__Pyx_PyDict_ContainsTF(__pyx_v_cache_key, __pyx_v_cache_skips, Py_EQ)); if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 1057, __pyx_L3_error) + __pyx_t_17 = (__pyx_t_11 != 0); + __pyx_t_12 = __pyx_t_17; + __pyx_L32_bool_binop_done:; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1059 + * if not is_stepping and cache_key in cache_skips: + * # print('skipped: trace_dispatch (cache hit)', cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None # <<<<<<<<<<<<<< + * + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1057 + * # in the global context and another in the local context. + * cache_key = (frame.f_lineno, frame.f_code.co_name, filename) + * if not is_stepping and cache_key in cache_skips: # <<<<<<<<<<<<<< + * # print('skipped: trace_dispatch (cache hit)', cache_key, frame.f_lineno, event, frame.f_code.co_name) + * return None + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1061 + * return None + * + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd # <<<<<<<<<<<<<< + * + * if file_type is not None: + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_file_type); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1061, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + if (unlikely(__pyx_v_abs_path_real_path_and_base == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1061, __pyx_L3_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v_abs_path_real_path_and_base, -1L, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1061, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_6) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1061, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_5}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1061, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_5}; + __pyx_t_4 = 
__Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1061, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1061, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_1, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1061, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_v_file_type = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1063 + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd + * + * if file_type is not None: # <<<<<<<<<<<<<< + * if file_type == 1: # inlining LIB_FILE = 1 + * if py_db.not_in_scope(filename): + */ + __pyx_t_12 = (__pyx_v_file_type != Py_None); + __pyx_t_17 = (__pyx_t_12 != 0); + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1064 + * + * if file_type is not None: + * if file_type == 1: # inlining LIB_FILE = 1 # <<<<<<<<<<<<<< + * if py_db.not_in_scope(filename): + * # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + __pyx_t_4 = __Pyx_PyInt_EqObjC(__pyx_v_file_type, __pyx_int_1, 1, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1064, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_17 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_17 < 0)) __PYX_ERR(0, 1064, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1065 + * if file_type is not None: + * if file_type == 1: # inlining LIB_FILE = 1 + * if py_db.not_in_scope(filename): # <<<<<<<<<<<<<< + * # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[cache_key] = 1 + */ + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_not_in_scope); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1065, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_1) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_filename); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1065, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1065, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 
1065, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1065, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_v_filename); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1065, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_17 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_17 < 0)) __PYX_ERR(0, 1065, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1067 + * if py_db.not_in_scope(filename): + * # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[cache_key] = 1 # <<<<<<<<<<<<<< + * return None + * else: + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1067, __pyx_L3_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1067, __pyx_L3_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1068 + * # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[cache_key] = 1 + * return None # <<<<<<<<<<<<<< + * else: + * # print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1065 + * if file_type is not None: + * if file_type == 1: # inlining LIB_FILE = 1 + * if py_db.not_in_scope(filename): # <<<<<<<<<<<<<< + * # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[cache_key] = 1 + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1064 + * + * if file_type is not None: + * if file_type == 1: # inlining LIB_FILE = 1 # <<<<<<<<<<<<<< + * if py_db.not_in_scope(filename): + * # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + goto __pyx_L35; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1071 + * else: + * # print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[cache_key] = 1 # <<<<<<<<<<<<<< + * return None + * + */ + /*else*/ { + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1071, __pyx_L3_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1071, __pyx_L3_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1072 + * # print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + * cache_skips[cache_key] = 1 + * return None # <<<<<<<<<<<<<< + * + * if is_stepping: + */ + __Pyx_XDECREF(__pyx_r); + 
__Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + } + __pyx_L35:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1063 + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd + * + * if file_type is not None: # <<<<<<<<<<<<<< + * if file_type == 1: # inlining LIB_FILE = 1 + * if py_db.not_in_scope(filename): + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1074 + * return None + * + * if is_stepping: # <<<<<<<<<<<<<< + * if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): + * # ignore files matching stepping filters + */ + __pyx_t_17 = (__pyx_v_is_stepping != 0); + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1075 + * + * if is_stepping: + * if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): # <<<<<<<<<<<<<< + * # ignore files matching stepping filters + * return None + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_is_filter_enabled); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_12) { + } else { + __pyx_t_17 = __pyx_t_12; + goto __pyx_L39_bool_binop_done; + } + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_is_ignored_by_filters); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_5) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_filename); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_1 = PyTuple_New(1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_1, 0+1, __pyx_v_filename); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_1, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1075, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_17 = __pyx_t_12; + __pyx_L39_bool_binop_done:; + if (__pyx_t_17) { + + /* 
"_pydevd_bundle/pydevd_cython.pyx":1077 + * if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): + * # ignore files matching stepping filters + * return None # <<<<<<<<<<<<<< + * if py_db.is_filter_libraries and py_db.not_in_scope(filename): + * # ignore library files while stepping + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1075 + * + * if is_stepping: + * if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): # <<<<<<<<<<<<<< + * # ignore files matching stepping filters + * return None + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1078 + * # ignore files matching stepping filters + * return None + * if py_db.is_filter_libraries and py_db.not_in_scope(filename): # <<<<<<<<<<<<<< + * # ignore library files while stepping + * return None + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_is_filter_libraries); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (__pyx_t_12) { + } else { + __pyx_t_17 = __pyx_t_12; + goto __pyx_L42_bool_binop_done; + } + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_not_in_scope); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_1) { + __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_v_filename); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_v_filename); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1078, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_17 = __pyx_t_12; + __pyx_L42_bool_binop_done:; + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1080 + 
* if py_db.is_filter_libraries and py_db.not_in_scope(filename): + * # ignore library files while stepping + * return None # <<<<<<<<<<<<<< + * + * # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1078 + * # ignore files matching stepping filters + * return None + * if py_db.is_filter_libraries and py_db.not_in_scope(filename): # <<<<<<<<<<<<<< + * # ignore library files while stepping + * return None + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1074 + * return None + * + * if is_stepping: # <<<<<<<<<<<<<< + * if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): + * # ignore files matching stepping filters + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1083 + * + * # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type) + * if additional_info.is_tracing: # <<<<<<<<<<<<<< + * return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + * + */ + __pyx_t_17 = (__pyx_v_additional_info->is_tracing != 0); + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1084 + * # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type) + * if additional_info.is_tracing: + * return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch # <<<<<<<<<<<<<< + * + * if event == 'call' and py_db.signature_factory: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1083 + * + * # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type) + * if additional_info.is_tracing: # <<<<<<<<<<<<<< + * return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + * + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1086 + * return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + * + * if event == 'call' and py_db.signature_factory: # <<<<<<<<<<<<<< + * # We can only have a call when entering a context, so, check at this level, not at the PyDBFrame. + * send_signature_call_trace(py_db, frame, filename) + */ + __pyx_t_12 = (__Pyx_PyString_Equals(__pyx_v_event, __pyx_n_s_call, Py_EQ)); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1086, __pyx_L3_error) + if (__pyx_t_12) { + } else { + __pyx_t_17 = __pyx_t_12; + goto __pyx_L46_bool_binop_done; + } + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_signature_factory); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1086, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1086, __pyx_L3_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_17 = __pyx_t_12; + __pyx_L46_bool_binop_done:; + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1088 + * if event == 'call' and py_db.signature_factory: + * # We can only have a call when entering a context, so, check at this level, not at the PyDBFrame. 
+ * send_signature_call_trace(py_db, frame, filename) # <<<<<<<<<<<<<< + * + * # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_send_signature_call_trace); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1088, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_5 = NULL; + __pyx_t_7 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + __pyx_t_7 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_py_db, __pyx_v_frame, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1088, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[4] = {__pyx_t_5, __pyx_v_py_db, __pyx_v_frame, __pyx_v_filename}; + __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_7, 3+__pyx_t_7); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1088, __pyx_L3_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_4); + } else + #endif + { + __pyx_t_1 = PyTuple_New(3+__pyx_t_7); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1088, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + if (__pyx_t_5) { + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5); __pyx_t_5 = NULL; + } + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_1, 0+__pyx_t_7, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_1, 1+__pyx_t_7, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_1, 2+__pyx_t_7, __pyx_v_filename); + __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_1, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1088, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1086 + * return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + * + * if event == 'call' and py_db.signature_factory: # <<<<<<<<<<<<<< + * # We can only have a call when entering a context, so, check at this level, not at the PyDBFrame. + * send_signature_call_trace(py_db, frame, filename) + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1092 + * # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak + * # reference to the frame). 
+ * ret = PyDBFrame((py_db, filename, additional_info, t, frame_skips_cache, (frame.f_code.co_name, frame.f_code.co_firstlineno, filename))).trace_dispatch(frame, event, arg) # <<<<<<<<<<<<<< + * if ret is None: + * cache_skips[cache_key] = 1 + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_co_name); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_co_firstlineno); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_3); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_filename); + __pyx_t_3 = 0; + __pyx_t_1 = 0; + __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_py_db); + __Pyx_GIVEREF(__pyx_v_py_db); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_py_db); + __Pyx_INCREF(__pyx_v_filename); + __Pyx_GIVEREF(__pyx_v_filename); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_filename); + __Pyx_INCREF(((PyObject *)__pyx_v_additional_info)); + __Pyx_GIVEREF(((PyObject *)__pyx_v_additional_info)); + PyTuple_SET_ITEM(__pyx_t_1, 2, ((PyObject *)__pyx_v_additional_info)); + __Pyx_INCREF(__pyx_v_t); + __Pyx_GIVEREF(__pyx_v_t); + PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_v_t); + __Pyx_INCREF(__pyx_v_frame_skips_cache); + __Pyx_GIVEREF(__pyx_v_frame_skips_cache); + PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_frame_skips_cache); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame), __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (!(likely(PyString_CheckExact(__pyx_v_event))||((__pyx_v_event) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_v_event)->tp_name), 0))) __PYX_ERR(0, 1092, __pyx_L3_error) + __pyx_t_4 = ((struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_t_1)->__pyx_vtab)->trace_dispatch(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_t_1), __pyx_v_frame, ((PyObject*)__pyx_v_event), __pyx_v_arg, 0); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1092, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_ret = __pyx_t_4; + __pyx_t_4 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1093 + * # reference to the frame). 
+ * ret = PyDBFrame((py_db, filename, additional_info, t, frame_skips_cache, (frame.f_code.co_name, frame.f_code.co_firstlineno, filename))).trace_dispatch(frame, event, arg) + * if ret is None: # <<<<<<<<<<<<<< + * cache_skips[cache_key] = 1 + * return None + */ + __pyx_t_17 = (__pyx_v_ret == Py_None); + __pyx_t_12 = (__pyx_t_17 != 0); + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1094 + * ret = PyDBFrame((py_db, filename, additional_info, t, frame_skips_cache, (frame.f_code.co_name, frame.f_code.co_firstlineno, filename))).trace_dispatch(frame, event, arg) + * if ret is None: + * cache_skips[cache_key] = 1 # <<<<<<<<<<<<<< + * return None + * + */ + if (unlikely(__pyx_v_cache_skips == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(0, 1094, __pyx_L3_error) + } + if (unlikely(PyDict_SetItem(__pyx_v_cache_skips, __pyx_v_cache_key, __pyx_int_1) < 0)) __PYX_ERR(0, 1094, __pyx_L3_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1095 + * if ret is None: + * cache_skips[cache_key] = 1 + * return None # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1093 + * # reference to the frame). + * ret = PyDBFrame((py_db, filename, additional_info, t, frame_skips_cache, (frame.f_code.co_name, frame.f_code.co_firstlineno, filename))).trace_dispatch(frame, event, arg) + * if ret is None: # <<<<<<<<<<<<<< + * cache_skips[cache_key] = 1 + * return None + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1098 + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * return SafeCallWrapper(ret) # <<<<<<<<<<<<<< + * # ELSE + * # return ret + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 1098, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_INCREF(__pyx_v_ret); + __Pyx_GIVEREF(__pyx_v_ret); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_ret); + __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1098, __pyx_L3_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L7_try_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1024 + * is_stepping = pydev_step_cmd != -1 + * + * try: # <<<<<<<<<<<<<< + * if py_db._finish_debugging_session: + * if not py_db._termination_event_set: + */ + } + __pyx_L3_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1103 + * # ENDIF + * + * except SystemExit: # <<<<<<<<<<<<<< + * return None + * + */ + __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_SystemExit); + if (__pyx_t_7) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_4, &__pyx_t_3) < 0) __PYX_ERR(0, 1103, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_3); + + /* "_pydevd_bundle/pydevd_cython.pyx":1104 + * + * 
except SystemExit: + * return None # <<<<<<<<<<<<<< + * + * except Exception: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L6_except_return; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1106 + * return None + * + * except Exception: # <<<<<<<<<<<<<< + * if py_db._finish_debugging_session: + * return None # Don't log errors when we're shutting down. + */ + __pyx_t_7 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0]))); + if (__pyx_t_7) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_3, &__pyx_t_4, &__pyx_t_1) < 0) __PYX_ERR(0, 1106, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_bundle/pydevd_cython.pyx":1107 + * + * except Exception: + * if py_db._finish_debugging_session: # <<<<<<<<<<<<<< + * return None # Don't log errors when we're shutting down. + * # Log it + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_py_db, __pyx_n_s_finish_debugging_session); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1107, __pyx_L5_except_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_12 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_12 < 0)) __PYX_ERR(0, 1107, __pyx_L5_except_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (__pyx_t_12) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1108 + * except Exception: + * if py_db._finish_debugging_session: + * return None # Don't log errors when we're shutting down. # <<<<<<<<<<<<<< + * # Log it + * try: + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L6_except_return; + + /* "_pydevd_bundle/pydevd_cython.pyx":1107 + * + * except Exception: + * if py_db._finish_debugging_session: # <<<<<<<<<<<<<< + * return None # Don't log errors when we're shutting down. + * # Log it + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1110 + * return None # Don't log errors when we're shutting down. + * # Log it + * try: # <<<<<<<<<<<<<< + * if traceback is not None: + * # This can actually happen during the interpreter shutdown in Python 2.7 + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_13, &__pyx_t_14, &__pyx_t_15); + __Pyx_XGOTREF(__pyx_t_13); + __Pyx_XGOTREF(__pyx_t_14); + __Pyx_XGOTREF(__pyx_t_15); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":1111 + * # Log it + * try: + * if traceback is not None: # <<<<<<<<<<<<<< + * # This can actually happen during the interpreter shutdown in Python 2.7 + * traceback.print_exc() + */ + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1111, __pyx_L54_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_12 = (__pyx_t_5 != Py_None); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __pyx_t_17 = (__pyx_t_12 != 0); + if (__pyx_t_17) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1113 + * if traceback is not None: + * # This can actually happen during the interpreter shutdown in Python 2.7 + * traceback.print_exc() # <<<<<<<<<<<<<< + * except: + * # Error logging? We're really in the interpreter shutdown... 
+ */ + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_traceback); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 1113, __pyx_L54_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_16 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_print_exc); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 1113, __pyx_L54_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_16))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_16); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_16); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_16, function); + } + } + if (__pyx_t_6) { + __pyx_t_5 = __Pyx_PyObject_CallOneArg(__pyx_t_16, __pyx_t_6); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1113, __pyx_L54_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else { + __pyx_t_5 = __Pyx_PyObject_CallNoArg(__pyx_t_16); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1113, __pyx_L54_error) + } + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1111 + * # Log it + * try: + * if traceback is not None: # <<<<<<<<<<<<<< + * # This can actually happen during the interpreter shutdown in Python 2.7 + * traceback.print_exc() + */ + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1110 + * return None # Don't log errors when we're shutting down. + * # Log it + * try: # <<<<<<<<<<<<<< + * if traceback is not None: + * # This can actually happen during the interpreter shutdown in Python 2.7 + */ + } + __Pyx_XDECREF(__pyx_t_13); __pyx_t_13 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + goto __pyx_L61_try_end; + __pyx_L54_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1114 + * # This can actually happen during the interpreter shutdown in Python 2.7 + * traceback.print_exc() + * except: # <<<<<<<<<<<<<< + * # Error logging? We're really in the interpreter shutdown... 
+ * # (https://github.com/fabioz/PyDev.Debugger/issues/8) + */ + /*except:*/ { + __Pyx_ErrRestore(0,0,0); + goto __pyx_L55_exception_handled; + } + __pyx_L55_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_13); + __Pyx_XGIVEREF(__pyx_t_14); + __Pyx_XGIVEREF(__pyx_t_15); + __Pyx_ExceptionReset(__pyx_t_13, __pyx_t_14, __pyx_t_15); + __pyx_L61_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":1118 + * # (https://github.com/fabioz/PyDev.Debugger/issues/8) + * pass + * return None # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + goto __pyx_L6_except_return; + } + goto __pyx_L5_except_error; + __pyx_L5_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":1024 + * is_stepping = pydev_step_cmd != -1 + * + * try: # <<<<<<<<<<<<<< + * if py_db._finish_debugging_session: + * if not py_db._termination_event_set: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); + goto __pyx_L1_error; + __pyx_L7_try_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); + goto __pyx_L0; + __pyx_L6_except_return:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_10); + __Pyx_ExceptionReset(__pyx_t_8, __pyx_t_9, __pyx_t_10); + goto __pyx_L0; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":996 + * + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * ''' This is the callback used when we enter some context in the debugger. 
+ * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_filename); + __Pyx_XDECREF(__pyx_v_cache_key); + __Pyx_XDECREF(__pyx_v_cache_skips); + __Pyx_XDECREF(__pyx_v_abs_path_real_path_and_base); + __Pyx_XDECREF((PyObject *)__pyx_v_additional_info); + __Pyx_XDECREF(__pyx_v_py_db); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_frame_skips_cache); + __Pyx_XDECREF(__pyx_v_file_type); + __Pyx_XDECREF(__pyx_v_ret); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":986 + * return SafeCallWrapper(ret) if ret is not None else None + * cdef class ThreadTracer: + * cdef public tuple _args; # <<<<<<<<<<<<<< + * def __init__(self, tuple args): + * self._args = args + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_1__get__(PyObject *__pyx_v_self); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_1__get__(PyObject *__pyx_v_self) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args___get__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args___get__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__get__", 0); + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_self->_args); + __pyx_r = __pyx_v_self->_args; + goto __pyx_L0; + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_3__set__(PyObject *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__set__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_2__set__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), ((PyObject *)__pyx_v_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_2__set__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v_value) { + int __pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__set__", 0); + if (!(likely(PyTuple_CheckExact(__pyx_v_value))||((__pyx_v_value) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v_value)->tp_name), 0))) __PYX_ERR(0, 986, __pyx_L1_error) + __pyx_t_1 = __pyx_v_value; + __Pyx_INCREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + 
__Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* function exit code */ + __pyx_r = 0; + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer._args.__set__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = -1; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_5__del__(PyObject *__pyx_v_self); /*proto*/ +static int __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_5__del__(PyObject *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_4__del__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static int __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_4__del__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self) { + int __pyx_r; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__del__", 0); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + __Pyx_GOTREF(__pyx_v_self->_args); + __Pyx_DECREF(__pyx_v_self->_args); + __pyx_v_self->_args = ((PyObject*)Py_None); + + /* function exit code */ + __pyx_r = 0; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self._args,) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__reduce_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_4__reduce_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_4__reduce_cython__(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self) { + int __pyx_v_use_setstate; + PyObject *__pyx_v_state = NULL; + PyObject *__pyx_v__dict = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__reduce_cython__", 0); + + /* "(tree fragment)":3 + * def __reduce_cython__(self): + * cdef bint use_setstate + * state = (self._args,) # <<<<<<<<<<<<<< + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v_self->_args); + __Pyx_GIVEREF(__pyx_v_self->_args); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->_args); + __pyx_v_state = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":4 + * cdef bint use_setstate + * state = (self._args,) + * _dict = getattr(self, 
'__dict__', None) # <<<<<<<<<<<<<< + * if _dict is not None: + * state += _dict, + */ + __pyx_t_1 = __Pyx_GetAttr3(((PyObject *)__pyx_v_self), __pyx_n_s_dict, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v__dict = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":5 + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + __pyx_t_2 = (__pyx_v__dict != Py_None); + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":6 + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: + * state += _dict, # <<<<<<<<<<<<<< + * use_setstate = True + * else: + */ + __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_v__dict); + __Pyx_GIVEREF(__pyx_v__dict); + PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v__dict); + __pyx_t_4 = PyNumber_InPlaceAdd(__pyx_v_state, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF_SET(__pyx_v_state, ((PyObject*)__pyx_t_4)); + __pyx_t_4 = 0; + + /* "(tree fragment)":7 + * if _dict is not None: + * state += _dict, + * use_setstate = True # <<<<<<<<<<<<<< + * else: + * use_setstate = self._args is not None + */ + __pyx_v_use_setstate = 1; + + /* "(tree fragment)":5 + * state = (self._args,) + * _dict = getattr(self, '__dict__', None) + * if _dict is not None: # <<<<<<<<<<<<<< + * state += _dict, + * use_setstate = True + */ + goto __pyx_L3; + } + + /* "(tree fragment)":9 + * use_setstate = True + * else: + * use_setstate = self._args is not None # <<<<<<<<<<<<<< + * if use_setstate: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + */ + /*else*/ { + __pyx_t_3 = (__pyx_v_self->_args != ((PyObject*)Py_None)); + __pyx_v_use_setstate = __pyx_t_3; + } + __pyx_L3:; + + /* "(tree fragment)":10 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + * else: + */ + __pyx_t_3 = (__pyx_v_use_setstate != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":11 + * use_setstate = self._args is not None + * if use_setstate: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state # <<<<<<<<<<<<<< + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_4 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_ThreadTracer); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64458794); + __Pyx_GIVEREF(__pyx_int_64458794); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64458794); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_1, 2, Py_None); + __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 11, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_5, 1, 
__pyx_t_1); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_v_state); + __pyx_t_4 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_5; + __pyx_t_5 = 0; + goto __pyx_L0; + + /* "(tree fragment)":10 + * else: + * use_setstate = self._args is not None + * if use_setstate: # <<<<<<<<<<<<<< + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + * else: + */ + } + + /* "(tree fragment)":13 + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, None), state + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) # <<<<<<<<<<<<<< + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) + */ + /*else*/ { + __Pyx_XDECREF(__pyx_r); + __pyx_t_5 = __Pyx_GetModuleGlobalName(__pyx_n_s_pyx_unpickle_ThreadTracer); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_GIVEREF(((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)Py_TYPE(((PyObject *)__pyx_v_self)))); + __Pyx_INCREF(__pyx_int_64458794); + __Pyx_GIVEREF(__pyx_int_64458794); + PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_int_64458794); + __Pyx_INCREF(__pyx_v_state); + __Pyx_GIVEREF(__pyx_v_state); + PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_state); + __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 13, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_5); + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); + __pyx_t_5 = 0; + __pyx_t_1 = 0; + __pyx_r = __pyx_t_4; + __pyx_t_4 = 0; + goto __pyx_L0; + } + + /* "(tree fragment)":1 + * def __reduce_cython__(self): # <<<<<<<<<<<<<< + * cdef bint use_setstate + * state = (self._args,) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__reduce_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_state); + __Pyx_XDECREF(__pyx_v__dict); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":14 + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__setstate_cython__ (wrapper)", 0); + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_6__setstate_cython__(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_6__setstate_cython__(struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *__pyx_v_self, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("__setstate_cython__", 0); + + /* "(tree fragment)":15 + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) # <<<<<<<<<<<<<< + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 15, __pyx_L1_error) + __pyx_t_1 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(__pyx_v_self, ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 15, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":14 + * else: + * return __pyx_unpickle_ThreadTracer, (type(self), 0x3d7902a, state) + * def __setstate_cython__(self, __pyx_state): # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state(self, __pyx_state) + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.ThreadTracer.__setstate_cython__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_bundle/pydevd_cython.pyx":1133 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * _tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15__call__(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_15__call__ = {"__call__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_15__call__, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_15__call__(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_self = 0; + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__call__ (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_self,&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[4] = {0,0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + CYTHON_FALLTHROUGH; + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_self)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) 
!= 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, 1); __PYX_ERR(0, 1133, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, 2); __PYX_ERR(0, 1133, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 3: + if (likely((values[3] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, 3); __PYX_ERR(0, 1133, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__call__") < 0)) __PYX_ERR(0, 1133, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 4) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + values[3] = PyTuple_GET_ITEM(__pyx_args, 3); + } + __pyx_v_self = values[0]; + __pyx_v_frame = values[1]; + __pyx_v_event = values[2]; + __pyx_v_arg = values[3]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__call__", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 1133, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_14__call__(__pyx_self, __pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_14__call__(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("__call__", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":1134 + * + * def __call__(self, frame, event, arg): + * _tid_to_last_frame[self._args[1].ident] = frame # <<<<<<<<<<<<<< + * return _original_call(self, frame, event, arg) + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_tid_to_last_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_self, __pyx_n_s_args_2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_2, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 1134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_ident); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1134, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + if (unlikely(PyObject_SetItem(__pyx_t_1, __pyx_t_2, __pyx_v_frame) < 0)) __PYX_ERR(0, 1134, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1135 + * def __call__(self, frame, event, arg): + * _tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, 
arg) # <<<<<<<<<<<<<< + * + * ThreadTracer.__call__ = __call__ + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_original_call); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = NULL; + __pyx_t_4 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + __pyx_t_4 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_2 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_4, 4+__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1135, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[5] = {__pyx_t_3, __pyx_v_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg}; + __pyx_t_2 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_4, 4+__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1135, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_GOTREF(__pyx_t_2); + } else + #endif + { + __pyx_t_5 = PyTuple_New(4+__pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 1135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + if (__pyx_t_3) { + __Pyx_GIVEREF(__pyx_t_3); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3); __pyx_t_3 = NULL; + } + __Pyx_INCREF(__pyx_v_self); + __Pyx_GIVEREF(__pyx_v_self); + PyTuple_SET_ITEM(__pyx_t_5, 0+__pyx_t_4, __pyx_v_self); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_5, 1+__pyx_t_4, __pyx_v_frame); + __Pyx_INCREF(__pyx_v_event); + __Pyx_GIVEREF(__pyx_v_event); + PyTuple_SET_ITEM(__pyx_t_5, 2+__pyx_t_4, __pyx_v_event); + __Pyx_INCREF(__pyx_v_arg); + __Pyx_GIVEREF(__pyx_v_arg); + PyTuple_SET_ITEM(__pyx_t_5, 3+__pyx_t_4, __pyx_v_arg); + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_5, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1133 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * _tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__call__", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0xa9a4341: + * from pickle import PickleError + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBAdditionalThreadInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef 
__pyx_mdef_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBAdditionalThreadInfo = {"__pyx_unpickle_PyDBAdditionalThreadInfo", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBAdditionalThreadInfo, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBAdditionalThreadInfo(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBAdditionalThreadInfo (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBAdditionalThreadInfo", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBAdditionalThreadInfo", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_PyDBAdditionalThreadInfo") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBAdditionalThreadInfo", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBAdditionalThreadInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_16__pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_16__pyx_unpickle_PyDBAdditionalThreadInfo(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long 
__pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v_PickleError = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBAdditionalThreadInfo", 0); + + /* "(tree fragment)":2 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0xa9a4341: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0xa9a4341) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":3 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0xa9a4341: + * from pickle import PickleError # <<<<<<<<<<<<<< + * raise PickleError("Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))" % __pyx_checksum) + * result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v_PickleError = __pyx_t_2; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":4 + * if __pyx_checksum != 0xa9a4341: + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))" % __pyx_checksum) # <<<<<<<<<<<<<< + * result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0xa9, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v_PickleError); + __pyx_t_2 = __pyx_v_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && 
unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":2 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0xa9a4341: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":5 + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))" % __pyx_checksum) + * result = PyDBAdditionalThreadInfo.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + 
__Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_6) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_INCREF(__pyx_v___pyx_type); + __Pyx_GIVEREF(__pyx_v___pyx_type); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v___pyx_type); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))" % __pyx_checksum) + * result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) + * return result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_7 = (__pyx_t_1 != 0); + if (__pyx_t_7) { + + /* "(tree fragment)":7 + * result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) # <<<<<<<<<<<<<< + * return result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 7, __pyx_L1_error) + __pyx_t_3 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)__pyx_v_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0xa9a4341 = (conditional_breakpoint_exception, is_tracing, pydev_call_from_jinja2, pydev_call_inside_jinja2, pydev_django_resolve_frame, pydev_func_name, pydev_message, pydev_next_line, pydev_notify_kill, 
pydev_smart_step_stop, pydev_state, pydev_step_cmd, pydev_step_stop, suspend_type))" % __pyx_checksum) + * result = PyDBAdditionalThreadInfo.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) + * return result + */ + } + + /* "(tree fragment)":8 + * if __pyx_state is not None: + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) + * return result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): + * result.conditional_breakpoint_exception = __pyx_state[0]; result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0xa9a4341: + * from pickle import PickleError + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBAdditionalThreadInfo", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_PickleError); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":9 + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result.conditional_breakpoint_exception = __pyx_state[0]; result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] + * if hasattr(result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBAdditionalThreadInfo__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *__pyx_v_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBAdditionalThreadInfo__set_state", 0); + + /* "(tree 
fragment)":10 + * return result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): + * result.conditional_breakpoint_exception = __pyx_state[0]; result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] # <<<<<<<<<<<<<< + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[14]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->conditional_breakpoint_exception); + __Pyx_DECREF(__pyx_v_result->conditional_breakpoint_exception); + __pyx_v_result->conditional_breakpoint_exception = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->is_tracing = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->pydev_call_from_jinja2); + __Pyx_DECREF(__pyx_v_result->pydev_call_from_jinja2); + __pyx_v_result->pydev_call_from_jinja2 = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 3, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->pydev_call_inside_jinja2); + __Pyx_DECREF(__pyx_v_result->pydev_call_inside_jinja2); + __pyx_v_result->pydev_call_inside_jinja2 = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, 
__pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 4, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->pydev_django_resolve_frame = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 5, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->pydev_func_name); + __Pyx_DECREF(__pyx_v_result->pydev_func_name); + __pyx_v_result->pydev_func_name = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 6, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyString_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->pydev_message); + __Pyx_DECREF(__pyx_v_result->pydev_message); + __pyx_v_result->pydev_message = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 7, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->pydev_next_line = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 8, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->pydev_notify_kill = __pyx_t_2; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 9, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + 
__Pyx_GOTREF(__pyx_v_result->pydev_smart_step_stop); + __Pyx_DECREF(__pyx_v_result->pydev_smart_step_stop); + __pyx_v_result->pydev_smart_step_stop = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 10, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->pydev_state = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 11, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->pydev_step_cmd = __pyx_t_3; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 12, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->pydev_step_stop); + __Pyx_DECREF(__pyx_v_result->pydev_step_stop); + __pyx_v_result->pydev_step_stop = __pyx_t_1; + __pyx_t_1 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 13, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->suspend_type = __pyx_t_3; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): + * result.conditional_breakpoint_exception = __pyx_state[0]; result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[14]) + */ + __pyx_t_2 = __Pyx_HasAttr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(1, 11, __pyx_L1_error) + __pyx_t_4 = (__pyx_t_2 != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":12 + * 
result.conditional_breakpoint_exception = __pyx_state[0]; result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[14]) # <<<<<<<<<<<<<< + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 14, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (!__pyx_t_7) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL; + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_8, 0+1, __pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): + * result.conditional_breakpoint_exception = __pyx_state[0]; 
result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[14]) + */ + } + + /* "(tree fragment)":9 + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result.conditional_breakpoint_exception = __pyx_state[0]; result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] + * if hasattr(result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBAdditionalThreadInfo__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0xfa6b183: + * from pickle import PickleError + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_PyDBFrame(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_PyDBFrame = {"__pyx_unpickle_PyDBFrame", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_PyDBFrame, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_PyDBFrame(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBFrame (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = 
PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBFrame", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBFrame", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_PyDBFrame") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_PyDBFrame", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBFrame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_18__pyx_unpickle_PyDBFrame(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_18__pyx_unpickle_PyDBFrame(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v_PickleError = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBFrame", 0); + + /* "(tree fragment)":2 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0xfa6b183: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0xfa6b183) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":3 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0xfa6b183: + * from pickle import PickleError # <<<<<<<<<<<<<< + * raise PickleError("Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))" % __pyx_checksum) + * result = PyDBFrame.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) 
+ __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v_PickleError = __pyx_t_2; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":4 + * if __pyx_checksum != 0xfa6b183: + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))" % __pyx_checksum) # <<<<<<<<<<<<<< + * result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0xfa, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v_PickleError); + __pyx_t_2 = __pyx_v_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":2 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0xfa6b183: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise 
PickleError("Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":5 + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))" % __pyx_checksum) + * result = PyDBFrame.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_PyDBFrame__set_state( result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_6) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_INCREF(__pyx_v___pyx_type); + __Pyx_GIVEREF(__pyx_v___pyx_type); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v___pyx_type); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))" % __pyx_checksum) + * result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBFrame__set_state( result, __pyx_state) + * return result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_7 = (__pyx_t_1 != 0); + if (__pyx_t_7) { + + /* "(tree fragment)":7 + * result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_PyDBFrame__set_state( result, __pyx_state) # <<<<<<<<<<<<<< + * return result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 7, __pyx_L1_error) + __pyx_t_3 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(((struct 
__pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)__pyx_v_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0xfa6b183 = (_args, should_skip))" % __pyx_checksum) + * result = PyDBFrame.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_PyDBFrame__set_state( result, __pyx_state) + * return result + */ + } + + /* "(tree fragment)":8 + * if __pyx_state is not None: + * __pyx_unpickle_PyDBFrame__set_state( result, __pyx_state) + * return result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame result, tuple __pyx_state): + * result._args = __pyx_state[0]; result.should_skip = __pyx_state[1] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBFrame(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0xfa6b183: + * from pickle import PickleError + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBFrame", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_PickleError); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":9 + * __pyx_unpickle_PyDBFrame__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result._args = __pyx_state[0]; result.should_skip = __pyx_state[1] + * if hasattr(result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_PyDBFrame__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *__pyx_v_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_PyDBFrame__set_state", 0); + + /* "(tree fragment)":10 + * return result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame result, tuple __pyx_state): + * result._args = __pyx_state[0]; result.should_skip = __pyx_state[1] # <<<<<<<<<<<<<< + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[2]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->_args); + __Pyx_DECREF(__pyx_v_result->_args); + __pyx_v_result->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 
0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyInt_As_int(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_v_result->should_skip = __pyx_t_2; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame result, tuple __pyx_state): + * result._args = __pyx_state[0]; result.should_skip = __pyx_state[1] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[2]) + */ + __pyx_t_3 = __Pyx_HasAttr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(1, 11, __pyx_L1_error) + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "(tree fragment)":12 + * result._args = __pyx_state[0]; result.should_skip = __pyx_state[1] + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[2]) # <<<<<<<<<<<<<< + */ + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_update); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_5 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 2, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __pyx_t_7 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_7)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_7); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (!__pyx_t_7) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_7, __pyx_t_5}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } else + #endif + { + __pyx_t_8 = PyTuple_New(1+1); if (unlikely(!__pyx_t_8)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_8); + __Pyx_GIVEREF(__pyx_t_7); PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7); __pyx_t_7 = NULL; + __Pyx_GIVEREF(__pyx_t_5); + PyTuple_SET_ITEM(__pyx_t_8, 
0+1, __pyx_t_5); + __pyx_t_5 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_8, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; + } + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame result, tuple __pyx_state): + * result._args = __pyx_state[0]; result.should_skip = __pyx_state[1] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[2]) + */ + } + + /* "(tree fragment)":9 + * __pyx_unpickle_PyDBFrame__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_PyDBFrame__set_state(PyDBFrame result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result._args = __pyx_state[0]; result.should_skip = __pyx_state[1] + * if hasattr(result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_XDECREF(__pyx_t_8); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_PyDBFrame__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":1 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0x77c077b: + * from pickle import PickleError + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_SafeCallWrapper(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_SafeCallWrapper = {"__pyx_unpickle_SafeCallWrapper", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_SafeCallWrapper, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_SafeCallWrapper(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_SafeCallWrapper (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_SafeCallWrapper", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = 
PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_SafeCallWrapper", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_SafeCallWrapper") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_SafeCallWrapper", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_SafeCallWrapper", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_20__pyx_unpickle_SafeCallWrapper(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_20__pyx_unpickle_SafeCallWrapper(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v_PickleError = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + __Pyx_RefNannySetupContext("__pyx_unpickle_SafeCallWrapper", 0); + + /* "(tree fragment)":2 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0x77c077b: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x77c077b = (method_object))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x77c077b) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":3 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0x77c077b: + * from pickle import PickleError # <<<<<<<<<<<<<< + * raise PickleError("Incompatible checksums (%s vs 0x77c077b = (method_object))" % __pyx_checksum) + * result = SafeCallWrapper.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v_PickleError = __pyx_t_2; + 
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":4 + * if __pyx_checksum != 0x77c077b: + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x77c077b = (method_object))" % __pyx_checksum) # <<<<<<<<<<<<<< + * result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v_PickleError); + __pyx_t_2 = __pyx_v_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":2 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0x77c077b: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x77c077b = (method_object))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":5 + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x77c077b = (method_object))" % __pyx_checksum) + * result = SafeCallWrapper.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), __pyx_n_s_new); 
if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_6) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_INCREF(__pyx_v___pyx_type); + __Pyx_GIVEREF(__pyx_v___pyx_type); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v___pyx_type); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0x77c077b = (method_object))" % __pyx_checksum) + * result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) + * return result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_7 = (__pyx_t_1 != 0); + if (__pyx_t_7) { + + /* "(tree fragment)":7 + * result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) # <<<<<<<<<<<<<< + * return result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 7, __pyx_L1_error) + __pyx_t_3 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)__pyx_v_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0x77c077b = (method_object))" % __pyx_checksum) + * result = SafeCallWrapper.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) + * return result + */ + } + + /* 
"(tree fragment)":8 + * if __pyx_state is not None: + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) + * return result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): + * result.method_object = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0x77c077b: + * from pickle import PickleError + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_SafeCallWrapper", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_PickleError); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":9 + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result.method_object = __pyx_state[0] + * if hasattr(result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_SafeCallWrapper__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *__pyx_v_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_SafeCallWrapper__set_state", 0); + + /* "(tree fragment)":10 + * return result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): + * result.method_object = __pyx_state[0] # <<<<<<<<<<<<<< + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->method_object); + __Pyx_DECREF(__pyx_v_result->method_object); + __pyx_v_result->method_object = __pyx_t_1; + __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): + * result.method_object = __pyx_state[0] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[1]) + */ + __pyx_t_2 = __Pyx_HasAttr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(1, 11, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":12 + * result.method_object = __pyx_state[0] + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 12, __pyx_L1_error) + 
__Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_update); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_4 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + if (!__pyx_t_6) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_4}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_4}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_7, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): + * result.method_object = __pyx_state[0] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":9 + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result.method_object = __pyx_state[0] + * if hasattr(result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_SafeCallWrapper__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* 
"(tree fragment)":1 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0x3d7902a: + * from pickle import PickleError + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_ThreadTracer(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyMethodDef __pyx_mdef_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_ThreadTracer = {"__pyx_unpickle_ThreadTracer", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_ThreadTracer, METH_VARARGS|METH_KEYWORDS, 0}; +static PyObject *__pyx_pw_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_ThreadTracer(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v___pyx_type = 0; + long __pyx_v___pyx_checksum; + PyObject *__pyx_v___pyx_state = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadTracer (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyx_type,&__pyx_n_s_pyx_checksum,&__pyx_n_s_pyx_state,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + CYTHON_FALLTHROUGH; + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + CYTHON_FALLTHROUGH; + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + CYTHON_FALLTHROUGH; + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_type)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + CYTHON_FALLTHROUGH; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_checksum)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadTracer", 1, 3, 3, 1); __PYX_ERR(1, 1, __pyx_L3_error) + } + CYTHON_FALLTHROUGH; + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_pyx_state)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadTracer", 1, 3, 3, 2); __PYX_ERR(1, 1, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "__pyx_unpickle_ThreadTracer") < 0)) __PYX_ERR(1, 1, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v___pyx_type = values[0]; + __pyx_v___pyx_checksum = __Pyx_PyInt_As_long(values[1]); if (unlikely((__pyx_v___pyx_checksum == (long)-1) && PyErr_Occurred())) __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_v___pyx_state = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + __Pyx_RaiseArgtupleInvalid("__pyx_unpickle_ThreadTracer", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(1, 1, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_ThreadTracer", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + __pyx_r = __pyx_pf_14_pydevd_bundle_13pydevd_cython_22__pyx_unpickle_ThreadTracer(__pyx_self, __pyx_v___pyx_type, __pyx_v___pyx_checksum, __pyx_v___pyx_state); 
+ + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_14_pydevd_bundle_13pydevd_cython_22__pyx_unpickle_ThreadTracer(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v___pyx_type, long __pyx_v___pyx_checksum, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_v_PickleError = NULL; + PyObject *__pyx_v_result = NULL; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + int __pyx_t_1; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + int __pyx_t_7; + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadTracer", 0); + + /* "(tree fragment)":2 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0x3d7902a: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x3d7902a = (_args))" % __pyx_checksum) + */ + __pyx_t_1 = ((__pyx_v___pyx_checksum != 0x3d7902a) != 0); + if (__pyx_t_1) { + + /* "(tree fragment)":3 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0x3d7902a: + * from pickle import PickleError # <<<<<<<<<<<<<< + * raise PickleError("Incompatible checksums (%s vs 0x3d7902a = (_args))" % __pyx_checksum) + * result = ThreadTracer.__new__(__pyx_type) + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_PickleError); + __Pyx_GIVEREF(__pyx_n_s_PickleError); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PickleError); + __pyx_t_3 = __Pyx_Import(__pyx_n_s_pickle, __pyx_t_2, -1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_3, __pyx_n_s_PickleError); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_t_2); + __pyx_v_PickleError = __pyx_t_2; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":4 + * if __pyx_checksum != 0x3d7902a: + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x3d7902a = (_args))" % __pyx_checksum) # <<<<<<<<<<<<<< + * result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: + */ + __pyx_t_2 = __Pyx_PyInt_From_long(__pyx_v___pyx_checksum); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyString_Format(__pyx_kp_s_Incompatible_checksums_s_vs_0x3d, __pyx_t_2); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_INCREF(__pyx_v_PickleError); + __pyx_t_2 = __pyx_v_PickleError; __pyx_t_5 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_5)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_5); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_5) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = 
__Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_5, __pyx_t_4}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_5); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5); __pyx_t_5 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_6, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_Raise(__pyx_t_3, 0, 0, 0); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __PYX_ERR(1, 4, __pyx_L1_error) + + /* "(tree fragment)":2 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): + * if __pyx_checksum != 0x3d7902a: # <<<<<<<<<<<<<< + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x3d7902a = (_args))" % __pyx_checksum) + */ + } + + /* "(tree fragment)":5 + * from pickle import PickleError + * raise PickleError("Incompatible checksums (%s vs 0x3d7902a = (_args))" % __pyx_checksum) + * result = ThreadTracer.__new__(__pyx_type) # <<<<<<<<<<<<<< + * if __pyx_state is not None: + * __pyx_unpickle_ThreadTracer__set_state( result, __pyx_state) + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_n_s_new); if (unlikely(!__pyx_t_2)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_6) { + __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v___pyx_type); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_v___pyx_type}; + __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_3); + } else + #endif + { + __pyx_t_4 = PyTuple_New(1+1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL; + 
__Pyx_INCREF(__pyx_v___pyx_type); + __Pyx_GIVEREF(__pyx_v___pyx_type); + PyTuple_SET_ITEM(__pyx_t_4, 0+1, __pyx_v___pyx_type); + __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_result = __pyx_t_3; + __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0x3d7902a = (_args))" % __pyx_checksum) + * result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state( result, __pyx_state) + * return result + */ + __pyx_t_1 = (__pyx_v___pyx_state != Py_None); + __pyx_t_7 = (__pyx_t_1 != 0); + if (__pyx_t_7) { + + /* "(tree fragment)":7 + * result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: + * __pyx_unpickle_ThreadTracer__set_state( result, __pyx_state) # <<<<<<<<<<<<<< + * return result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer result, tuple __pyx_state): + */ + if (!(likely(PyTuple_CheckExact(__pyx_v___pyx_state))||((__pyx_v___pyx_state) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_v___pyx_state)->tp_name), 0))) __PYX_ERR(1, 7, __pyx_L1_error) + __pyx_t_3 = __pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)__pyx_v_result), ((PyObject*)__pyx_v___pyx_state)); if (unlikely(!__pyx_t_3)) __PYX_ERR(1, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + + /* "(tree fragment)":6 + * raise PickleError("Incompatible checksums (%s vs 0x3d7902a = (_args))" % __pyx_checksum) + * result = ThreadTracer.__new__(__pyx_type) + * if __pyx_state is not None: # <<<<<<<<<<<<<< + * __pyx_unpickle_ThreadTracer__set_state( result, __pyx_state) + * return result + */ + } + + /* "(tree fragment)":8 + * if __pyx_state is not None: + * __pyx_unpickle_ThreadTracer__set_state( result, __pyx_state) + * return result # <<<<<<<<<<<<<< + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer result, tuple __pyx_state): + * result._args = __pyx_state[0] + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(__pyx_v_result); + __pyx_r = __pyx_v_result; + goto __pyx_L0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_ThreadTracer(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0x3d7902a: + * from pickle import PickleError + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_ThreadTracer", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_PickleError); + __Pyx_XDECREF(__pyx_v_result); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "(tree fragment)":9 + * __pyx_unpickle_ThreadTracer__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result._args = __pyx_state[0] + * if hasattr(result, '__dict__'): + */ + +static PyObject *__pyx_f_14_pydevd_bundle_13pydevd_cython___pyx_unpickle_ThreadTracer__set_state(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer 
*__pyx_v_result, PyObject *__pyx_v___pyx_state) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + int __pyx_t_2; + int __pyx_t_3; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + __Pyx_RefNannySetupContext("__pyx_unpickle_ThreadTracer__set_state", 0); + + /* "(tree fragment)":10 + * return result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer result, tuple __pyx_state): + * result._args = __pyx_state[0] # <<<<<<<<<<<<<< + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[1]) + */ + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 10, __pyx_L1_error) + } + __pyx_t_1 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (!(likely(PyTuple_CheckExact(__pyx_t_1))||((__pyx_t_1) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "tuple", Py_TYPE(__pyx_t_1)->tp_name), 0))) __PYX_ERR(1, 10, __pyx_L1_error) + __Pyx_GIVEREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_v_result->_args); + __Pyx_DECREF(__pyx_v_result->_args); + __pyx_v_result->_args = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer result, tuple __pyx_state): + * result._args = __pyx_state[0] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[1]) + */ + __pyx_t_2 = __Pyx_HasAttr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(__pyx_t_2 == -1)) __PYX_ERR(1, 11, __pyx_L1_error) + __pyx_t_3 = (__pyx_t_2 != 0); + if (__pyx_t_3) { + + /* "(tree fragment)":12 + * result._args = __pyx_state[0] + * if hasattr(result, '__dict__'): + * result.__dict__.update(__pyx_state[1]) # <<<<<<<<<<<<<< + */ + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_result), __pyx_n_s_dict); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_update); if (unlikely(!__pyx_t_5)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (unlikely(__pyx_v___pyx_state == Py_None)) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not subscriptable"); + __PYX_ERR(1, 12, __pyx_L1_error) + } + __pyx_t_4 = __Pyx_GetItemInt_Tuple(__pyx_v___pyx_state, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_5); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_5, function); + } + } + if (!__pyx_t_6) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_4); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_4}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_5, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + 
__Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_5)) { + PyObject *__pyx_temp[2] = {__pyx_t_6, __pyx_t_4}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_5, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + } else + #endif + { + __pyx_t_7 = PyTuple_New(1+1); if (unlikely(!__pyx_t_7)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_7); + __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_6); __pyx_t_6 = NULL; + __Pyx_GIVEREF(__pyx_t_4); + PyTuple_SET_ITEM(__pyx_t_7, 0+1, __pyx_t_4); + __pyx_t_4 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_7, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 12, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; + } + } + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "(tree fragment)":11 + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer result, tuple __pyx_state): + * result._args = __pyx_state[0] + * if hasattr(result, '__dict__'): # <<<<<<<<<<<<<< + * result.__dict__.update(__pyx_state[1]) + */ + } + + /* "(tree fragment)":9 + * __pyx_unpickle_ThreadTracer__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_ThreadTracer__set_state(ThreadTracer result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result._args = __pyx_state[0] + * if hasattr(result, '__dict__'): + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_7); + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython.__pyx_unpickle_ThreadTracer__set_state", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o); + p->pydev_step_stop = Py_None; Py_INCREF(Py_None); + p->pydev_smart_step_stop = Py_None; Py_INCREF(Py_None); + p->pydev_call_from_jinja2 = Py_None; Py_INCREF(Py_None); + p->pydev_call_inside_jinja2 = Py_None; Py_INCREF(Py_None); + p->conditional_breakpoint_exception = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->pydev_message = ((PyObject*)Py_None); Py_INCREF(Py_None); + p->pydev_func_name = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if 
(PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->pydev_step_stop); + Py_CLEAR(p->pydev_smart_step_stop); + Py_CLEAR(p->pydev_call_from_jinja2); + Py_CLEAR(p->pydev_call_inside_jinja2); + Py_CLEAR(p->conditional_breakpoint_exception); + Py_CLEAR(p->pydev_message); + Py_CLEAR(p->pydev_func_name); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o; + if (p->pydev_step_stop) { + e = (*v)(p->pydev_step_stop, a); if (e) return e; + } + if (p->pydev_smart_step_stop) { + e = (*v)(p->pydev_smart_step_stop, a); if (e) return e; + } + if (p->pydev_call_from_jinja2) { + e = (*v)(p->pydev_call_from_jinja2, a); if (e) return e; + } + if (p->pydev_call_inside_jinja2) { + e = (*v)(p->pydev_call_inside_jinja2, a); if (e) return e; + } + if (p->conditional_breakpoint_exception) { + e = (*v)(p->conditional_breakpoint_exception, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo *)o; + tmp = ((PyObject*)p->pydev_step_stop); + p->pydev_step_stop = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->pydev_smart_step_stop); + p->pydev_smart_step_stop = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->pydev_call_from_jinja2); + p->pydev_call_from_jinja2 = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->pydev_call_inside_jinja2); + p->pydev_call_inside_jinja2 = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + tmp = ((PyObject*)p->conditional_breakpoint_exception); + p->conditional_breakpoint_exception = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_11pydev_state_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_step_stop_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd(PyObject *o, CYTHON_UNUSED 
void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_14pydev_step_cmd_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_17pydev_notify_kill_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_21pydev_smart_step_stop_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_26pydev_django_resolve_frame_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_22pydev_call_from_jinja2_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_1__get__(o); +} + +static int 
__pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_24pydev_call_inside_jinja2_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_10is_tracing_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_32conditional_breakpoint_exception_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_13pydev_message_5__del__(o); + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_12suspend_type_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return 
__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_next_line_3__set__(o, v); + } + else { + PyErr_SetString(PyExc_NotImplementedError, "__del__"); + return -1; + } +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_15pydev_func_name_5__del__(o); + } +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo[] = { + {"iter_frames", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_3iter_frames, METH_O, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_7__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_9__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo[] = { + {(char *)"pydev_state", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_state, (char *)0, 0}, + {(char *)"pydev_step_stop", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_stop, (char *)0, 0}, + {(char *)"pydev_step_cmd", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_step_cmd, (char *)0, 0}, + {(char *)"pydev_notify_kill", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_notify_kill, (char *)0, 0}, + {(char *)"pydev_smart_step_stop", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_smart_step_stop, (char *)0, 0}, + {(char *)"pydev_django_resolve_frame", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_django_resolve_frame, (char *)0, 0}, + {(char *)"pydev_call_from_jinja2", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_from_jinja2, (char *)0, 0}, + {(char *)"pydev_call_inside_jinja2", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_call_inside_jinja2, (char *)0, 0}, + {(char *)"is_tracing", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing, 
__pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_is_tracing, (char *)0, 0}, + {(char *)"conditional_breakpoint_exception", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_conditional_breakpoint_exception, (char *)0, 0}, + {(char *)"pydev_message", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_message, (char *)0, 0}, + {(char *)"suspend_type", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_suspend_type, (char *)0, 0}, + {(char *)"pydev_next_line", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_next_line, (char *)0, 0}, + {(char *)"pydev_func_name", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_pydev_func_name, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.PyDBAdditionalThreadInfo", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_5__str__, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_24PyDBAdditionalThreadInfo_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; +static struct __pyx_vtabstruct_14_pydevd_bundle_13pydevd_cython_PyDBFrame __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyTypeObject 
*t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o); + p->__pyx_vtab = __pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_args); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o; + if (p->_args) { + e = (*v)(p->_args, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBFrame(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *)o; + tmp = ((PyObject*)p->_args); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBFrame[] = { + {"set_suspend", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_3set_suspend, METH_VARARGS|METH_KEYWORDS, 0}, + {"do_wait_suspend", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_5do_wait_suspend, METH_VARARGS|METH_KEYWORDS, 0}, + {"trace_exception", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_7trace_exception, METH_VARARGS|METH_KEYWORDS, 0}, + {"trace_return", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_9trace_return, METH_VARARGS|METH_KEYWORDS, 0}, + {"should_stop_on_exception", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_11should_stop_on_exception, METH_VARARGS|METH_KEYWORDS, 0}, + {"handle_exception", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_13handle_exception, METH_VARARGS|METH_KEYWORDS, 0}, + {"get_func_name", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_15get_func_name, METH_O, 0}, + {"show_return_values", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_17show_return_values, METH_VARARGS|METH_KEYWORDS, 0}, + {"remove_return_values", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_19remove_return_values, METH_VARARGS|METH_KEYWORDS, 0}, + {"trace_dispatch", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_21trace_dispatch, METH_VARARGS|METH_KEYWORDS, 0}, + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_23__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_25__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject 
__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.PyDBFrame", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + 0, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_PyDBFrame, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o); + p->method_object = Py_None; Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->method_object); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyObject *o, visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o; + if (p->method_object) { + e = (*v)(p->method_object, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper *)o; + tmp = ((PyObject*)p->method_object); + 
p->method_object = Py_None; Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_5__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.SafeCallWrapper", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_3__call__, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_methods*/ + 0, /*tp_members*/ + 0, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_15SafeCallWrapper_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, /*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyObject *__pyx_tp_new_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyTypeObject *t, CYTHON_UNUSED PyObject *a, CYTHON_UNUSED PyObject *k) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p; + PyObject *o; + if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) { + o = (*t->tp_alloc)(t, 0); + } else { + o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0); + } + if (unlikely(!o)) return 0; + p = ((struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + return o; +} + +static void __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyObject *o) { + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o; + #if PY_VERSION_HEX >= 0x030400a1 + if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && !_PyGC_FINALIZED(o)) { + if (PyObject_CallFinalizerFromDealloc(o)) return; + } + #endif + PyObject_GC_UnTrack(o); + Py_CLEAR(p->_args); + (*Py_TYPE(o)->tp_free)(o); +} + +static int __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyObject *o, 
visitproc v, void *a) { + int e; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o; + if (p->_args) { + e = (*v)(p->_args, a); if (e) return e; + } + return 0; +} + +static int __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_ThreadTracer(PyObject *o) { + PyObject* tmp; + struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *p = (struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer *)o; + tmp = ((PyObject*)p->_args); + p->_args = ((PyObject*)Py_None); Py_INCREF(Py_None); + Py_XDECREF(tmp); + return 0; +} + +static PyObject *__pyx_getprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args(PyObject *o, CYTHON_UNUSED void *x) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_1__get__(o); +} + +static int __pyx_setprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) { + if (v) { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_3__set__(o, v); + } + else { + return __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5_args_5__del__(o); + } +} + +static PyMethodDef __pyx_methods_14_pydevd_bundle_13pydevd_cython_ThreadTracer[] = { + {"__reduce_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_5__reduce_cython__, METH_NOARGS, 0}, + {"__setstate_cython__", (PyCFunction)__pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_7__setstate_cython__, METH_O, 0}, + {0, 0, 0, 0} +}; + +static struct PyGetSetDef __pyx_getsets_14_pydevd_bundle_13pydevd_cython_ThreadTracer[] = { + {(char *)"_args", __pyx_getprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args, __pyx_setprop_14_pydevd_bundle_13pydevd_cython_12ThreadTracer__args, (char *)0, 0}, + {0, 0, 0, 0, 0} +}; + +static PyTypeObject __pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer = { + PyVarObject_HEAD_INIT(0, 0) + "_pydevd_bundle.pydevd_cython.ThreadTracer", /*tp_name*/ + sizeof(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_ThreadTracer), /*tp_basicsize*/ + 0, /*tp_itemsize*/ + __pyx_tp_dealloc_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_dealloc*/ + 0, /*tp_print*/ + 0, /*tp_getattr*/ + 0, /*tp_setattr*/ + #if PY_MAJOR_VERSION < 3 + 0, /*tp_compare*/ + #endif + #if PY_MAJOR_VERSION >= 3 + 0, /*tp_as_async*/ + #endif + 0, /*tp_repr*/ + 0, /*tp_as_number*/ + 0, /*tp_as_sequence*/ + 0, /*tp_as_mapping*/ + 0, /*tp_hash*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_3__call__, /*tp_call*/ + 0, /*tp_str*/ + 0, /*tp_getattro*/ + 0, /*tp_setattro*/ + 0, /*tp_as_buffer*/ + Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ + 0, /*tp_doc*/ + __pyx_tp_traverse_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_traverse*/ + __pyx_tp_clear_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_clear*/ + 0, /*tp_richcompare*/ + 0, /*tp_weaklistoffset*/ + 0, /*tp_iter*/ + 0, /*tp_iternext*/ + __pyx_methods_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_methods*/ + 0, /*tp_members*/ + __pyx_getsets_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_getset*/ + 0, /*tp_base*/ + 0, /*tp_dict*/ + 0, /*tp_descr_get*/ + 0, /*tp_descr_set*/ + 0, /*tp_dictoffset*/ + __pyx_pw_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_1__init__, /*tp_init*/ + 0, /*tp_alloc*/ + __pyx_tp_new_14_pydevd_bundle_13pydevd_cython_ThreadTracer, /*tp_new*/ + 0, /*tp_free*/ + 0, /*tp_is_gc*/ + 0, /*tp_bases*/ + 0, 
/*tp_mro*/ + 0, /*tp_cache*/ + 0, /*tp_subclasses*/ + 0, /*tp_weaklist*/ + 0, /*tp_del*/ + 0, /*tp_version_tag*/ + #if PY_VERSION_HEX >= 0x030400a1 + 0, /*tp_finalize*/ + #endif +}; + +static PyMethodDef __pyx_methods[] = { + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + "pydevd_cython", + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_, __pyx_k_, sizeof(__pyx_k_), 0, 0, 1, 0}, + {&__pyx_n_s_ALL, __pyx_k_ALL, sizeof(__pyx_k_ALL), 0, 0, 1, 1}, + {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_RUN_TO_LINE, __pyx_k_CMD_RUN_TO_LINE, sizeof(__pyx_k_CMD_RUN_TO_LINE), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_SET_BREAK, __pyx_k_CMD_SET_BREAK, sizeof(__pyx_k_CMD_SET_BREAK), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_SET_NEXT_STATEMENT, __pyx_k_CMD_SET_NEXT_STATEMENT, sizeof(__pyx_k_CMD_SET_NEXT_STATEMENT), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_SMART_STEP_INTO, __pyx_k_CMD_SMART_STEP_INTO, sizeof(__pyx_k_CMD_SMART_STEP_INTO), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION, __pyx_k_CMD_STEP_CAUGHT_EXCEPTION, sizeof(__pyx_k_CMD_STEP_CAUGHT_EXCEPTION), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_STEP_INTO, __pyx_k_CMD_STEP_INTO, sizeof(__pyx_k_CMD_STEP_INTO), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_STEP_INTO_MY_CODE, __pyx_k_CMD_STEP_INTO_MY_CODE, sizeof(__pyx_k_CMD_STEP_INTO_MY_CODE), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_STEP_OVER, __pyx_k_CMD_STEP_OVER, sizeof(__pyx_k_CMD_STEP_OVER), 0, 0, 1, 1}, + {&__pyx_n_s_CMD_STEP_RETURN, __pyx_k_CMD_STEP_RETURN, sizeof(__pyx_k_CMD_STEP_RETURN), 0, 0, 1, 1}, + {&__pyx_n_s_CO_GENERATOR, __pyx_k_CO_GENERATOR, sizeof(__pyx_k_CO_GENERATOR), 0, 0, 1, 1}, + {&__pyx_kp_s_Condition, __pyx_k_Condition, sizeof(__pyx_k_Condition), 0, 0, 1, 0}, + {&__pyx_n_s_DEBUG_START, __pyx_k_DEBUG_START, sizeof(__pyx_k_DEBUG_START), 0, 0, 1, 1}, + {&__pyx_n_s_DEBUG_START_PY3K, __pyx_k_DEBUG_START_PY3K, sizeof(__pyx_k_DEBUG_START_PY3K), 0, 0, 1, 1}, + {&__pyx_n_s_DONT_TRACE, __pyx_k_DONT_TRACE, sizeof(__pyx_k_DONT_TRACE), 0, 0, 1, 1}, + {&__pyx_kp_s_Error, __pyx_k_Error, sizeof(__pyx_k_Error), 0, 0, 1, 0}, + {&__pyx_kp_s_Error_while_evaluating_expressio, __pyx_k_Error_while_evaluating_expressio, sizeof(__pyx_k_Error_while_evaluating_expressio), 0, 0, 1, 0}, + {&__pyx_n_s_GeneratorExit, __pyx_k_GeneratorExit, sizeof(__pyx_k_GeneratorExit), 0, 0, 1, 1}, + {&__pyx_n_s_IGNORE_EXCEPTION_TAG, __pyx_k_IGNORE_EXCEPTION_TAG, sizeof(__pyx_k_IGNORE_EXCEPTION_TAG), 0, 0, 1, 1}, + {&__pyx_n_s_IS_IRONPYTHON, __pyx_k_IS_IRONPYTHON, sizeof(__pyx_k_IS_IRONPYTHON), 0, 0, 1, 1}, + {&__pyx_n_s_IS_JYTHON, __pyx_k_IS_JYTHON, sizeof(__pyx_k_IS_JYTHON), 0, 0, 1, 1}, + {&__pyx_n_s_IS_PY3K, __pyx_k_IS_PY3K, sizeof(__pyx_k_IS_PY3K), 0, 0, 1, 1}, + {&__pyx_kp_s_IgnoreException, __pyx_k_IgnoreException, sizeof(__pyx_k_IgnoreException), 0, 0, 1, 0}, + {&__pyx_kp_s_Ignore_exception_s_in_library_s, __pyx_k_Ignore_exception_s_in_library_s, sizeof(__pyx_k_Ignore_exception_s_in_library_s), 0, 0, 1, 0}, + {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1}, + {&__pyx_kp_s_Incompatible_checksums_s_vs_0x3d, __pyx_k_Incompatible_checksums_s_vs_0x3d, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x3d), 0, 0, 1, 0}, + 
{&__pyx_kp_s_Incompatible_checksums_s_vs_0x77, __pyx_k_Incompatible_checksums_s_vs_0x77, sizeof(__pyx_k_Incompatible_checksums_s_vs_0x77), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_s_vs_0xa9, __pyx_k_Incompatible_checksums_s_vs_0xa9, sizeof(__pyx_k_Incompatible_checksums_s_vs_0xa9), 0, 0, 1, 0}, + {&__pyx_kp_s_Incompatible_checksums_s_vs_0xfa, __pyx_k_Incompatible_checksums_s_vs_0xfa, sizeof(__pyx_k_Incompatible_checksums_s_vs_0xfa), 0, 0, 1, 0}, + {&__pyx_n_s_KeyboardInterrupt, __pyx_k_KeyboardInterrupt, sizeof(__pyx_k_KeyboardInterrupt), 0, 0, 1, 1}, + {&__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_k_NORM_PATHS_AND_BASE_CONTAINER, sizeof(__pyx_k_NORM_PATHS_AND_BASE_CONTAINER), 0, 0, 1, 1}, + {&__pyx_n_s_NoSuchFieldException, __pyx_k_NoSuchFieldException, sizeof(__pyx_k_NoSuchFieldException), 0, 0, 1, 1}, + {&__pyx_n_s_None, __pyx_k_None, sizeof(__pyx_k_None), 0, 0, 1, 1}, + {&__pyx_n_s_PYDEV_FILE, __pyx_k_PYDEV_FILE, sizeof(__pyx_k_PYDEV_FILE), 0, 0, 1, 1}, + {&__pyx_n_s_PYTHON_SUSPEND, __pyx_k_PYTHON_SUSPEND, sizeof(__pyx_k_PYTHON_SUSPEND), 0, 0, 1, 1}, + {&__pyx_n_s_PickleError, __pyx_k_PickleError, sizeof(__pyx_k_PickleError), 0, 0, 1, 1}, + {&__pyx_n_s_RETURN_VALUES_DICT, __pyx_k_RETURN_VALUES_DICT, sizeof(__pyx_k_RETURN_VALUES_DICT), 0, 0, 1, 1}, + {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1}, + {&__pyx_n_s_STATE_RUN, __pyx_k_STATE_RUN, sizeof(__pyx_k_STATE_RUN), 0, 0, 1, 1}, + {&__pyx_n_s_STATE_SUSPEND, __pyx_k_STATE_SUSPEND, sizeof(__pyx_k_STATE_SUSPEND), 0, 0, 1, 1}, + {&__pyx_n_s_SetTrace, __pyx_k_SetTrace, sizeof(__pyx_k_SetTrace), 0, 0, 1, 1}, + {&__pyx_kp_s_State_s_Stop_s_Cmd_s_Kill_s, __pyx_k_State_s_Stop_s_Cmd_s_Kill_s, sizeof(__pyx_k_State_s_Stop_s_Cmd_s_Kill_s), 0, 0, 1, 0}, + {&__pyx_n_s_StopIteration, __pyx_k_StopIteration, sizeof(__pyx_k_StopIteration), 0, 0, 1, 1}, + {&__pyx_n_s_SystemExit, __pyx_k_SystemExit, sizeof(__pyx_k_SystemExit), 0, 0, 1, 1}, + {&__pyx_n_s_TRACE_PROPERTY, __pyx_k_TRACE_PROPERTY, sizeof(__pyx_k_TRACE_PROPERTY), 0, 0, 1, 1}, + {&__pyx_n_s_ThreadStateMapping, __pyx_k_ThreadStateMapping, sizeof(__pyx_k_ThreadStateMapping), 0, 0, 1, 1}, + {&__pyx_kp_s_Unable_to_proceed_sys__current_f, __pyx_k_Unable_to_proceed_sys__current_f, sizeof(__pyx_k_Unable_to_proceed_sys__current_f), 0, 0, 1, 0}, + {&__pyx_kp_s__5, __pyx_k__5, sizeof(__pyx_k__5), 0, 0, 1, 0}, + {&__pyx_n_s_accessible, __pyx_k_accessible, sizeof(__pyx_k_accessible), 0, 0, 1, 1}, + {&__pyx_n_s_add_additional_frame_by_id, __pyx_k_add_additional_frame_by_id, sizeof(__pyx_k_add_additional_frame_by_id), 0, 0, 1, 1}, + {&__pyx_n_s_add_exception_to_frame, __pyx_k_add_exception_to_frame, sizeof(__pyx_k_add_exception_to_frame), 0, 0, 1, 1}, + {&__pyx_n_s_additional_info, __pyx_k_additional_info, sizeof(__pyx_k_additional_info), 0, 0, 1, 1}, + {&__pyx_n_s_arg, __pyx_k_arg, sizeof(__pyx_k_arg), 0, 0, 1, 1}, + {&__pyx_n_s_args, __pyx_k_args, sizeof(__pyx_k_args), 0, 0, 1, 1}, + {&__pyx_n_s_args_2, __pyx_k_args_2, sizeof(__pyx_k_args_2), 0, 0, 1, 1}, + {&__pyx_n_s_as_array, __pyx_k_as_array, sizeof(__pyx_k_as_array), 0, 0, 1, 1}, + {&__pyx_n_s_asyncio_analyser, __pyx_k_asyncio_analyser, sizeof(__pyx_k_asyncio_analyser), 0, 0, 1, 1}, + {&__pyx_n_s_basename, __pyx_k_basename, sizeof(__pyx_k_basename), 0, 0, 1, 1}, + {&__pyx_n_s_break_on_caught_exceptions, __pyx_k_break_on_caught_exceptions, sizeof(__pyx_k_break_on_caught_exceptions), 0, 0, 1, 1}, + {&__pyx_n_s_break_on_exceptions_thrown_in_sa, __pyx_k_break_on_exceptions_thrown_in_sa, 
sizeof(__pyx_k_break_on_exceptions_thrown_in_sa), 0, 0, 1, 1}, + {&__pyx_n_s_breakpoint, __pyx_k_breakpoint, sizeof(__pyx_k_breakpoint), 0, 0, 1, 1}, + {&__pyx_n_s_breakpoints, __pyx_k_breakpoints, sizeof(__pyx_k_breakpoints), 0, 0, 1, 1}, + {&__pyx_n_s_cachedThreadState, __pyx_k_cachedThreadState, sizeof(__pyx_k_cachedThreadState), 0, 0, 1, 1}, + {&__pyx_n_s_call, __pyx_k_call, sizeof(__pyx_k_call), 0, 0, 1, 1}, + {&__pyx_n_s_call_2, __pyx_k_call_2, sizeof(__pyx_k_call_2), 0, 0, 1, 1}, + {&__pyx_n_s_can_not_skip, __pyx_k_can_not_skip, sizeof(__pyx_k_can_not_skip), 0, 0, 1, 1}, + {&__pyx_n_s_checkcache, __pyx_k_checkcache, sizeof(__pyx_k_checkcache), 0, 0, 1, 1}, + {&__pyx_n_s_clear, __pyx_k_clear, sizeof(__pyx_k_clear), 0, 0, 1, 1}, + {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_cmd_step_into, __pyx_k_cmd_step_into, sizeof(__pyx_k_cmd_step_into), 0, 0, 1, 1}, + {&__pyx_n_s_cmd_step_over, __pyx_k_cmd_step_over, sizeof(__pyx_k_cmd_step_over), 0, 0, 1, 1}, + {&__pyx_n_s_co_filename, __pyx_k_co_filename, sizeof(__pyx_k_co_filename), 0, 0, 1, 1}, + {&__pyx_n_s_co_firstlineno, __pyx_k_co_firstlineno, sizeof(__pyx_k_co_firstlineno), 0, 0, 1, 1}, + {&__pyx_n_s_co_flags, __pyx_k_co_flags, sizeof(__pyx_k_co_flags), 0, 0, 1, 1}, + {&__pyx_n_s_co_name, __pyx_k_co_name, sizeof(__pyx_k_co_name), 0, 0, 1, 1}, + {&__pyx_n_s_compile, __pyx_k_compile, sizeof(__pyx_k_compile), 0, 0, 1, 1}, + {&__pyx_n_s_condition, __pyx_k_condition, sizeof(__pyx_k_condition), 0, 0, 1, 1}, + {&__pyx_n_s_conditional_breakpoint_exception, __pyx_k_conditional_breakpoint_exception, sizeof(__pyx_k_conditional_breakpoint_exception), 0, 0, 1, 1}, + {&__pyx_n_s_currentThread, __pyx_k_currentThread, sizeof(__pyx_k_currentThread), 0, 0, 1, 1}, + {&__pyx_n_s_current_frames, __pyx_k_current_frames, sizeof(__pyx_k_current_frames), 0, 0, 1, 1}, + {&__pyx_n_s_debug, __pyx_k_debug, sizeof(__pyx_k_debug), 0, 0, 1, 1}, + {&__pyx_n_s_default_return_value, __pyx_k_default_return_value, sizeof(__pyx_k_default_return_value), 0, 0, 1, 1}, + {&__pyx_n_s_dict, __pyx_k_dict, sizeof(__pyx_k_dict), 0, 0, 1, 1}, + {&__pyx_n_s_dict_iter_values, __pyx_k_dict_iter_values, sizeof(__pyx_k_dict_iter_values), 0, 0, 1, 1}, + {&__pyx_n_s_do_wait_suspend, __pyx_k_do_wait_suspend, sizeof(__pyx_k_do_wait_suspend), 0, 0, 1, 1}, + {&__pyx_n_s_encode, __pyx_k_encode, sizeof(__pyx_k_encode), 0, 0, 1, 1}, + {&__pyx_n_s_entrySet, __pyx_k_entrySet, sizeof(__pyx_k_entrySet), 0, 0, 1, 1}, + {&__pyx_n_s_error, __pyx_k_error, sizeof(__pyx_k_error), 0, 0, 1, 1}, + {&__pyx_n_s_etype, __pyx_k_etype, sizeof(__pyx_k_etype), 0, 0, 1, 1}, + {&__pyx_n_s_eval, __pyx_k_eval, sizeof(__pyx_k_eval), 0, 0, 1, 1}, + {&__pyx_n_s_event, __pyx_k_event, sizeof(__pyx_k_event), 0, 0, 1, 1}, + {&__pyx_n_s_exc_info, __pyx_k_exc_info, sizeof(__pyx_k_exc_info), 0, 0, 1, 1}, + {&__pyx_n_s_exception, __pyx_k_exception, sizeof(__pyx_k_exception), 0, 0, 1, 1}, + {&__pyx_n_s_exception_break, __pyx_k_exception_break, sizeof(__pyx_k_exception_break), 0, 0, 1, 1}, + {&__pyx_n_s_execfile, __pyx_k_execfile, sizeof(__pyx_k_execfile), 0, 0, 1, 1}, + {&__pyx_n_s_expression, __pyx_k_expression, sizeof(__pyx_k_expression), 0, 0, 1, 1}, + {&__pyx_n_s_extract_stack, __pyx_k_extract_stack, sizeof(__pyx_k_extract_stack), 0, 0, 1, 1}, + {&__pyx_n_s_f, __pyx_k_f, sizeof(__pyx_k_f), 0, 0, 1, 1}, + {&__pyx_n_s_f_back, __pyx_k_f_back, sizeof(__pyx_k_f_back), 0, 0, 1, 1}, + {&__pyx_n_s_f_code, __pyx_k_f_code, sizeof(__pyx_k_f_code), 0, 0, 1, 1}, + 
{&__pyx_n_s_f_globals, __pyx_k_f_globals, sizeof(__pyx_k_f_globals), 0, 0, 1, 1}, + {&__pyx_n_s_f_lineno, __pyx_k_f_lineno, sizeof(__pyx_k_f_lineno), 0, 0, 1, 1}, + {&__pyx_n_s_f_locals, __pyx_k_f_locals, sizeof(__pyx_k_f_locals), 0, 0, 1, 1}, + {&__pyx_n_s_f_trace, __pyx_k_f_trace, sizeof(__pyx_k_f_trace), 0, 0, 1, 1}, + {&__pyx_n_s_filename_to_lines_where_exceptio, __pyx_k_filename_to_lines_where_exceptio, sizeof(__pyx_k_filename_to_lines_where_exceptio), 0, 0, 1, 1}, + {&__pyx_n_s_filename_to_stat_info, __pyx_k_filename_to_stat_info, sizeof(__pyx_k_filename_to_stat_info), 0, 0, 1, 1}, + {&__pyx_n_s_finish_debugging_session, __pyx_k_finish_debugging_session, sizeof(__pyx_k_finish_debugging_session), 0, 0, 1, 1}, + {&__pyx_n_s_first_appearance_in_scope, __pyx_k_first_appearance_in_scope, sizeof(__pyx_k_first_appearance_in_scope), 0, 0, 1, 1}, + {&__pyx_n_s_first_breakpoint_reached, __pyx_k_first_breakpoint_reached, sizeof(__pyx_k_first_breakpoint_reached), 0, 0, 1, 1}, + {&__pyx_n_s_format_exception_only, __pyx_k_format_exception_only, sizeof(__pyx_k_format_exception_only), 0, 0, 1, 1}, + {&__pyx_n_s_frame, __pyx_k_frame, sizeof(__pyx_k_frame), 0, 0, 1, 1}, + {&__pyx_n_s_func_name, __pyx_k_func_name, sizeof(__pyx_k_func_name), 0, 0, 1, 1}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, + {&__pyx_n_s_getDeclaredField, __pyx_k_getDeclaredField, sizeof(__pyx_k_getDeclaredField), 0, 0, 1, 1}, + {&__pyx_n_s_getId, __pyx_k_getId, sizeof(__pyx_k_getId), 0, 0, 1, 1}, + {&__pyx_n_s_getKey, __pyx_k_getKey, sizeof(__pyx_k_getKey), 0, 0, 1, 1}, + {&__pyx_n_s_getValue, __pyx_k_getValue, sizeof(__pyx_k_getValue), 0, 0, 1, 1}, + {&__pyx_n_s_get_abs_path_real_path_and_base, __pyx_k_get_abs_path_real_path_and_base, sizeof(__pyx_k_get_abs_path_real_path_and_base), 0, 0, 1, 1}, + {&__pyx_n_s_get_breakpoint, __pyx_k_get_breakpoint, sizeof(__pyx_k_get_breakpoint), 0, 0, 1, 1}, + {&__pyx_n_s_get_clsname_for_code, __pyx_k_get_clsname_for_code, sizeof(__pyx_k_get_clsname_for_code), 0, 0, 1, 1}, + {&__pyx_n_s_get_exception_breakpoint, __pyx_k_get_exception_breakpoint, sizeof(__pyx_k_get_exception_breakpoint), 0, 0, 1, 1}, + {&__pyx_n_s_get_file_type, __pyx_k_get_file_type, sizeof(__pyx_k_get_file_type), 0, 0, 1, 1}, + {&__pyx_n_s_get_func_name, __pyx_k_get_func_name, sizeof(__pyx_k_get_func_name), 0, 0, 1, 1}, + {&__pyx_n_s_get_thread_id, __pyx_k_get_thread_id, sizeof(__pyx_k_get_thread_id), 0, 0, 1, 1}, + {&__pyx_n_s_getline, __pyx_k_getline, sizeof(__pyx_k_getline), 0, 0, 1, 1}, + {&__pyx_n_s_globalThreadStates, __pyx_k_globalThreadStates, sizeof(__pyx_k_globalThreadStates), 0, 0, 1, 1}, + {&__pyx_n_s_global_cache_frame_skips, __pyx_k_global_cache_frame_skips, sizeof(__pyx_k_global_cache_frame_skips), 0, 0, 1, 1}, + {&__pyx_n_s_global_cache_skips, __pyx_k_global_cache_skips, sizeof(__pyx_k_global_cache_skips), 0, 0, 1, 1}, + {&__pyx_n_s_handle_breakpoint_condition, __pyx_k_handle_breakpoint_condition, sizeof(__pyx_k_handle_breakpoint_condition), 0, 0, 1, 1}, + {&__pyx_n_s_handle_breakpoint_expression, __pyx_k_handle_breakpoint_expression, sizeof(__pyx_k_handle_breakpoint_expression), 0, 0, 1, 1}, + {&__pyx_n_s_handle_exception, __pyx_k_handle_exception, sizeof(__pyx_k_handle_exception), 0, 0, 1, 1}, + {&__pyx_n_s_has_plugin_exception_breaks, __pyx_k_has_plugin_exception_breaks, sizeof(__pyx_k_has_plugin_exception_breaks), 0, 0, 1, 1}, + {&__pyx_n_s_has_plugin_line_breaks, __pyx_k_has_plugin_line_breaks, sizeof(__pyx_k_has_plugin_line_breaks), 0, 0, 1, 1}, + {&__pyx_n_s_id, __pyx_k_id, 
sizeof(__pyx_k_id), 0, 0, 1, 1}, + {&__pyx_n_s_ident, __pyx_k_ident, sizeof(__pyx_k_ident), 0, 0, 1, 1}, + {&__pyx_n_s_ignore_exceptions_thrown_in_line, __pyx_k_ignore_exceptions_thrown_in_line, sizeof(__pyx_k_ignore_exceptions_thrown_in_line), 0, 0, 1, 1}, + {&__pyx_n_s_ignore_libraries, __pyx_k_ignore_libraries, sizeof(__pyx_k_ignore_libraries), 0, 0, 1, 1}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_info, __pyx_k_info, sizeof(__pyx_k_info), 0, 0, 1, 1}, + {&__pyx_n_s_inspect, __pyx_k_inspect, sizeof(__pyx_k_inspect), 0, 0, 1, 1}, + {&__pyx_kp_s_invalid, __pyx_k_invalid, sizeof(__pyx_k_invalid), 0, 0, 1, 0}, + {&__pyx_n_s_is_filter_enabled, __pyx_k_is_filter_enabled, sizeof(__pyx_k_is_filter_enabled), 0, 0, 1, 1}, + {&__pyx_n_s_is_filter_libraries, __pyx_k_is_filter_libraries, sizeof(__pyx_k_is_filter_libraries), 0, 0, 1, 1}, + {&__pyx_n_s_is_ignored_by_filters, __pyx_k_is_ignored_by_filters, sizeof(__pyx_k_is_ignored_by_filters), 0, 0, 1, 1}, + {&__pyx_n_s_is_thread_alive, __pyx_k_is_thread_alive, sizeof(__pyx_k_is_thread_alive), 0, 0, 1, 1}, + {&__pyx_n_s_java_lang, __pyx_k_java_lang, sizeof(__pyx_k_java_lang), 0, 0, 1, 1}, + {&__pyx_n_s_join, __pyx_k_join, sizeof(__pyx_k_join), 0, 0, 1, 1}, + {&__pyx_n_s_just_raised, __pyx_k_just_raised, sizeof(__pyx_k_just_raised), 0, 0, 1, 1}, + {&__pyx_n_s_kill_all_pydev_threads, __pyx_k_kill_all_pydev_threads, sizeof(__pyx_k_kill_all_pydev_threads), 0, 0, 1, 1}, + {&__pyx_n_s_kwargs, __pyx_k_kwargs, sizeof(__pyx_k_kwargs), 0, 0, 1, 1}, + {&__pyx_n_s_line, __pyx_k_line, sizeof(__pyx_k_line), 0, 0, 1, 1}, + {&__pyx_n_s_linecache, __pyx_k_linecache, sizeof(__pyx_k_linecache), 0, 0, 1, 1}, + {&__pyx_n_s_log_event, __pyx_k_log_event, sizeof(__pyx_k_log_event), 0, 0, 1, 1}, + {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_main_debugger, __pyx_k_main_debugger, sizeof(__pyx_k_main_debugger), 0, 0, 1, 1}, + {&__pyx_n_s_match, __pyx_k_match, sizeof(__pyx_k_match), 0, 0, 1, 1}, + {&__pyx_n_s_method_object, __pyx_k_method_object, sizeof(__pyx_k_method_object), 0, 0, 1, 1}, + {&__pyx_kp_s_module, __pyx_k_module, sizeof(__pyx_k_module), 0, 0, 1, 0}, + {&__pyx_n_s_msg, __pyx_k_msg, sizeof(__pyx_k_msg), 0, 0, 1, 1}, + {&__pyx_n_s_new, __pyx_k_new, sizeof(__pyx_k_new), 0, 0, 1, 1}, + {&__pyx_n_s_new_frame, __pyx_k_new_frame, sizeof(__pyx_k_new_frame), 0, 0, 1, 1}, + {&__pyx_n_s_not_in_scope, __pyx_k_not_in_scope, sizeof(__pyx_k_not_in_scope), 0, 0, 1, 1}, + {&__pyx_n_s_notify_on_first_raise_only, __pyx_k_notify_on_first_raise_only, sizeof(__pyx_k_notify_on_first_raise_only), 0, 0, 1, 1}, + {&__pyx_n_s_org_python_core, __pyx_k_org_python_core, sizeof(__pyx_k_org_python_core), 0, 0, 1, 1}, + {&__pyx_n_s_original_call, __pyx_k_original_call, sizeof(__pyx_k_original_call), 0, 0, 1, 1}, + {&__pyx_n_s_os, __pyx_k_os, sizeof(__pyx_k_os), 0, 0, 1, 1}, + {&__pyx_n_s_os_path, __pyx_k_os_path, sizeof(__pyx_k_os_path), 0, 0, 1, 1}, + {&__pyx_n_s_output_checker, __pyx_k_output_checker, sizeof(__pyx_k_output_checker), 0, 0, 1, 1}, + {&__pyx_n_s_overwrite_prev_trace, __pyx_k_overwrite_prev_trace, sizeof(__pyx_k_overwrite_prev_trace), 0, 0, 1, 1}, + {&__pyx_n_s_path, __pyx_k_path, sizeof(__pyx_k_path), 0, 0, 1, 1}, + {&__pyx_n_s_pickle, __pyx_k_pickle, sizeof(__pyx_k_pickle), 0, 0, 1, 1}, + {&__pyx_n_s_plugin, __pyx_k_plugin, sizeof(__pyx_k_plugin), 0, 0, 1, 1}, + {&__pyx_n_s_pop, __pyx_k_pop, sizeof(__pyx_k_pop), 0, 0, 1, 1}, + {&__pyx_n_s_print_exc, __pyx_k_print_exc, sizeof(__pyx_k_print_exc), 0, 
0, 1, 1}, + {&__pyx_n_s_process_thread_not_alive, __pyx_k_process_thread_not_alive, sizeof(__pyx_k_process_thread_not_alive), 0, 0, 1, 1}, + {&__pyx_n_s_py_db, __pyx_k_py_db, sizeof(__pyx_k_py_db), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_bundle, __pyx_k_pydev_bundle, sizeof(__pyx_k_pydev_bundle), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_bundle_pydev_is_thread_al, __pyx_k_pydev_bundle_pydev_is_thread_al, sizeof(__pyx_k_pydev_bundle_pydev_is_thread_al), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_do_not_trace, __pyx_k_pydev_do_not_trace, sizeof(__pyx_k_pydev_do_not_trace), 0, 0, 1, 1}, + {&__pyx_kp_s_pydev_execfile_py, __pyx_k_pydev_execfile_py, sizeof(__pyx_k_pydev_execfile_py), 0, 0, 1, 0}, + {&__pyx_n_s_pydev_imps__pydev_saved_modules, __pyx_k_pydev_imps__pydev_saved_modules, sizeof(__pyx_k_pydev_imps__pydev_saved_modules), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_log, __pyx_k_pydev_log, sizeof(__pyx_k_pydev_log), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_message, __pyx_k_pydev_message, sizeof(__pyx_k_pydev_message), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle, __pyx_k_pydevd_bundle, sizeof(__pyx_k_pydevd_bundle), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_additional, __pyx_k_pydevd_bundle_pydevd_additional, sizeof(__pyx_k_pydevd_bundle_pydevd_additional), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_breakpoint, __pyx_k_pydevd_bundle_pydevd_breakpoint, sizeof(__pyx_k_pydevd_bundle_pydevd_breakpoint), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_comm, __pyx_k_pydevd_bundle_pydevd_comm, sizeof(__pyx_k_pydevd_bundle_pydevd_comm), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_k_pydevd_bundle_pydevd_constants, sizeof(__pyx_k_pydevd_bundle_pydevd_constants), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_cython, __pyx_k_pydevd_bundle_pydevd_cython, sizeof(__pyx_k_pydevd_bundle_pydevd_cython), 0, 0, 1, 1}, + {&__pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_k_pydevd_bundle_pydevd_cython_pyx, sizeof(__pyx_k_pydevd_bundle_pydevd_cython_pyx), 0, 0, 1, 0}, + {&__pyx_n_s_pydevd_bundle_pydevd_dont_trace, __pyx_k_pydevd_bundle_pydevd_dont_trace, sizeof(__pyx_k_pydevd_bundle_pydevd_dont_trace), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_frame_util, __pyx_k_pydevd_bundle_pydevd_frame_util, sizeof(__pyx_k_pydevd_bundle_pydevd_frame_util), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_kill_all_p, __pyx_k_pydevd_bundle_pydevd_kill_all_p, sizeof(__pyx_k_pydevd_bundle_pydevd_kill_all_p), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_signature, __pyx_k_pydevd_bundle_pydevd_signature, sizeof(__pyx_k_pydevd_bundle_pydevd_signature), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_utils, __pyx_k_pydevd_bundle_pydevd_utils, sizeof(__pyx_k_pydevd_bundle_pydevd_utils), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_dont_trace, __pyx_k_pydevd_dont_trace, sizeof(__pyx_k_pydevd_dont_trace), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_file_utils, __pyx_k_pydevd_file_utils, sizeof(__pyx_k_pydevd_file_utils), 0, 0, 1, 1}, + {&__pyx_kp_s_pydevd_py, __pyx_k_pydevd_py, sizeof(__pyx_k_pydevd_py), 0, 0, 1, 0}, + {&__pyx_kp_s_pydevd_traceproperty_py, __pyx_k_pydevd_traceproperty_py, sizeof(__pyx_k_pydevd_traceproperty_py), 0, 0, 1, 0}, + {&__pyx_n_s_pydevd_tracing, __pyx_k_pydevd_tracing, sizeof(__pyx_k_pydevd_tracing), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_vars, __pyx_k_pydevd_vars, sizeof(__pyx_k_pydevd_vars), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_checksum, __pyx_k_pyx_checksum, sizeof(__pyx_k_pyx_checksum), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_state, __pyx_k_pyx_state, sizeof(__pyx_k_pyx_state), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_type, __pyx_k_pyx_type, 
sizeof(__pyx_k_pyx_type), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_PyDBAdditionalThr, __pyx_k_pyx_unpickle_PyDBAdditionalThr, sizeof(__pyx_k_pyx_unpickle_PyDBAdditionalThr), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_PyDBFrame, __pyx_k_pyx_unpickle_PyDBFrame, sizeof(__pyx_k_pyx_unpickle_PyDBFrame), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_SafeCallWrapper, __pyx_k_pyx_unpickle_SafeCallWrapper, sizeof(__pyx_k_pyx_unpickle_SafeCallWrapper), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_unpickle_ThreadTracer, __pyx_k_pyx_unpickle_ThreadTracer, sizeof(__pyx_k_pyx_unpickle_ThreadTracer), 0, 0, 1, 1}, + {&__pyx_n_s_pyx_vtable, __pyx_k_pyx_vtable, sizeof(__pyx_k_pyx_vtable), 0, 0, 1, 1}, + {&__pyx_n_s_qname, __pyx_k_qname, sizeof(__pyx_k_qname), 0, 0, 1, 1}, + {&__pyx_n_s_quitting, __pyx_k_quitting, sizeof(__pyx_k_quitting), 0, 0, 1, 1}, + {&__pyx_n_s_re, __pyx_k_re, sizeof(__pyx_k_re), 0, 0, 1, 1}, + {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1}, + {&__pyx_n_s_remove_additional_frame_by_id, __pyx_k_remove_additional_frame_by_id, sizeof(__pyx_k_remove_additional_frame_by_id), 0, 0, 1, 1}, + {&__pyx_n_s_remove_return_values, __pyx_k_remove_return_values, sizeof(__pyx_k_remove_return_values), 0, 0, 1, 1}, + {&__pyx_n_s_remove_return_values_flag, __pyx_k_remove_return_values_flag, sizeof(__pyx_k_remove_return_values_flag), 0, 0, 1, 1}, + {&__pyx_n_s_result, __pyx_k_result, sizeof(__pyx_k_result), 0, 0, 1, 1}, + {&__pyx_n_s_ret, __pyx_k_ret, sizeof(__pyx_k_ret), 0, 0, 1, 1}, + {&__pyx_n_s_return, __pyx_k_return, sizeof(__pyx_k_return), 0, 0, 1, 1}, + {&__pyx_n_s_run, __pyx_k_run, sizeof(__pyx_k_run), 0, 0, 1, 1}, + {&__pyx_kp_s_s_s, __pyx_k_s_s, sizeof(__pyx_k_s_s), 0, 0, 1, 0}, + {&__pyx_n_s_self, __pyx_k_self, sizeof(__pyx_k_self), 0, 0, 1, 1}, + {&__pyx_n_s_send_caught_exception_stack, __pyx_k_send_caught_exception_stack, sizeof(__pyx_k_send_caught_exception_stack), 0, 0, 1, 1}, + {&__pyx_n_s_send_caught_exception_stack_proc, __pyx_k_send_caught_exception_stack_proc, sizeof(__pyx_k_send_caught_exception_stack_proc), 0, 0, 1, 1}, + {&__pyx_n_s_send_signature_call_trace, __pyx_k_send_signature_call_trace, sizeof(__pyx_k_send_signature_call_trace), 0, 0, 1, 1}, + {&__pyx_n_s_send_signature_return_trace, __pyx_k_send_signature_return_trace, sizeof(__pyx_k_send_signature_return_trace), 0, 0, 1, 1}, + {&__pyx_n_s_set_suspend, __pyx_k_set_suspend, sizeof(__pyx_k_set_suspend), 0, 0, 1, 1}, + {&__pyx_n_s_set_trace_for_frame_and_parents, __pyx_k_set_trace_for_frame_and_parents, sizeof(__pyx_k_set_trace_for_frame_and_parents), 0, 0, 1, 1}, + {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1}, + {&__pyx_n_s_should_stop_on_exception, __pyx_k_should_stop_on_exception, sizeof(__pyx_k_should_stop_on_exception), 0, 0, 1, 1}, + {&__pyx_n_s_should_trace_hook, __pyx_k_should_trace_hook, sizeof(__pyx_k_should_trace_hook), 0, 0, 1, 1}, + {&__pyx_n_s_show_return_values, __pyx_k_show_return_values, sizeof(__pyx_k_show_return_values), 0, 0, 1, 1}, + {&__pyx_n_s_signature_factory, __pyx_k_signature_factory, sizeof(__pyx_k_signature_factory), 0, 0, 1, 1}, + {&__pyx_n_s_st_mtime, __pyx_k_st_mtime, sizeof(__pyx_k_st_mtime), 0, 0, 1, 1}, + {&__pyx_n_s_st_size, __pyx_k_st_size, sizeof(__pyx_k_st_size), 0, 0, 1, 1}, + {&__pyx_n_s_stack, __pyx_k_stack, sizeof(__pyx_k_stack), 0, 0, 1, 1}, + {&__pyx_n_s_stat, __pyx_k_stat, sizeof(__pyx_k_stat), 0, 0, 1, 1}, + {&__pyx_n_s_stderr, __pyx_k_stderr, sizeof(__pyx_k_stderr), 0, 0, 1, 1}, + {&__pyx_n_s_stop, 
__pyx_k_stop, sizeof(__pyx_k_stop), 0, 0, 1, 1}, + {&__pyx_kp_s_stringsource, __pyx_k_stringsource, sizeof(__pyx_k_stringsource), 0, 0, 1, 0}, + {&__pyx_n_s_suspend, __pyx_k_suspend, sizeof(__pyx_k_suspend), 0, 0, 1, 1}, + {&__pyx_n_s_suspend_all_other_threads, __pyx_k_suspend_all_other_threads, sizeof(__pyx_k_suspend_all_other_threads), 0, 0, 1, 1}, + {&__pyx_n_s_suspend_on_breakpoint_exception, __pyx_k_suspend_on_breakpoint_exception, sizeof(__pyx_k_suspend_on_breakpoint_exception), 0, 0, 1, 1}, + {&__pyx_n_s_suspend_policy, __pyx_k_suspend_policy, sizeof(__pyx_k_suspend_policy), 0, 0, 1, 1}, + {&__pyx_n_s_sys, __pyx_k_sys, sizeof(__pyx_k_sys), 0, 0, 1, 1}, + {&__pyx_n_s_t, __pyx_k_t, sizeof(__pyx_k_t), 0, 0, 1, 1}, + {&__pyx_n_s_tb, __pyx_k_tb, sizeof(__pyx_k_tb), 0, 0, 1, 1}, + {&__pyx_n_s_tb_frame, __pyx_k_tb_frame, sizeof(__pyx_k_tb_frame), 0, 0, 1, 1}, + {&__pyx_n_s_tb_lineno, __pyx_k_tb_lineno, sizeof(__pyx_k_tb_lineno), 0, 0, 1, 1}, + {&__pyx_n_s_tb_next, __pyx_k_tb_next, sizeof(__pyx_k_tb_next), 0, 0, 1, 1}, + {&__pyx_n_s_termination_event_set, __pyx_k_termination_event_set, sizeof(__pyx_k_termination_event_set), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_thread, __pyx_k_thread, sizeof(__pyx_k_thread), 0, 0, 1, 1}, + {&__pyx_n_s_thread_analyser, __pyx_k_thread_analyser, sizeof(__pyx_k_thread_analyser), 0, 0, 1, 1}, + {&__pyx_n_s_thread_state, __pyx_k_thread_state, sizeof(__pyx_k_thread_state), 0, 0, 1, 1}, + {&__pyx_n_s_thread_states, __pyx_k_thread_states, sizeof(__pyx_k_thread_states), 0, 0, 1, 1}, + {&__pyx_n_s_thread_to_state, __pyx_k_thread_to_state, sizeof(__pyx_k_thread_to_state), 0, 0, 1, 1}, + {&__pyx_n_s_thread_tracer, __pyx_k_thread_tracer, sizeof(__pyx_k_thread_tracer), 0, 0, 1, 1}, + {&__pyx_n_s_threading, __pyx_k_threading, sizeof(__pyx_k_threading), 0, 0, 1, 1}, + {&__pyx_n_s_threadingCurrentThread, __pyx_k_threadingCurrentThread, sizeof(__pyx_k_threadingCurrentThread), 0, 0, 1, 1}, + {&__pyx_n_s_tid_to_last_frame, __pyx_k_tid_to_last_frame, sizeof(__pyx_k_tid_to_last_frame), 0, 0, 1, 1}, + {&__pyx_n_s_toArray, __pyx_k_toArray, sizeof(__pyx_k_toArray), 0, 0, 1, 1}, + {&__pyx_n_s_trace_dispatch, __pyx_k_trace_dispatch, sizeof(__pyx_k_trace_dispatch), 0, 0, 1, 1}, + {&__pyx_n_s_trace_exception, __pyx_k_trace_exception, sizeof(__pyx_k_trace_exception), 0, 0, 1, 1}, + {&__pyx_n_s_trace_return, __pyx_k_trace_return, sizeof(__pyx_k_trace_return), 0, 0, 1, 1}, + {&__pyx_n_s_traceback, __pyx_k_traceback, sizeof(__pyx_k_traceback), 0, 0, 1, 1}, + {&__pyx_n_s_tracer, __pyx_k_tracer, sizeof(__pyx_k_tracer), 0, 0, 1, 1}, + {&__pyx_n_s_update, __pyx_k_update, sizeof(__pyx_k_update), 0, 0, 1, 1}, + {&__pyx_kp_s_utf_8, __pyx_k_utf_8, sizeof(__pyx_k_utf_8), 0, 0, 1, 0}, + {&__pyx_n_s_val, __pyx_k_val, sizeof(__pyx_k_val), 0, 0, 1, 1}, + {&__pyx_n_s_value, __pyx_k_value, sizeof(__pyx_k_value), 0, 0, 1, 1}, + {&__pyx_n_s_version, __pyx_k_version, sizeof(__pyx_k_version), 0, 0, 1, 1}, + {&__pyx_n_s_write, __pyx_k_write, sizeof(__pyx_k_write), 0, 0, 1, 1}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(0, 53, __pyx_L1_error) + __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(0, 154, __pyx_L1_error) + __pyx_builtin_eval = __Pyx_GetBuiltinName(__pyx_n_s_eval); if (!__pyx_builtin_eval) __PYX_ERR(0, 170, __pyx_L1_error) + 
__pyx_builtin_id = __Pyx_GetBuiltinName(__pyx_n_s_id); if (!__pyx_builtin_id) __PYX_ERR(0, 422, __pyx_L1_error) + __pyx_builtin_StopIteration = __Pyx_GetBuiltinName(__pyx_n_s_StopIteration); if (!__pyx_builtin_StopIteration) __PYX_ERR(0, 566, __pyx_L1_error) + __pyx_builtin_GeneratorExit = __Pyx_GetBuiltinName(__pyx_n_s_GeneratorExit); if (!__pyx_builtin_GeneratorExit) __PYX_ERR(0, 566, __pyx_L1_error) + __pyx_builtin_KeyboardInterrupt = __Pyx_GetBuiltinName(__pyx_n_s_KeyboardInterrupt); if (!__pyx_builtin_KeyboardInterrupt) __PYX_ERR(0, 890, __pyx_L1_error) + __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 960, __pyx_L1_error) + __pyx_builtin_SystemExit = __Pyx_GetBuiltinName(__pyx_n_s_SystemExit); if (!__pyx_builtin_SystemExit) __PYX_ERR(0, 1103, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "_pydevd_bundle/pydevd_cython.pyx":177 + * if type(condition) != type(''): + * if hasattr(condition, 'encode'): + * condition = condition.encode('utf-8') # <<<<<<<<<<<<<< + * + * msg = 'Error while evaluating expression: %s\n' % (condition,) + */ + __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_utf_8); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(0, 177, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__2); + __Pyx_GIVEREF(__pyx_tuple__2); + + /* "_pydevd_bundle/pydevd_cython.pyx":306 + * info.pydev_message = exception_breakpoint.qname + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') # <<<<<<<<<<<<<< + * flag = True + * else: + */ + __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_s_utf_8); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 306, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "_pydevd_bundle/pydevd_cython.pyx":317 + * info.pydev_message = exception_breakpoint.qname + * except: + * info.pydev_message = exception_breakpoint.qname.encode('utf-8') # <<<<<<<<<<<<<< + * flag = True + * else: + */ + __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_s_utf_8); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(0, 317, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__4); + __Pyx_GIVEREF(__pyx_tuple__4); + + /* "_pydevd_bundle/pydevd_cython.pyx":21 + * from org.python.core import ThreadStateMapping + * try: + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version # <<<<<<<<<<<<<< + * except NoSuchFieldException: + * cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + */ + __pyx_tuple__6 = PyTuple_Pack(1, __pyx_n_s_globalThreadStates); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 21, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__6); + __Pyx_GIVEREF(__pyx_tuple__6); + + /* "_pydevd_bundle/pydevd_cython.pyx":23 + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + * except NoSuchFieldException: + * cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 # <<<<<<<<<<<<<< + * cachedThreadState.accessible = True + * thread_states = cachedThreadState.get(ThreadStateMapping) + */ + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_n_s_cachedThreadState); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 23, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + + /* "_pydevd_bundle/pydevd_cython.pyx":27 + * thread_states = 
cachedThreadState.get(ThreadStateMapping) + * + * def _current_frames(): # <<<<<<<<<<<<<< + * as_array = thread_states.entrySet().toArray() + * ret = {} + */ + __pyx_tuple__8 = PyTuple_Pack(6, __pyx_n_s_as_array, __pyx_n_s_ret, __pyx_n_s_thread_to_state, __pyx_n_s_thread, __pyx_n_s_thread_state, __pyx_n_s_frame); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__8); + __Pyx_GIVEREF(__pyx_tuple__8); + __pyx_codeobj__9 = (PyObject*)__Pyx_PyCode_New(0, 0, 6, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__8, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_current_frames, 27, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__9)) __PYX_ERR(0, 27, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":49 + * + * # IronPython doesn't have it. Let's use our workaround... + * def _current_frames(): # <<<<<<<<<<<<<< + * return _tid_to_last_frame + * + */ + __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_current_frames, 49, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 49, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":53 + * + * else: + * raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).') # <<<<<<<<<<<<<< + * else: + * _current_frames = sys._current_frames + */ + __pyx_tuple__11 = PyTuple_Pack(1, __pyx_kp_s_Unable_to_proceed_sys__current_f); if (unlikely(!__pyx_tuple__11)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__11); + __Pyx_GIVEREF(__pyx_tuple__11); + + /* "_pydevd_bundle/pydevd_cython.pyx":155 + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): # <<<<<<<<<<<<<< + * pass + * + */ + __pyx_tuple__12 = PyTuple_Pack(2, __pyx_n_s_args, __pyx_n_s_kwargs); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__12); + __Pyx_GIVEREF(__pyx_tuple__12); + __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(0, 0, 2, 0, CO_VARARGS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_send_signature_call_trace, 155, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 155, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":160 + * basename = os.path.basename + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') # <<<<<<<<<<<<<< + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + */ + __pyx_tuple__14 = PyTuple_Pack(1, __pyx_kp_s_IgnoreException); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__14); + __Pyx_GIVEREF(__pyx_tuple__14); + + /* "_pydevd_bundle/pydevd_cython.pyx":161 + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') # <<<<<<<<<<<<<< + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + */ + __pyx_tuple__15 = PyTuple_Pack(2, __pyx_kp_s_pydevd_py, __pyx_n_s_run); if (unlikely(!__pyx_tuple__15)) __PYX_ERR(0, 161, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__15); + 
__Pyx_GIVEREF(__pyx_tuple__15); + + /* "_pydevd_bundle/pydevd_cython.pyx":162 + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') # <<<<<<<<<<<<<< + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + * get_file_type = DONT_TRACE.get + */ + __pyx_tuple__16 = PyTuple_Pack(2, __pyx_kp_s_pydev_execfile_py, __pyx_n_s_execfile); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 162, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__16); + __Pyx_GIVEREF(__pyx_tuple__16); + + /* "_pydevd_bundle/pydevd_cython.pyx":167 + * + * + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): # <<<<<<<<<<<<<< + * condition = breakpoint.condition + * try: + */ + __pyx_tuple__17 = PyTuple_Pack(14, __pyx_n_s_py_db, __pyx_n_s_info, __pyx_n_s_breakpoint, __pyx_n_s_new_frame, __pyx_n_s_default_return_value, __pyx_n_s_condition, __pyx_n_s_val, __pyx_n_s_msg, __pyx_n_s_stop, __pyx_n_s_etype, __pyx_n_s_value, __pyx_n_s_tb, __pyx_n_s_error, __pyx_n_s_stack); if (unlikely(!__pyx_tuple__17)) __PYX_ERR(0, 167, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__17); + __Pyx_GIVEREF(__pyx_tuple__17); + __pyx_codeobj__18 = (PyObject*)__Pyx_PyCode_New(5, 0, 14, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__17, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_handle_breakpoint_condition, 167, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__18)) __PYX_ERR(0, 167, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":203 + * + * + * def handle_breakpoint_expression(breakpoint, info, new_frame): # <<<<<<<<<<<<<< + * try: + * try: + */ + __pyx_tuple__19 = PyTuple_Pack(4, __pyx_n_s_breakpoint, __pyx_n_s_info, __pyx_n_s_new_frame, __pyx_n_s_val); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__19); + __Pyx_GIVEREF(__pyx_tuple__19); + __pyx_codeobj__20 = (PyObject*)__Pyx_PyCode_New(3, 0, 4, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__19, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_handle_breakpoint_expression, 203, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__20)) __PYX_ERR(0, 203, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":931 + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): # <<<<<<<<<<<<<< + * pass + * + */ + __pyx_tuple__21 = PyTuple_Pack(2, __pyx_n_s_args, __pyx_n_s_kwargs); if (unlikely(!__pyx_tuple__21)) __PYX_ERR(0, 931, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__21); + __Pyx_GIVEREF(__pyx_tuple__21); + __pyx_codeobj__22 = (PyObject*)__Pyx_PyCode_New(0, 0, 2, 0, CO_VARARGS|CO_VARKEYWORDS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__21, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_send_signature_call_trace, 931, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__22)) __PYX_ERR(0, 931, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":951 + * global_cache_frame_skips = {} + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * t = threadingCurrentThread() + * + */ + __pyx_tuple__23 = PyTuple_Pack(7, __pyx_n_s_py_db, __pyx_n_s_frame, __pyx_n_s_event, __pyx_n_s_arg, __pyx_n_s_t, __pyx_n_s_additional_info, __pyx_n_s_thread_tracer); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(0, 951, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__23); + __Pyx_GIVEREF(__pyx_tuple__23); + __pyx_codeobj__24 = (PyObject*)__Pyx_PyCode_New(4, 0, 7, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__23, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_trace_dispatch, 951, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__24)) __PYX_ERR(0, 951, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":1133 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * _tid_to_last_frame[self._args[1].ident] = frame + * return _original_call(self, frame, event, arg) + */ + __pyx_tuple__25 = PyTuple_Pack(4, __pyx_n_s_self, __pyx_n_s_frame, __pyx_n_s_event, __pyx_n_s_arg); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(0, 1133, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__25); + __Pyx_GIVEREF(__pyx_tuple__25); + __pyx_codeobj__26 = (PyObject*)__Pyx_PyCode_New(4, 0, 4, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__25, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pydevd_bundle_pydevd_cython_pyx, __pyx_n_s_call_2, 1133, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__26)) __PYX_ERR(0, 1133, __pyx_L1_error) + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0xa9a4341: + * from pickle import PickleError + */ + __pyx_tuple__27 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_PickleError, __pyx_n_s_result); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__27); + __Pyx_GIVEREF(__pyx_tuple__27); + __pyx_codeobj__28 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__27, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_PyDBAdditionalThr, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__28)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__29 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_PickleError, __pyx_n_s_result); if (unlikely(!__pyx_tuple__29)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__29); + __Pyx_GIVEREF(__pyx_tuple__29); + __pyx_codeobj__30 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__29, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_PyDBFrame, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__30)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__31 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_PickleError, __pyx_n_s_result); if (unlikely(!__pyx_tuple__31)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__31); + __Pyx_GIVEREF(__pyx_tuple__31); + __pyx_codeobj__32 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__31, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_SafeCallWrapper, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__32)) __PYX_ERR(1, 1, __pyx_L1_error) + __pyx_tuple__33 = PyTuple_Pack(5, __pyx_n_s_pyx_type, __pyx_n_s_pyx_checksum, __pyx_n_s_pyx_state, __pyx_n_s_PickleError, __pyx_n_s_result); if (unlikely(!__pyx_tuple__33)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__33); + __Pyx_GIVEREF(__pyx_tuple__33); 
+ __pyx_codeobj__34 = (PyObject*)__Pyx_PyCode_New(3, 0, 5, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__33, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_stringsource, __pyx_n_s_pyx_unpickle_ThreadTracer, 1, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__34)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + __Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_4 = PyInt_FromLong(4); if (unlikely(!__pyx_int_4)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_32 = PyInt_FromLong(32); if (unlikely(!__pyx_int_32)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_64458794 = PyInt_FromLong(64458794L); if (unlikely(!__pyx_int_64458794)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_125568891 = PyInt_FromLong(125568891L); if (unlikely(!__pyx_int_125568891)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_177881921 = PyInt_FromLong(177881921L); if (unlikely(!__pyx_int_177881921)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_262582659 = PyInt_FromLong(262582659L); if (unlikely(!__pyx_int_262582659)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_neg_1 = PyInt_FromLong(-1); if (unlikely(!__pyx_int_neg_1)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC initpydevd_cython(void); /*proto*/ +PyMODINIT_FUNC initpydevd_cython(void) +#else +PyMODINIT_FUNC PyInit_pydevd_cython(void); /*proto*/ +PyMODINIT_FUNC PyInit_pydevd_cython(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + PyObject *__pyx_t_5 = NULL; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + int __pyx_t_8; + PyObject *__pyx_t_9 = NULL; + PyObject *__pyx_t_10 = NULL; + PyObject *__pyx_t_11 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_pydevd_cython(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + 
#if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("pydevd_cython", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_cython_runtime = PyImport_AddModule((char *) "cython_runtime"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. ---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main__pydevd_bundle__pydevd_cython) { + if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "_pydevd_bundle.pydevd_cython")) { + if (unlikely(PyDict_SetItemString(modules, "_pydevd_bundle.pydevd_cython", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global init code ---*/ + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo) < 0) __PYX_ERR(0, 61, __pyx_L1_error) + __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "PyDBAdditionalThreadInfo", (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo) < 0) __PYX_ERR(0, 61, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo) < 0) __PYX_ERR(0, 61, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo = &__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBAdditionalThreadInfo; + __pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame = &__pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + __pyx_vtable_14_pydevd_bundle_13pydevd_cython_PyDBFrame.trace_dispatch = (PyObject *(*)(struct __pyx_obj_14_pydevd_bundle_13pydevd_cython_PyDBFrame *, PyObject *, PyObject *, PyObject *, int __pyx_skip_dispatch))__pyx_f_14_pydevd_bundle_13pydevd_cython_9PyDBFrame_trace_dispatch; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 218, __pyx_L1_error) + __pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame.tp_print = 0; + if (__Pyx_SetVtable(__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame.tp_dict, 
__pyx_vtabptr_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 218, __pyx_L1_error) + if (PyObject_SetAttrString(__pyx_m, "PyDBFrame", (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 218, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame) < 0) __PYX_ERR(0, 218, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame = &__pyx_type_14_pydevd_bundle_13pydevd_cython_PyDBFrame; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper) < 0) __PYX_ERR(0, 973, __pyx_L1_error) + __pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper.tp_print = 0; + if (PyObject_SetAttrString(__pyx_m, "SafeCallWrapper", (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper) < 0) __PYX_ERR(0, 973, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper) < 0) __PYX_ERR(0, 973, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper = &__pyx_type_14_pydevd_bundle_13pydevd_cython_SafeCallWrapper; + if (PyType_Ready(&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer) < 0) __PYX_ERR(0, 985, __pyx_L1_error) + __pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer.tp_print = 0; + #if CYTHON_COMPILING_IN_CPYTHON + { + PyObject *wrapper = PyObject_GetAttrString((PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer, "__call__"); if (unlikely(!wrapper)) __PYX_ERR(0, 985, __pyx_L1_error) + if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) { + __pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__ = *((PyWrapperDescrObject *)wrapper)->d_base; + __pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__.doc = __pyx_doc_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__; + ((PyWrapperDescrObject *)wrapper)->d_base = &__pyx_wrapperbase_14_pydevd_bundle_13pydevd_cython_12ThreadTracer_2__call__; + } + } + #endif + if (PyObject_SetAttrString(__pyx_m, "ThreadTracer", (PyObject *)&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer) < 0) __PYX_ERR(0, 985, __pyx_L1_error) + if (__Pyx_setup_reduce((PyObject*)&__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer) < 0) __PYX_ERR(0, 985, __pyx_L1_error) + __pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer = &__pyx_type_14_pydevd_bundle_13pydevd_cython_ThreadTracer; + /*--- Type import code ---*/ + __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__Pyx_BUILTIN_MODULE_NAME, "type", + #if CYTHON_COMPILING_IN_PYPY + sizeof(PyTypeObject), + #else + sizeof(PyHeapTypeObject), + #endif + 0); if (unlikely(!__pyx_ptype_7cpython_4type_type)) __PYX_ERR(2, 9, __pyx_L1_error) + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "_pydevd_bundle/pydevd_cython.pyx":5 + * # DO NOT edit manually! + * # DO NOT edit manually! 
+ * import sys # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_constants import STATE_RUN, PYTHON_SUSPEND, IS_JYTHON, IS_IRONPYTHON + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_sys, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_sys, __pyx_t_1) < 0) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":6 + * # DO NOT edit manually! + * import sys + * from _pydevd_bundle.pydevd_constants import STATE_RUN, PYTHON_SUSPEND, IS_JYTHON, IS_IRONPYTHON # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * # ELSE + */ + __pyx_t_1 = PyList_New(4); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_STATE_RUN); + __Pyx_GIVEREF(__pyx_n_s_STATE_RUN); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_STATE_RUN); + __Pyx_INCREF(__pyx_n_s_PYTHON_SUSPEND); + __Pyx_GIVEREF(__pyx_n_s_PYTHON_SUSPEND); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_PYTHON_SUSPEND); + __Pyx_INCREF(__pyx_n_s_IS_JYTHON); + __Pyx_GIVEREF(__pyx_n_s_IS_JYTHON); + PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_IS_JYTHON); + __Pyx_INCREF(__pyx_n_s_IS_IRONPYTHON); + __Pyx_GIVEREF(__pyx_n_s_IS_IRONPYTHON); + PyList_SET_ITEM(__pyx_t_1, 3, __pyx_n_s_IS_IRONPYTHON); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_STATE_RUN); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_STATE_RUN, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_PYTHON_SUSPEND); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_PYTHON_SUSPEND, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_IS_JYTHON); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_IS_JYTHON, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_IS_IRONPYTHON); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_IS_IRONPYTHON, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":12 + * # ENDIF + * + * version = 4 # <<<<<<<<<<<<<< + * + * if not hasattr(sys, '_current_frames'): + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_version, __pyx_int_4) < 0) __PYX_ERR(0, 12, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":14 + * version = 4 + * + * if not hasattr(sys, '_current_frames'): # <<<<<<<<<<<<<< + * + * # Some versions of Jython don't have it (but we can provide a replacement) + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_sys); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_HasAttr(__pyx_t_2, __pyx_n_s_current_frames); if 
(unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 14, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_4 = ((!(__pyx_t_3 != 0)) != 0); + if (__pyx_t_4) { + + /* "_pydevd_bundle/pydevd_cython.pyx":17 + * + * # Some versions of Jython don't have it (but we can provide a replacement) + * if IS_JYTHON: # <<<<<<<<<<<<<< + * from java.lang import NoSuchFieldException + * from org.python.core import ThreadStateMapping + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_IS_JYTHON); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 17, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__pyx_t_4) { + + /* "_pydevd_bundle/pydevd_cython.pyx":18 + * # Some versions of Jython don't have it (but we can provide a replacement) + * if IS_JYTHON: + * from java.lang import NoSuchFieldException # <<<<<<<<<<<<<< + * from org.python.core import ThreadStateMapping + * try: + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_NoSuchFieldException); + __Pyx_GIVEREF(__pyx_n_s_NoSuchFieldException); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_NoSuchFieldException); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_java_lang, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_NoSuchFieldException); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NoSuchFieldException, __pyx_t_2) < 0) __PYX_ERR(0, 18, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":19 + * if IS_JYTHON: + * from java.lang import NoSuchFieldException + * from org.python.core import ThreadStateMapping # <<<<<<<<<<<<<< + * try: + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_ThreadStateMapping); + __Pyx_GIVEREF(__pyx_n_s_ThreadStateMapping); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_ThreadStateMapping); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_org_python_core, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_ThreadStateMapping); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_ThreadStateMapping, __pyx_t_1) < 0) __PYX_ERR(0, 19, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":20 + * from java.lang import NoSuchFieldException + * from org.python.core import ThreadStateMapping + * try: # <<<<<<<<<<<<<< + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + * except NoSuchFieldException: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":21 + * from org.python.core import 
ThreadStateMapping + * try: + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version # <<<<<<<<<<<<<< + * except NoSuchFieldException: + * cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_ThreadStateMapping); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_getDeclaredField); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 21, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 21, __pyx_L4_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_cachedThreadState, __pyx_t_2) < 0) __PYX_ERR(0, 21, __pyx_L4_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":20 + * from java.lang import NoSuchFieldException + * from org.python.core import ThreadStateMapping + * try: # <<<<<<<<<<<<<< + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + * except NoSuchFieldException: + */ + } + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L9_try_end; + __pyx_L4_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":22 + * try: + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + * except NoSuchFieldException: # <<<<<<<<<<<<<< + * cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + * cachedThreadState.accessible = True + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_NoSuchFieldException); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 22, __pyx_L6_except_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_t_2); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__pyx_t_8) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_1, &__pyx_t_9) < 0) __PYX_ERR(0, 22, __pyx_L6_except_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_9); + + /* "_pydevd_bundle/pydevd_cython.pyx":23 + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + * except NoSuchFieldException: + * cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 # <<<<<<<<<<<<<< + * cachedThreadState.accessible = True + * thread_states = cachedThreadState.get(ThreadStateMapping) + */ + __pyx_t_10 = __Pyx_GetModuleGlobalName(__pyx_n_s_ThreadStateMapping); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 23, __pyx_L6_except_error) + __Pyx_GOTREF(__pyx_t_10); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_10, __pyx_n_s_getDeclaredField); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 23, __pyx_L6_except_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __pyx_t_10 = __Pyx_PyObject_Call(__pyx_t_11, __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 23, __pyx_L6_except_error) + __Pyx_GOTREF(__pyx_t_10); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + if (PyDict_SetItem(__pyx_d, 
__pyx_n_s_cachedThreadState, __pyx_t_10) < 0) __PYX_ERR(0, 23, __pyx_L6_except_error) + __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L5_exception_handled; + } + goto __pyx_L6_except_error; + __pyx_L6_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":20 + * from java.lang import NoSuchFieldException + * from org.python.core import ThreadStateMapping + * try: # <<<<<<<<<<<<<< + * cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + * except NoSuchFieldException: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + goto __pyx_L1_error; + __pyx_L5_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + __pyx_L9_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":24 + * except NoSuchFieldException: + * cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + * cachedThreadState.accessible = True # <<<<<<<<<<<<<< + * thread_states = cachedThreadState.get(ThreadStateMapping) + * + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_cachedThreadState); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (__Pyx_PyObject_SetAttrStr(__pyx_t_9, __pyx_n_s_accessible, Py_True) < 0) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":25 + * cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + * cachedThreadState.accessible = True + * thread_states = cachedThreadState.get(ThreadStateMapping) # <<<<<<<<<<<<<< + * + * def _current_frames(): + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_cachedThreadState); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_get); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_ThreadStateMapping); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_10 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_10)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_10); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (!__pyx_t_10) { + __pyx_t_9 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_9); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, __pyx_t_1}; + __pyx_t_9 = __Pyx_PyFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_2)) { + PyObject *__pyx_temp[2] = {__pyx_t_10, 
__pyx_t_1}; + __pyx_t_9 = __Pyx_PyCFunction_FastCall(__pyx_t_2, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else + #endif + { + __pyx_t_11 = PyTuple_New(1+1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_10); __pyx_t_10 = NULL; + __Pyx_GIVEREF(__pyx_t_1); + PyTuple_SET_ITEM(__pyx_t_11, 0+1, __pyx_t_1); + __pyx_t_1 = 0; + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_11, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + } + } + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_thread_states, __pyx_t_9) < 0) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":27 + * thread_states = cachedThreadState.get(ThreadStateMapping) + * + * def _current_frames(): # <<<<<<<<<<<<<< + * as_array = thread_states.entrySet().toArray() + * ret = {} + */ + __pyx_t_9 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_1_current_frames, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_current_frames, __pyx_t_9) < 0) __PYX_ERR(0, 27, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":17 + * + * # Some versions of Jython don't have it (but we can provide a replacement) + * if IS_JYTHON: # <<<<<<<<<<<<<< + * from java.lang import NoSuchFieldException + * from org.python.core import ThreadStateMapping + */ + goto __pyx_L3; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":45 + * return ret + * + * elif IS_IRONPYTHON: # <<<<<<<<<<<<<< + * _tid_to_last_frame = {} + * + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_IS_IRONPYTHON); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_9); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 45, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (__pyx_t_4) { + + /* "_pydevd_bundle/pydevd_cython.pyx":46 + * + * elif IS_IRONPYTHON: + * _tid_to_last_frame = {} # <<<<<<<<<<<<<< + * + * # IronPython doesn't have it. Let's use our workaround... + */ + __pyx_t_9 = PyDict_New(); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_tid_to_last_frame, __pyx_t_9) < 0) __PYX_ERR(0, 46, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":49 + * + * # IronPython doesn't have it. Let's use our workaround... 
+ * def _current_frames(): # <<<<<<<<<<<<<< + * return _tid_to_last_frame + * + */ + __pyx_t_9 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_3_current_frames, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_current_frames, __pyx_t_9) < 0) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":45 + * return ret + * + * elif IS_IRONPYTHON: # <<<<<<<<<<<<<< + * _tid_to_last_frame = {} + * + */ + goto __pyx_L3; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":53 + * + * else: + * raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).') # <<<<<<<<<<<<<< + * else: + * _current_frames = sys._current_frames + */ + /*else*/ { + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__11, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_Raise(__pyx_t_9, 0, 0, 0); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __PYX_ERR(0, 53, __pyx_L1_error) + } + __pyx_L3:; + + /* "_pydevd_bundle/pydevd_cython.pyx":14 + * version = 4 + * + * if not hasattr(sys, '_current_frames'): # <<<<<<<<<<<<<< + * + * # Some versions of Jython don't have it (but we can provide a replacement) + */ + goto __pyx_L2; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":55 + * raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).') + * else: + * _current_frames = sys._current_frames # <<<<<<<<<<<<<< + * + * #======================================================================================================================= + */ + /*else*/ { + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_sys); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 55, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_current_frames); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 55, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_current_frames, __pyx_t_2) < 0) __PYX_ERR(0, 55, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } + __pyx_L2:; + + /* "_pydevd_bundle/pydevd_cython.pyx":129 + * self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + * + * import linecache # <<<<<<<<<<<<<< + * import os.path + * import re + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_linecache, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_linecache, __pyx_t_2) < 0) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":130 + * + * import linecache + * import os.path # <<<<<<<<<<<<<< + * import re + * import sys + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_os_path, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_os, __pyx_t_2) < 0) __PYX_ERR(0, 130, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":131 + * import linecache + * import os.path + * import re # <<<<<<<<<<<<<< + * import sys + * import traceback # @Reimport + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_re, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, 
__pyx_n_s_re, __pyx_t_2) < 0) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":132 + * import os.path + * import re + * import sys # <<<<<<<<<<<<<< + * import traceback # @Reimport + * + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_sys, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_sys, __pyx_t_2) < 0) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":133 + * import re + * import sys + * import traceback # @Reimport # <<<<<<<<<<<<<< + * + * from _pydev_bundle import pydev_log + */ + __pyx_t_2 = __Pyx_Import(__pyx_n_s_traceback, 0, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 133, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_traceback, __pyx_t_2) < 0) __PYX_ERR(0, 133, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":135 + * import traceback # @Reimport + * + * from _pydev_bundle import pydev_log # <<<<<<<<<<<<<< + * from _pydevd_bundle import pydevd_dont_trace + * from _pydevd_bundle import pydevd_vars + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_pydev_log); + __Pyx_GIVEREF(__pyx_n_s_pydev_log); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_pydev_log); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydev_bundle, __pyx_t_2, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_pydev_log); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydev_log, __pyx_t_2) < 0) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":136 + * + * from _pydev_bundle import pydev_log + * from _pydevd_bundle import pydevd_dont_trace # <<<<<<<<<<<<<< + * from _pydevd_bundle import pydevd_vars + * from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint + */ + __pyx_t_9 = PyList_New(1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_pydevd_dont_trace); + __Pyx_GIVEREF(__pyx_n_s_pydevd_dont_trace); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_pydevd_dont_trace); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle, __pyx_t_9, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_pydevd_dont_trace); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydevd_dont_trace, __pyx_t_9) < 0) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":137 + * from _pydev_bundle import pydev_log + * from _pydevd_bundle import pydevd_dont_trace + * from _pydevd_bundle import pydevd_vars # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint + * from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \ + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) 
__PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_pydevd_vars); + __Pyx_GIVEREF(__pyx_n_s_pydevd_vars); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_pydevd_vars); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydevd_bundle, __pyx_t_2, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_pydevd_vars); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydevd_vars, __pyx_t_2) < 0) __PYX_ERR(0, 137, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":138 + * from _pydevd_bundle import pydevd_dont_trace + * from _pydevd_bundle import pydevd_vars + * from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \ + * CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE + */ + __pyx_t_9 = PyList_New(1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 138, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_get_exception_breakpoint); + __Pyx_GIVEREF(__pyx_n_s_get_exception_breakpoint); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_get_exception_breakpoint); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_breakpoint, __pyx_t_9, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 138, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_exception_breakpoint); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 138, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_exception_breakpoint, __pyx_t_9) < 0) __PYX_ERR(0, 138, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":139 + * from _pydevd_bundle import pydevd_vars + * from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint + * from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \ # <<<<<<<<<<<<<< + * CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE + * from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, get_thread_id, STATE_RUN, dict_iter_values, IS_PY3K, \ + */ + __pyx_t_2 = PyList_New(9); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION); + __Pyx_GIVEREF(__pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION); + __Pyx_INCREF(__pyx_n_s_CMD_STEP_RETURN); + __Pyx_GIVEREF(__pyx_n_s_CMD_STEP_RETURN); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_CMD_STEP_RETURN); + __Pyx_INCREF(__pyx_n_s_CMD_STEP_OVER); + __Pyx_GIVEREF(__pyx_n_s_CMD_STEP_OVER); + PyList_SET_ITEM(__pyx_t_2, 2, __pyx_n_s_CMD_STEP_OVER); + __Pyx_INCREF(__pyx_n_s_CMD_SET_BREAK); + __Pyx_GIVEREF(__pyx_n_s_CMD_SET_BREAK); + PyList_SET_ITEM(__pyx_t_2, 3, __pyx_n_s_CMD_SET_BREAK); + __Pyx_INCREF(__pyx_n_s_CMD_STEP_INTO); + __Pyx_GIVEREF(__pyx_n_s_CMD_STEP_INTO); + PyList_SET_ITEM(__pyx_t_2, 4, __pyx_n_s_CMD_STEP_INTO); + __Pyx_INCREF(__pyx_n_s_CMD_SMART_STEP_INTO); + __Pyx_GIVEREF(__pyx_n_s_CMD_SMART_STEP_INTO); + 
PyList_SET_ITEM(__pyx_t_2, 5, __pyx_n_s_CMD_SMART_STEP_INTO); + __Pyx_INCREF(__pyx_n_s_CMD_RUN_TO_LINE); + __Pyx_GIVEREF(__pyx_n_s_CMD_RUN_TO_LINE); + PyList_SET_ITEM(__pyx_t_2, 6, __pyx_n_s_CMD_RUN_TO_LINE); + __Pyx_INCREF(__pyx_n_s_CMD_SET_NEXT_STATEMENT); + __Pyx_GIVEREF(__pyx_n_s_CMD_SET_NEXT_STATEMENT); + PyList_SET_ITEM(__pyx_t_2, 7, __pyx_n_s_CMD_SET_NEXT_STATEMENT); + __Pyx_INCREF(__pyx_n_s_CMD_STEP_INTO_MY_CODE); + __Pyx_GIVEREF(__pyx_n_s_CMD_STEP_INTO_MY_CODE); + PyList_SET_ITEM(__pyx_t_2, 8, __pyx_n_s_CMD_STEP_INTO_MY_CODE); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_comm, __pyx_t_2, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_STEP_CAUGHT_EXCEPTION, __pyx_t_2) < 0) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_STEP_RETURN); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_STEP_RETURN, __pyx_t_2) < 0) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_STEP_OVER); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_STEP_OVER, __pyx_t_2) < 0) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_SET_BREAK); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_SET_BREAK, __pyx_t_2) < 0) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_STEP_INTO); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_STEP_INTO, __pyx_t_2) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_SMART_STEP_INTO); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_SMART_STEP_INTO, __pyx_t_2) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_RUN_TO_LINE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_RUN_TO_LINE, __pyx_t_2) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_SET_NEXT_STATEMENT); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_SET_NEXT_STATEMENT, __pyx_t_2) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CMD_STEP_INTO_MY_CODE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 139, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CMD_STEP_INTO_MY_CODE, __pyx_t_2) < 0) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + 
__Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":141 + * from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \ + * CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE + * from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, get_thread_id, STATE_RUN, dict_iter_values, IS_PY3K, \ # <<<<<<<<<<<<<< + * RETURN_VALUES_DICT + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE + */ + __pyx_t_9 = PyList_New(6); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_STATE_SUSPEND); + __Pyx_GIVEREF(__pyx_n_s_STATE_SUSPEND); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_STATE_SUSPEND); + __Pyx_INCREF(__pyx_n_s_get_thread_id); + __Pyx_GIVEREF(__pyx_n_s_get_thread_id); + PyList_SET_ITEM(__pyx_t_9, 1, __pyx_n_s_get_thread_id); + __Pyx_INCREF(__pyx_n_s_STATE_RUN); + __Pyx_GIVEREF(__pyx_n_s_STATE_RUN); + PyList_SET_ITEM(__pyx_t_9, 2, __pyx_n_s_STATE_RUN); + __Pyx_INCREF(__pyx_n_s_dict_iter_values); + __Pyx_GIVEREF(__pyx_n_s_dict_iter_values); + PyList_SET_ITEM(__pyx_t_9, 3, __pyx_n_s_dict_iter_values); + __Pyx_INCREF(__pyx_n_s_IS_PY3K); + __Pyx_GIVEREF(__pyx_n_s_IS_PY3K); + PyList_SET_ITEM(__pyx_t_9, 4, __pyx_n_s_IS_PY3K); + __Pyx_INCREF(__pyx_n_s_RETURN_VALUES_DICT); + __Pyx_GIVEREF(__pyx_n_s_RETURN_VALUES_DICT); + PyList_SET_ITEM(__pyx_t_9, 5, __pyx_n_s_RETURN_VALUES_DICT); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_9, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_STATE_SUSPEND); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_STATE_SUSPEND, __pyx_t_9) < 0) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_thread_id); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_thread_id, __pyx_t_9) < 0) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_STATE_RUN); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_STATE_RUN, __pyx_t_9) < 0) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_dict_iter_values); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_dict_iter_values, __pyx_t_9) < 0) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_IS_PY3K); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_IS_PY3K, __pyx_t_9) < 0) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_RETURN_VALUES_DICT); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 141, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_RETURN_VALUES_DICT, __pyx_t_9) < 0) __PYX_ERR(0, 142, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_2); 
__pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":143 + * from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, get_thread_id, STATE_RUN, dict_iter_values, IS_PY3K, \ + * RETURN_VALUES_DICT + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + */ + __pyx_t_2 = PyList_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_DONT_TRACE); + __Pyx_GIVEREF(__pyx_n_s_DONT_TRACE); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_DONT_TRACE); + __Pyx_INCREF(__pyx_n_s_PYDEV_FILE); + __Pyx_GIVEREF(__pyx_n_s_PYDEV_FILE); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_n_s_PYDEV_FILE); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_dont_trace, __pyx_t_2, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_DONT_TRACE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DONT_TRACE, __pyx_t_2) < 0) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_PYDEV_FILE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_PYDEV_FILE, __pyx_t_2) < 0) __PYX_ERR(0, 143, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":144 + * RETURN_VALUES_DICT + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE + * from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + */ + __pyx_t_9 = PyList_New(2); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_add_exception_to_frame); + __Pyx_GIVEREF(__pyx_n_s_add_exception_to_frame); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_add_exception_to_frame); + __Pyx_INCREF(__pyx_n_s_just_raised); + __Pyx_GIVEREF(__pyx_n_s_just_raised); + PyList_SET_ITEM(__pyx_t_9, 1, __pyx_n_s_just_raised); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_frame_util, __pyx_t_9, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_add_exception_to_frame); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_add_exception_to_frame, __pyx_t_9) < 0) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_just_raised); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_just_raised, __pyx_t_9) < 0) __PYX_ERR(0, 144, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":145 + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE + * from _pydevd_bundle.pydevd_frame_utils import 
add_exception_to_frame, just_raised + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code # <<<<<<<<<<<<<< + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * try: + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 145, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_get_clsname_for_code); + __Pyx_GIVEREF(__pyx_n_s_get_clsname_for_code); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_get_clsname_for_code); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_utils, __pyx_t_2, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 145, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_get_clsname_for_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 145, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_clsname_for_code, __pyx_t_2) < 0) __PYX_ERR(0, 145, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":146 + * from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame # <<<<<<<<<<<<<< + * try: + * from inspect import CO_GENERATOR + */ + __pyx_t_9 = PyList_New(1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_GIVEREF(__pyx_n_s_get_abs_path_real_path_and_base); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_get_abs_path_real_path_and_base); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_file_utils, __pyx_t_9, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_abs_path_real_path_and_base, __pyx_t_9) < 0) __PYX_ERR(0, 146, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":147 + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * try: # <<<<<<<<<<<<<< + * from inspect import CO_GENERATOR + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_7, &__pyx_t_6, &__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_5); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":148 + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * try: + * from inspect import CO_GENERATOR # <<<<<<<<<<<<<< + * except: + * CO_GENERATOR = 0 + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 148, __pyx_L12_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_CO_GENERATOR); + __Pyx_GIVEREF(__pyx_n_s_CO_GENERATOR); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_CO_GENERATOR); + __pyx_t_9 = __Pyx_patch_inspect(__Pyx_Import(__pyx_n_s_inspect, __pyx_t_2, -1)); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 148, __pyx_L12_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_CO_GENERATOR); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 148, __pyx_L12_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CO_GENERATOR, __pyx_t_2) < 0) __PYX_ERR(0, 148, __pyx_L12_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":147 + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * try: # <<<<<<<<<<<<<< + * from inspect import CO_GENERATOR + * except: + */ + } + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L17_try_end; + __pyx_L12_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":149 + * try: + * from inspect import CO_GENERATOR + * except: # <<<<<<<<<<<<<< + * CO_GENERATOR = 0 + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_9, &__pyx_t_2, &__pyx_t_11) < 0) __PYX_ERR(0, 149, __pyx_L14_except_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_11); + + /* "_pydevd_bundle/pydevd_cython.pyx":150 + * from inspect import CO_GENERATOR + * except: + * CO_GENERATOR = 0 # <<<<<<<<<<<<<< + * + * try: + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_CO_GENERATOR, __pyx_int_0) < 0) __PYX_ERR(0, 150, __pyx_L14_except_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + goto __pyx_L13_exception_handled; + } + __pyx_L14_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":147 + * from _pydevd_bundle.pydevd_utils import get_clsname_for_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame + * try: # <<<<<<<<<<<<<< + * from inspect import CO_GENERATOR + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_6, __pyx_t_5); + goto __pyx_L1_error; + __pyx_L13_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_6, __pyx_t_5); + __pyx_L17_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":152 + * CO_GENERATOR = 0 + * + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace + * except ImportError: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_7); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":153 + * + * try: + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace # <<<<<<<<<<<<<< + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): + */ + __pyx_t_11 = PyList_New(2); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 153, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_INCREF(__pyx_n_s_send_signature_call_trace); + __Pyx_GIVEREF(__pyx_n_s_send_signature_call_trace); + PyList_SET_ITEM(__pyx_t_11, 0, 
__pyx_n_s_send_signature_call_trace); + __Pyx_INCREF(__pyx_n_s_send_signature_return_trace); + __Pyx_GIVEREF(__pyx_n_s_send_signature_return_trace); + PyList_SET_ITEM(__pyx_t_11, 1, __pyx_n_s_send_signature_return_trace); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_signature, __pyx_t_11, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 153, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_send_signature_call_trace); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 153, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_send_signature_call_trace, __pyx_t_11) < 0) __PYX_ERR(0, 153, __pyx_L20_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_send_signature_return_trace); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 153, __pyx_L20_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_send_signature_return_trace, __pyx_t_11) < 0) __PYX_ERR(0, 153, __pyx_L20_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":152 + * CO_GENERATOR = 0 + * + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace + * except ImportError: + */ + } + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L25_try_end; + __pyx_L20_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":154 + * try: + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace + * except ImportError: # <<<<<<<<<<<<<< + * def send_signature_call_trace(*args, **kwargs): + * pass + */ + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_ImportError); + if (__pyx_t_8) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_11, &__pyx_t_9) < 0) __PYX_ERR(0, 154, __pyx_L22_except_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_t_9); + + /* "_pydevd_bundle/pydevd_cython.pyx":155 + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): # <<<<<<<<<<<<<< + * pass + * + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_5send_signature_call_trace, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 155, __pyx_L22_except_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_send_signature_call_trace, __pyx_t_1) < 0) __PYX_ERR(0, 155, __pyx_L22_except_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L21_exception_handled; + } + goto __pyx_L22_except_error; + __pyx_L22_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":152 + * CO_GENERATOR = 0 + * + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace 
+ * except ImportError: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + goto __pyx_L1_error; + __pyx_L21_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_5, __pyx_t_6, __pyx_t_7); + __pyx_L25_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":158 + * pass + * + * basename = os.path.basename # <<<<<<<<<<<<<< + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_os); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 158, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_path); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 158, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_11, __pyx_n_s_basename); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 158, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_basename, __pyx_t_9) < 0) __PYX_ERR(0, 158, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":160 + * basename = os.path.basename + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') # <<<<<<<<<<<<<< + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_re); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_compile); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_11, __pyx_tuple__14, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_IGNORE_EXCEPTION_TAG, __pyx_t_9) < 0) __PYX_ERR(0, 160, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":161 + * + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') # <<<<<<<<<<<<<< + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DEBUG_START, __pyx_tuple__15) < 0) __PYX_ERR(0, 161, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":162 + * IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') # <<<<<<<<<<<<<< + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + * get_file_type = DONT_TRACE.get + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DEBUG_START_PY3K, __pyx_tuple__16) < 0) __PYX_ERR(0, 162, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":163 + * DEBUG_START = ('pydevd.py', 'run') + * DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + * TRACE_PROPERTY = 'pydevd_traceproperty.py' # <<<<<<<<<<<<<< + * get_file_type = DONT_TRACE.get + * + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_TRACE_PROPERTY, __pyx_kp_s_pydevd_traceproperty_py) < 0) __PYX_ERR(0, 163, __pyx_L1_error) + + /* "_pydevd_bundle/pydevd_cython.pyx":164 + * 
DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') + * TRACE_PROPERTY = 'pydevd_traceproperty.py' + * get_file_type = DONT_TRACE.get # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_DONT_TRACE); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_11 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_get); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_file_type, __pyx_t_11) < 0) __PYX_ERR(0, 164, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":167 + * + * + * def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): # <<<<<<<<<<<<<< + * condition = breakpoint.condition + * try: + */ + __pyx_t_11 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_7handle_breakpoint_condition, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 167, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_handle_breakpoint_condition, __pyx_t_11) < 0) __PYX_ERR(0, 167, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":203 + * + * + * def handle_breakpoint_expression(breakpoint, info, new_frame): # <<<<<<<<<<<<<< + * try: + * try: + */ + __pyx_t_11 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_9handle_breakpoint_expression, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_handle_breakpoint_expression, __pyx_t_11) < 0) __PYX_ERR(0, 203, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":232 + * #Same thing in the main debugger but only considering the file contents, while the one in the main debugger + * #considers the user input (so, the actual result must be a join of both). + * filename_to_lines_where_exceptions_are_ignored = {} # <<<<<<<<<<<<<< + * filename_to_stat_info = {} + * + */ + __pyx_t_11 = PyDict_New(); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 232, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame->tp_dict, __pyx_n_s_filename_to_lines_where_exceptio, __pyx_t_11) < 0) __PYX_ERR(0, 232, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + PyType_Modified(__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame); + + /* "_pydevd_bundle/pydevd_cython.pyx":233 + * #considers the user input (so, the actual result must be a join of both). 
+ * filename_to_lines_where_exceptions_are_ignored = {} + * filename_to_stat_info = {} # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __pyx_t_11 = PyDict_New(); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 233, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame->tp_dict, __pyx_n_s_filename_to_stat_info, __pyx_t_11) < 0) __PYX_ERR(0, 233, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + PyType_Modified(__pyx_ptype_14_pydevd_bundle_13pydevd_cython_PyDBFrame); + + /* "_pydevd_bundle/pydevd_cython.pyx":910 + * #end trace_dispatch + * + * import traceback # <<<<<<<<<<<<<< + * + * from _pydev_bundle.pydev_is_thread_alive import is_thread_alive + */ + __pyx_t_11 = __Pyx_Import(__pyx_n_s_traceback, 0, -1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 910, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_traceback, __pyx_t_11) < 0) __PYX_ERR(0, 910, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":912 + * import traceback + * + * from _pydev_bundle.pydev_is_thread_alive import is_thread_alive # <<<<<<<<<<<<<< + * from _pydev_imps._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON + */ + __pyx_t_11 = PyList_New(1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 912, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_INCREF(__pyx_n_s_is_thread_alive); + __Pyx_GIVEREF(__pyx_n_s_is_thread_alive); + PyList_SET_ITEM(__pyx_t_11, 0, __pyx_n_s_is_thread_alive); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydev_bundle_pydev_is_thread_al, __pyx_t_11, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 912, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_is_thread_alive); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 912, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_thread_alive, __pyx_t_11) < 0) __PYX_ERR(0, 912, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":913 + * + * from _pydev_bundle.pydev_is_thread_alive import is_thread_alive + * from _pydev_imps._pydev_saved_modules import threading # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + */ + __pyx_t_9 = PyList_New(1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 913, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_threading); + __Pyx_GIVEREF(__pyx_n_s_threading); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_threading); + __pyx_t_11 = __Pyx_Import(__pyx_n_s_pydev_imps__pydev_saved_modules, __pyx_t_9, -1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 913, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_11, __pyx_n_s_threading); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 913, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_threading, __pyx_t_9) < 0) __PYX_ERR(0, 913, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":914 + * from _pydev_bundle.pydev_is_thread_alive import is_thread_alive + * from _pydev_imps._pydev_saved_modules import threading + * from 
_pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + * from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads + */ + __pyx_t_11 = PyList_New(2); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 914, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_INCREF(__pyx_n_s_get_thread_id); + __Pyx_GIVEREF(__pyx_n_s_get_thread_id); + PyList_SET_ITEM(__pyx_t_11, 0, __pyx_n_s_get_thread_id); + __Pyx_INCREF(__pyx_n_s_IS_IRONPYTHON); + __Pyx_GIVEREF(__pyx_n_s_IS_IRONPYTHON); + PyList_SET_ITEM(__pyx_t_11, 1, __pyx_n_s_IS_IRONPYTHON); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_constants, __pyx_t_11, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 914, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_get_thread_id); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 914, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_thread_id, __pyx_t_11) < 0) __PYX_ERR(0, 914, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_IS_IRONPYTHON); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 914, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_IS_IRONPYTHON, __pyx_t_11) < 0) __PYX_ERR(0, 914, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":915 + * from _pydev_imps._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + */ + __pyx_t_9 = PyList_New(1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 915, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_DONT_TRACE); + __Pyx_GIVEREF(__pyx_n_s_DONT_TRACE); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_DONT_TRACE); + __pyx_t_11 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_dont_trace, __pyx_t_9, -1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 915, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_11, __pyx_n_s_DONT_TRACE); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 915, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DONT_TRACE, __pyx_t_9) < 0) __PYX_ERR(0, 915, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":916 + * from _pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + * from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads # <<<<<<<<<<<<<< + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + * from pydevd_tracing import SetTrace + */ + __pyx_t_11 = PyList_New(1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 916, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_INCREF(__pyx_n_s_kill_all_pydev_threads); + __Pyx_GIVEREF(__pyx_n_s_kill_all_pydev_threads); + PyList_SET_ITEM(__pyx_t_11, 0, __pyx_n_s_kill_all_pydev_threads); + __pyx_t_9 = 
__Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_kill_all_p, __pyx_t_11, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 916, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_kill_all_pydev_threads); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 916, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_kill_all_pydev_threads, __pyx_t_11) < 0) __PYX_ERR(0, 916, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":917 + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + * from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER # <<<<<<<<<<<<<< + * from pydevd_tracing import SetTrace + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __pyx_t_9 = PyList_New(2); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 917, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_GIVEREF(__pyx_n_s_get_abs_path_real_path_and_base); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_INCREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __Pyx_GIVEREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + PyList_SET_ITEM(__pyx_t_9, 1, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __pyx_t_11 = __Pyx_Import(__pyx_n_s_pydevd_file_utils, __pyx_t_9, -1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 917, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_11, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 917, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_abs_path_real_path_and_base, __pyx_t_9) < 0) __PYX_ERR(0, 917, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_11, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 917, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_t_9) < 0) __PYX_ERR(0, 917, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":918 + * from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + * from pydevd_tracing import SetTrace # <<<<<<<<<<<<<< + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + * # In Cython, PyDBAdditionalThreadInfo is bundled in the file. 
+ */ + __pyx_t_11 = PyList_New(1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 918, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_INCREF(__pyx_n_s_SetTrace); + __Pyx_GIVEREF(__pyx_n_s_SetTrace); + PyList_SET_ITEM(__pyx_t_11, 0, __pyx_n_s_SetTrace); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydevd_tracing, __pyx_t_11, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 918, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __pyx_t_11 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_SetTrace); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 918, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_11); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_SetTrace, __pyx_t_11) < 0) __PYX_ERR(0, 918, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":928 + * # ENDIF + * + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace + * except ImportError: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_7, &__pyx_t_6, &__pyx_t_5); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_6); + __Pyx_XGOTREF(__pyx_t_5); + /*try:*/ { + + /* "_pydevd_bundle/pydevd_cython.pyx":929 + * + * try: + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace # <<<<<<<<<<<<<< + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): + */ + __pyx_t_9 = PyList_New(1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 929, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_INCREF(__pyx_n_s_send_signature_call_trace); + __Pyx_GIVEREF(__pyx_n_s_send_signature_call_trace); + PyList_SET_ITEM(__pyx_t_9, 0, __pyx_n_s_send_signature_call_trace); + __pyx_t_11 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_signature, __pyx_t_9, -1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 929, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __pyx_t_9 = __Pyx_ImportFrom(__pyx_t_11, __pyx_n_s_send_signature_call_trace); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 929, __pyx_L28_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_send_signature_call_trace, __pyx_t_9) < 0) __PYX_ERR(0, 929, __pyx_L28_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":928 + * # ENDIF + * + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace + * except ImportError: + */ + } + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; + goto __pyx_L33_try_end; + __pyx_L28_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_11); __pyx_t_11 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":930 + * try: + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace + * except ImportError: # <<<<<<<<<<<<<< + * def send_signature_call_trace(*args, **kwargs): + * pass + */ + __pyx_t_8 = __Pyx_PyErr_ExceptionMatches(__pyx_builtin_ImportError); + if (__pyx_t_8) { + __Pyx_AddTraceback("_pydevd_bundle.pydevd_cython", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_11, &__pyx_t_9, &__pyx_t_2) < 0) __PYX_ERR(0, 930, __pyx_L30_except_error) + __Pyx_GOTREF(__pyx_t_11); + __Pyx_GOTREF(__pyx_t_9); + __Pyx_GOTREF(__pyx_t_2); + 
+ /* "_pydevd_bundle/pydevd_cython.pyx":931 + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace + * except ImportError: + * def send_signature_call_trace(*args, **kwargs): # <<<<<<<<<<<<<< + * pass + * + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_11send_signature_call_trace, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 931, __pyx_L30_except_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_send_signature_call_trace, __pyx_t_1) < 0) __PYX_ERR(0, 931, __pyx_L30_except_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L29_exception_handled; + } + goto __pyx_L30_except_error; + __pyx_L30_except_error:; + + /* "_pydevd_bundle/pydevd_cython.pyx":928 + * # ENDIF + * + * try: # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_signature import send_signature_call_trace + * except ImportError: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_6, __pyx_t_5); + goto __pyx_L1_error; + __pyx_L29_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_6); + __Pyx_XGIVEREF(__pyx_t_5); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_6, __pyx_t_5); + __pyx_L33_try_end:; + } + + /* "_pydevd_bundle/pydevd_cython.pyx":934 + * pass + * + * threadingCurrentThread = threading.currentThread # <<<<<<<<<<<<<< + * get_file_type = DONT_TRACE.get + * + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_threading); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 934, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_currentThread); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 934, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_threadingCurrentThread, __pyx_t_9) < 0) __PYX_ERR(0, 934, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":935 + * + * threadingCurrentThread = threading.currentThread + * get_file_type = DONT_TRACE.get # <<<<<<<<<<<<<< + * + * # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_DONT_TRACE); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 935, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_get); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 935, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_file_type, __pyx_t_2) < 0) __PYX_ERR(0, 935, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":948 + * # - Breakpoints are changed + * # It can be used when running regularly (without step over/step in/step return) + * global_cache_skips = {} # <<<<<<<<<<<<<< + * global_cache_frame_skips = {} + * + */ + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 948, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_global_cache_skips, __pyx_t_2) < 0) __PYX_ERR(0, 948, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":949 + * # It can be used when running regularly (without step over/step in/step return) + * 
global_cache_skips = {} + * global_cache_frame_skips = {} # <<<<<<<<<<<<<< + * + * def trace_dispatch(py_db, frame, event, arg): + */ + __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 949, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_global_cache_frame_skips, __pyx_t_2) < 0) __PYX_ERR(0, 949, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":951 + * global_cache_frame_skips = {} + * + * def trace_dispatch(py_db, frame, event, arg): # <<<<<<<<<<<<<< + * t = threadingCurrentThread() + * + */ + __pyx_t_2 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_13trace_dispatch, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 951, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_trace_dispatch, __pyx_t_2) < 0) __PYX_ERR(0, 951, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1121 + * + * + * if IS_IRONPYTHON: # <<<<<<<<<<<<<< + * # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + * # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_IS_IRONPYTHON); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1121, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 1121, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__pyx_t_4) { + + /* "_pydevd_bundle/pydevd_cython.pyx":1129 + * # + * # See: https://github.com/IronLanguages/main/issues/1630 + * from _pydevd_bundle.pydevd_additional_thread_info_regular import _tid_to_last_frame # <<<<<<<<<<<<<< + * + * _original_call = ThreadTracer.__call__ + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_tid_to_last_frame); + __Pyx_GIVEREF(__pyx_n_s_tid_to_last_frame); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_tid_to_last_frame); + __pyx_t_9 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_additional, __pyx_t_2, -1); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_9, __pyx_n_s_tid_to_last_frame); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 1129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_tid_to_last_frame, __pyx_t_2) < 0) __PYX_ERR(0, 1129, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1131 + * from _pydevd_bundle.pydevd_additional_thread_info_regular import _tid_to_last_frame + * + * _original_call = ThreadTracer.__call__ # <<<<<<<<<<<<<< + * + * def __call__(self, frame, event, arg): + */ + __pyx_t_9 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_n_s_call_2); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_original_call, __pyx_t_9) < 0) __PYX_ERR(0, 1131, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1133 + * _original_call = ThreadTracer.__call__ + * + * def __call__(self, frame, event, arg): # <<<<<<<<<<<<<< + * _tid_to_last_frame[self._args[1].ident] = frame + * 
return _original_call(self, frame, event, arg) + */ + __pyx_t_9 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_15__call__, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1133, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_call_2, __pyx_t_9) < 0) __PYX_ERR(0, 1133, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1137 + * return _original_call(self, frame, event, arg) + * + * ThreadTracer.__call__ = __call__ # <<<<<<<<<<<<<< + * + */ + __pyx_t_9 = __Pyx_GetModuleGlobalName(__pyx_n_s_call_2); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1137, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (__Pyx_PyObject_SetAttrStr(((PyObject *)__pyx_ptype_14_pydevd_bundle_13pydevd_cython_ThreadTracer), __pyx_n_s_call_2, __pyx_t_9) < 0) __PYX_ERR(0, 1137, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1121 + * + * + * if IS_IRONPYTHON: # <<<<<<<<<<<<<< + * # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + * # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + */ + } + + /* "(tree fragment)":1 + * def __pyx_unpickle_PyDBAdditionalThreadInfo(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0xa9a4341: + * from pickle import PickleError + */ + __pyx_t_9 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_17__pyx_unpickle_PyDBAdditionalThreadInfo, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_9)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_PyDBAdditionalThr, __pyx_t_9) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "(tree fragment)":9 + * __pyx_unpickle_PyDBAdditionalThreadInfo__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_PyDBAdditionalThreadInfo__set_state(PyDBAdditionalThreadInfo result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result.conditional_breakpoint_exception = __pyx_state[0]; result.is_tracing = __pyx_state[1]; result.pydev_call_from_jinja2 = __pyx_state[2]; result.pydev_call_inside_jinja2 = __pyx_state[3]; result.pydev_django_resolve_frame = __pyx_state[4]; result.pydev_func_name = __pyx_state[5]; result.pydev_message = __pyx_state[6]; result.pydev_next_line = __pyx_state[7]; result.pydev_notify_kill = __pyx_state[8]; result.pydev_smart_step_stop = __pyx_state[9]; result.pydev_state = __pyx_state[10]; result.pydev_step_cmd = __pyx_state[11]; result.pydev_step_stop = __pyx_state[12]; result.suspend_type = __pyx_state[13] + * if hasattr(result, '__dict__'): + */ + __pyx_t_9 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_19__pyx_unpickle_PyDBFrame, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_9)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_PyDBFrame, __pyx_t_9) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "(tree fragment)":1 + * def __pyx_unpickle_SafeCallWrapper(__pyx_type, long __pyx_checksum, __pyx_state): # <<<<<<<<<<<<<< + * if __pyx_checksum != 0x77c077b: + * from pickle import PickleError + */ + __pyx_t_9 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_21__pyx_unpickle_SafeCallWrapper, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if 
(unlikely(!__pyx_t_9)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_SafeCallWrapper, __pyx_t_9) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "(tree fragment)":9 + * __pyx_unpickle_SafeCallWrapper__set_state( result, __pyx_state) + * return result + * cdef __pyx_unpickle_SafeCallWrapper__set_state(SafeCallWrapper result, tuple __pyx_state): # <<<<<<<<<<<<<< + * result.method_object = __pyx_state[0] + * if hasattr(result, '__dict__'): + */ + __pyx_t_9 = PyCFunction_NewEx(&__pyx_mdef_14_pydevd_bundle_13pydevd_cython_23__pyx_unpickle_ThreadTracer, NULL, __pyx_n_s_pydevd_bundle_pydevd_cython); if (unlikely(!__pyx_t_9)) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pyx_unpickle_ThreadTracer, __pyx_t_9) < 0) __PYX_ERR(1, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /* "_pydevd_bundle/pydevd_cython.pyx":1 + * # Important: Autogenerated file. # <<<<<<<<<<<<<< + * + * # DO NOT edit manually! + */ + __pyx_t_9 = PyDict_New(); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_9); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_9) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_9); + __Pyx_XDECREF(__pyx_t_10); + __Pyx_XDECREF(__pyx_t_11); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init _pydevd_bundle.pydevd_cython", 0, __pyx_lineno, __pyx_filename); + } + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init _pydevd_bundle.pydevd_cython"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + return __pyx_m; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* GetModuleGlobalName */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS + result = PyDict_GetItem(__pyx_d, name); + if (likely(result)) { + Py_INCREF(result); + } else { +#else + result = PyObject_GetItem(__pyx_d, name); + if (!result) { + PyErr_Clear(); +#endif + result = __Pyx_GetBuiltinName(name); + } + return result; +} + +/* PyCFunctionFastCall */ + #if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + int flags = 
PyCFunction_GET_FLAGS(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) { + return (*((__Pyx_PyCFunctionFastWithKeywords)meth)) (self, args, nargs, NULL); + } else { + return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs); + } +} +#endif + +/* PyFunctionFastCall */ + #if CYTHON_FAST_PYCALL +#include "frameobject.h" +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = PyThreadState_GET(); + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. + */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = f->f_localsplus; + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? 
PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif +#endif + +/* PyObjectCall */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); 
+ } +#endif + if (likely(PyCFunction_Check(func))) { + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyObjectCallNoArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || PyObject_TypeCheck(func, __pyx_CyFunctionType))) { +#else + if (likely(PyCFunction_Check(func))) { +#endif + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); +} +#endif + +/* RaiseArgTupleInvalid */ + static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* KeywordStringCheck */ + static CYTHON_INLINE int __Pyx_CheckKeywordStrings( + PyObject *kwdict, + const char* function_name, + int kw_allowed) +{ + PyObject* key = 0; + Py_ssize_t pos = 0; +#if CYTHON_COMPILING_IN_PYPY + if (!kw_allowed && PyDict_Next(kwdict, &pos, &key, 0)) + goto invalid_keyword; + return 1; +#else + while (PyDict_Next(kwdict, &pos, &key, 0)) { + #if PY_MAJOR_VERSION < 3 + if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) + #endif + if (unlikely(!PyUnicode_Check(key))) + goto invalid_keyword_type; + } + if ((!kw_allowed) && unlikely(key)) + goto invalid_keyword; + return 1; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + return 0; +#endif +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif + return 0; +} + +/* GetAttr */ + static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_COMPILING_IN_CPYTHON +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* GetAttr3 */ + static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + goto bad; + PyErr_Clear(); + r = d; + Py_INCREF(d); + } + return r; +bad: + return NULL; +} + +/* RaiseDoubleKeywords */ + static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ + static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* SaveResetException */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* GetException */ + #if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) { +#endif + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + 
*type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* HasAttr */ + static CYTHON_INLINE int __Pyx_HasAttr(PyObject *o, PyObject *n) { + PyObject *r; + if (unlikely(!__Pyx_PyBaseString_Check(n))) { + PyErr_SetString(PyExc_TypeError, + "hasattr(): attribute name must be string"); + return -1; + } + r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + PyErr_Clear(); + return 0; + } else { + Py_DECREF(r); + return 1; + } +} + +/* RaiseTooManyValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? "" : "s"); +} + +/* IterFinish */ + static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = PyThreadState_GET(); + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ + static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* StringJoin */ + #if !CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyBytes_Join(PyObject* sep, PyObject* values) { + return PyObject_CallMethodObjArgs(sep, __pyx_n_s_join, values, NULL); +} +#endif + +/* PyErrFetchRestore */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* SwapException */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSwap(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = *type; + tstate->exc_value = *value; + tstate->exc_traceback = *tb; + 
*type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#else +static CYTHON_INLINE void __Pyx_ExceptionSwap(PyObject **type, PyObject **value, PyObject **tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_GetExcInfo(&tmp_type, &tmp_value, &tmp_tb); + PyErr_SetExcInfo(*type, *value, *tb); + *type = tmp_type; + *value = tmp_value; + *tb = tmp_tb; +} +#endif + +/* GetItemInt */ + static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyList_GET_SIZE(o); + } + if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + Py_ssize_t wrapped_i = i; + if (wraparound & unlikely(i < 0)) { + wrapped_i += PyTuple_GET_SIZE(o); + } + if ((!boundscheck) || likely((0 <= wrapped_i) & (wrapped_i < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, wrapped_i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? 
i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* None */ + static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) { + PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname); +} + +/* ArgTypeTest */ + static void __Pyx_RaiseArgumentTypeInvalid(const char* name, PyObject *obj, PyTypeObject *type) { + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); +} +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (likely(Py_TYPE(obj) == type)) return 1; + #if PY_MAJOR_VERSION == 2 + else if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(PyObject_TypeCheck(obj, type))) return 1; + } + __Pyx_RaiseArgumentTypeInvalid(name, obj, type); + return 0; +} + +/* BytesEquals */ + static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else + if (s1 == s2) { + return (equals == Py_EQ); + } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) { + const char *ps1, *ps2; + Py_ssize_t length = PyBytes_GET_SIZE(s1); + if (length != PyBytes_GET_SIZE(s2)) + return (equals == Py_NE); + ps1 = PyBytes_AS_STRING(s1); + ps2 = PyBytes_AS_STRING(s2); + if (ps1[0] != ps2[0]) { + return (equals == Py_NE); + } else if (length == 1) { + return (equals == Py_EQ); + } else { + int result; +#if CYTHON_USE_UNICODE_INTERNALS + Py_hash_t hash1, hash2; + hash1 = ((PyBytesObject*)s1)->ob_shash; + hash2 = ((PyBytesObject*)s2)->ob_shash; + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + return (equals == Py_NE); + } +#endif + result = memcmp(ps1, ps2, (size_t)length); + return (equals == Py_EQ) ? 
(result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) { + return (equals == Py_NE); + } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) { + return (equals == Py_NE); + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +#endif +} + +/* UnicodeEquals */ + static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { +#if CYTHON_COMPILING_IN_PYPY + return PyObject_RichCompareBool(s1, s2, equals); +#else +#if PY_MAJOR_VERSION < 3 + PyObject* owned_ref = NULL; +#endif + int s1_is_unicode, s2_is_unicode; + if (s1 == s2) { + goto return_eq; + } + s1_is_unicode = PyUnicode_CheckExact(s1); + s2_is_unicode = PyUnicode_CheckExact(s2); +#if PY_MAJOR_VERSION < 3 + if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) { + owned_ref = PyUnicode_FromObject(s2); + if (unlikely(!owned_ref)) + return -1; + s2 = owned_ref; + s2_is_unicode = 1; + } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) { + owned_ref = PyUnicode_FromObject(s1); + if (unlikely(!owned_ref)) + return -1; + s1 = owned_ref; + s1_is_unicode = 1; + } else if (((!s2_is_unicode) & (!s1_is_unicode))) { + return __Pyx_PyBytes_Equals(s1, s2, equals); + } +#endif + if (s1_is_unicode & s2_is_unicode) { + Py_ssize_t length; + int kind; + void *data1, *data2; + if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0)) + return -1; + length = __Pyx_PyUnicode_GET_LENGTH(s1); + if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) { + goto return_ne; + } +#if CYTHON_USE_UNICODE_INTERNALS + { + Py_hash_t hash1, hash2; + #if CYTHON_PEP393_ENABLED + hash1 = ((PyASCIIObject*)s1)->hash; + hash2 = ((PyASCIIObject*)s2)->hash; + #else + hash1 = ((PyUnicodeObject*)s1)->hash; + hash2 = ((PyUnicodeObject*)s2)->hash; + #endif + if (hash1 != hash2 && hash1 != -1 && hash2 != -1) { + goto return_ne; + } + } +#endif + kind = __Pyx_PyUnicode_KIND(s1); + if (kind != __Pyx_PyUnicode_KIND(s2)) { + goto return_ne; + } + data1 = __Pyx_PyUnicode_DATA(s1); + data2 = __Pyx_PyUnicode_DATA(s2); + if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) { + goto return_ne; + } else if (length == 1) { + goto return_eq; + } else { + int result = memcmp(data1, data2, (size_t)(length * kind)); + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ) ? 
(result == 0) : (result != 0); + } + } else if ((s1 == Py_None) & s2_is_unicode) { + goto return_ne; + } else if ((s2 == Py_None) & s1_is_unicode) { + goto return_ne; + } else { + int result; + PyObject* py_result = PyObject_RichCompare(s1, s2, equals); + if (!py_result) + return -1; + result = __Pyx_PyObject_IsTrue(py_result); + Py_DECREF(py_result); + return result; + } +return_eq: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_EQ); +return_ne: + #if PY_MAJOR_VERSION < 3 + Py_XDECREF(owned_ref); + #endif + return (equals == Py_NE); +#endif +} + +/* ExtTypeTest */ + static CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) { + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (likely(PyObject_TypeCheck(obj, type))) + return 1; + PyErr_Format(PyExc_TypeError, "Cannot convert %.200s to %.200s", + Py_TYPE(obj)->tp_name, type->tp_name); + return 0; +} + +/* RaiseNoneIterError */ + static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) { + PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); +} + +/* PyIntBinop */ + #if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_AndObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED int inplace) { + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long a = PyInt_AS_LONG(op1); + return PyInt_FromLong(a & b); + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + const long b = intval; + long a, x; +#ifdef HAVE_LONG_LONG + const PY_LONG_LONG llb = intval; + PY_LONG_LONG lla, llx; +#endif + const digit* digits = ((PyLongObject*)op1)->ob_digit; + const Py_ssize_t size = Py_SIZE(op1); + if (likely(__Pyx_sst_abs(size) <= 1)) { + a = likely(size) ? 
digits[0] : 0; + if (size == -1) a = -a; + } else { + switch (size) { + case -2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + case 2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + case -3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + case 3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + case -4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = -(PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + case 4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; +#ifdef HAVE_LONG_LONG + } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) { + lla = (PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0])); + goto long_long; +#endif + } + default: return PyLong_Type.tp_as_number->nb_and(op1, op2); + } + } + x = a & b; + return PyLong_FromLong(x); +#ifdef HAVE_LONG_LONG + long_long: + llx = lla & llb; + return PyLong_FromLongLong(llx); +#endif + + + } + #endif + return (inplace ? 
PyNumber_InPlaceAnd : PyNumber_And)(op1, op2); +} +#endif + +/* dict_getitem_default */ + static PyObject* __Pyx_PyDict_GetItemDefault(PyObject* d, PyObject* key, PyObject* default_value) { + PyObject* value; +#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY + value = PyDict_GetItemWithError(d, key); + if (unlikely(!value)) { + if (unlikely(PyErr_Occurred())) + return NULL; + value = default_value; + } + Py_INCREF(value); +#else + if (PyString_CheckExact(key) || PyUnicode_CheckExact(key) || PyInt_CheckExact(key)) { + value = PyDict_GetItem(d, key); + if (unlikely(!value)) { + value = default_value; + } + Py_INCREF(value); + } else { + if (default_value == Py_None) + default_value = NULL; + value = PyObject_CallMethodObjArgs( + d, __pyx_n_s_get, key, default_value, NULL); + } +#endif + return value; +} + +/* PyErrExceptionMatches */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) { + PyObject *exc_type = tstate->curexc_type; + if (exc_type == err) return 1; + if (unlikely(!exc_type)) return 0; + return PyErr_GivenExceptionMatches(exc_type, err); +} +#endif + +/* RaiseException */ + #if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + } else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; 
+ } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } +#if PY_VERSION_HEX >= 0x03030000 + if (cause) { +#else + if (cause && cause != Py_None) { +#endif + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = PyThreadState_GET(); + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* PyIntBinop */ + #if !CYTHON_COMPILING_IN_PYPY +static PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED int inplace) { + if (op1 == op2) { + Py_RETURN_TRUE; + } + #if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(op1))) { + const long b = intval; + long a = PyInt_AS_LONG(op1); + if (a == b) { + Py_RETURN_TRUE; + } else { + Py_RETURN_FALSE; + } + } + #endif + #if CYTHON_USE_PYLONG_INTERNALS + if (likely(PyLong_CheckExact(op1))) { + const long b = intval; + long a; + const digit* digits = ((PyLongObject*)op1)->ob_digit; + const Py_ssize_t size = Py_SIZE(op1); + if (likely(__Pyx_sst_abs(size) <= 1)) { + a = likely(size) ? 
digits[0] : 0; + if (size == -1) a = -a; + } else { + switch (size) { + case -2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case 2: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case -3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case 3: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case -4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + case 4: + if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])); + break; + } + #if PyLong_SHIFT < 30 && PyLong_SHIFT != 15 + default: return PyLong_Type.tp_richcompare(op1, op2, Py_EQ); + #else + default: Py_RETURN_FALSE; + #endif + } + } + if (a == b) { + Py_RETURN_TRUE; + } else { + Py_RETURN_FALSE; + } + } + #endif + if (PyFloat_CheckExact(op1)) { + const long b = intval; + double a = PyFloat_AS_DOUBLE(op1); + if ((double)a == (double)b) { + Py_RETURN_TRUE; + } else { + Py_RETURN_FALSE; + } + } + return PyObject_RichCompare(op1, op2, Py_EQ); +} +#endif + +/* Import */ + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(1); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + #endif + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_VERSION_HEX < 0x03030000 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + 
+/* ImportFrom */ + static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* SetupReduce */ + #define __Pyx_setup_reduce_GET_ATTR_OR_BAD(res, obj, name) res = PyObject_GetAttrString(obj, name); if (res == NULL) goto BAD; +static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) { + int ret; + PyObject *name_attr; + name_attr = PyObject_GetAttrString(meth, "__name__"); + if (name_attr) { + ret = PyObject_RichCompareBool(name_attr, name, Py_EQ); + } else { + ret = -1; + } + if (ret < 0) { + PyErr_Clear(); + ret = 0; + } + Py_XDECREF(name_attr); + return ret; +} +static int __Pyx_setup_reduce(PyObject* type_obj) { + int ret = 0; + PyObject* builtin_object = NULL; + static PyObject *object_reduce = NULL; + static PyObject *object_reduce_ex = NULL; + PyObject *reduce = NULL; + PyObject *reduce_ex = NULL; + PyObject *reduce_cython = NULL; + PyObject *setstate = NULL; + PyObject *setstate_cython = NULL; + if (PyObject_HasAttrString(type_obj, "__getstate__")) goto GOOD; + if (object_reduce_ex == NULL) { + __Pyx_setup_reduce_GET_ATTR_OR_BAD(builtin_object, __pyx_b, "object"); + __Pyx_setup_reduce_GET_ATTR_OR_BAD(object_reduce, builtin_object, "__reduce__"); + __Pyx_setup_reduce_GET_ATTR_OR_BAD(object_reduce_ex, builtin_object, "__reduce_ex__"); + } + __Pyx_setup_reduce_GET_ATTR_OR_BAD(reduce_ex, type_obj, "__reduce_ex__"); + if (reduce_ex == object_reduce_ex) { + __Pyx_setup_reduce_GET_ATTR_OR_BAD(reduce, type_obj, "__reduce__"); + if (object_reduce == reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) { + __Pyx_setup_reduce_GET_ATTR_OR_BAD(reduce_cython, type_obj, "__reduce_cython__"); + ret = PyDict_SetItemString(((PyTypeObject*)type_obj)->tp_dict, "__reduce__", reduce_cython); if (ret < 0) goto BAD; + ret = PyDict_DelItemString(((PyTypeObject*)type_obj)->tp_dict, "__reduce_cython__"); if (ret < 0) goto BAD; + setstate = PyObject_GetAttrString(type_obj, "__setstate__"); + if (!setstate) PyErr_Clear(); + if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) { + __Pyx_setup_reduce_GET_ATTR_OR_BAD(setstate_cython, type_obj, "__setstate_cython__"); + ret = PyDict_SetItemString(((PyTypeObject*)type_obj)->tp_dict, "__setstate__", setstate_cython); if (ret < 0) goto BAD; + ret = PyDict_DelItemString(((PyTypeObject*)type_obj)->tp_dict, "__setstate_cython__"); if (ret < 0) goto BAD; + } + PyType_Modified((PyTypeObject*)type_obj); + } + } + goto GOOD; +BAD: + if (!PyErr_Occurred()) PyErr_Format(PyExc_RuntimeError, "Unable to initialize pickling for %s", ((PyTypeObject*)type_obj)->tp_name); + ret = -1; +GOOD: + Py_XDECREF(builtin_object); + Py_XDECREF(reduce); + Py_XDECREF(reduce_ex); + Py_XDECREF(reduce_cython); + Py_XDECREF(setstate); + Py_XDECREF(setstate_cython); + return ret; +} + +/* SetVTable */ + static int __Pyx_SetVtable(PyObject *dict, void *vtable) { +#if PY_VERSION_HEX >= 0x02070000 + PyObject *ob = PyCapsule_New(vtable, 0, 0); +#else + PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); +#endif + if (!ob) + goto bad; + if (PyDict_SetItem(dict, __pyx_n_s_pyx_vtable, ob) < 0) + goto bad; + Py_DECREF(ob); + return 0; +bad: + Py_XDECREF(ob); + return -1; +} + +/* PatchModuleWithCoroutine */ + 
static PyObject* __Pyx_Coroutine_patch_module(PyObject* module, const char* py_code) { +#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + int result; + PyObject *globals, *result_obj; + globals = PyDict_New(); if (unlikely(!globals)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_coroutine_type", + #ifdef __Pyx_Coroutine_USED + (PyObject*)__pyx_CoroutineType); + #else + Py_None); + #endif + if (unlikely(result < 0)) goto ignore; + result = PyDict_SetItemString(globals, "_cython_generator_type", + #ifdef __Pyx_Generator_USED + (PyObject*)__pyx_GeneratorType); + #else + Py_None); + #endif + if (unlikely(result < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "_module", module) < 0)) goto ignore; + if (unlikely(PyDict_SetItemString(globals, "__builtins__", __pyx_b) < 0)) goto ignore; + result_obj = PyRun_String(py_code, Py_file_input, globals, globals); + if (unlikely(!result_obj)) goto ignore; + Py_DECREF(result_obj); + Py_DECREF(globals); + return module; +ignore: + Py_XDECREF(globals); + PyErr_WriteUnraisable(module); + if (unlikely(PyErr_WarnEx(PyExc_RuntimeWarning, "Cython module failed to patch module with custom type", 1) < 0)) { + Py_DECREF(module); + module = NULL; + } +#else + py_code++; +#endif + return module; +} + +/* PatchInspect */ + static PyObject* __Pyx_patch_inspect(PyObject* module) { +#if defined(__Pyx_Generator_USED) && (!defined(CYTHON_PATCH_INSPECT) || CYTHON_PATCH_INSPECT) + static int inspect_patched = 0; + if (unlikely((!inspect_patched) && module)) { + module = __Pyx_Coroutine_patch_module( + module, "" +"old_types = getattr(_module.isgenerator, '_cython_generator_types', None)\n" +"if old_types is None or not isinstance(old_types, set):\n" +" old_types = set()\n" +" def cy_wrap(orig_func, type=type, cython_generator_types=old_types):\n" +" def cy_isgenerator(obj): return type(obj) in cython_generator_types or orig_func(obj)\n" +" cy_isgenerator._cython_generator_types = cython_generator_types\n" +" return cy_isgenerator\n" +" _module.isgenerator = cy_wrap(_module.isgenerator)\n" +"old_types.add(_cython_generator_type)\n" + ); + inspect_patched = 1; + } +#else + if (0) return __Pyx_Coroutine_patch_module(module, NULL); +#endif + return module; +} + +/* CLineInTraceback */ + static int __Pyx_CLineForTraceback(int c_line) { +#ifdef CYTHON_CLINE_IN_TRACEBACK + return ((CYTHON_CLINE_IN_TRACEBACK)) ? c_line : 0; +#else + PyObject **cython_runtime_dict; + PyObject *use_cline; + cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime); + if (unlikely(!cython_runtime_dict)) { + PyObject *ptype, *pvalue, *ptraceback; + PyObject *use_cline_obj; + PyErr_Fetch(&ptype, &pvalue, &ptraceback); + use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback); + if (use_cline_obj) { + use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True; + Py_DECREF(use_cline_obj); + } else { + use_cline = NULL; + } + PyErr_Restore(ptype, pvalue, ptraceback); + } else { + use_cline = PyDict_GetItem(*_PyObject_GetDictPtr(__pyx_cython_runtime), __pyx_n_s_cline_in_traceback); + } + if (!use_cline) { + c_line = 0; + PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False); + } + else if (PyObject_Not(use_cline) != 0) { + c_line = 0; + } + return c_line; +#endif +} + +/* CodeObjectCache */ + static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ + #include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if 
PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + if (c_line) { + c_line = __Pyx_CLineForTraceback(c_line); + } + py_code = __pyx_find_code_object(c_line ? -c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? -c_line : py_line, py_code); + } + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntFromPyVerify */ + #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ 
+ }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, 
PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, 
PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntFromPy */ + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto 
raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + 
__PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CheckBinaryVersion */ + static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* ModuleImport */ + #ifndef __PYX_HAVE_RT_ImportModule +#define __PYX_HAVE_RT_ImportModule +static PyObject *__Pyx_ImportModule(const char *name) { + PyObject *py_name = 0; + PyObject *py_module = 0; + py_name = __Pyx_PyIdentifier_FromString(name); + if (!py_name) + goto bad; + py_module = PyImport_Import(py_name); + Py_DECREF(py_name); + return py_module; +bad: + Py_XDECREF(py_name); + return 0; +} +#endif + +/* TypeImport */ + #ifndef __PYX_HAVE_RT_ImportType +#define __PYX_HAVE_RT_ImportType +static PyTypeObject *__Pyx_ImportType(const char *module_name, const char *class_name, + size_t size, int strict) +{ + PyObject *py_module = 0; + PyObject *result = 0; + PyObject *py_name = 0; + char warning[200]; + Py_ssize_t basicsize; +#ifdef Py_LIMITED_API + PyObject *py_basicsize; +#endif + py_module = __Pyx_ImportModule(module_name); + if (!py_module) + goto bad; + py_name = __Pyx_PyIdentifier_FromString(class_name); + if (!py_name) + goto bad; + result = PyObject_GetAttr(py_module, py_name); + Py_DECREF(py_name); + py_name = 0; + Py_DECREF(py_module); + py_module = 0; + if (!result) + goto bad; + if (!PyType_Check(result)) { + PyErr_Format(PyExc_TypeError, + "%.200s.%.200s is not a type object", + module_name, class_name); + goto bad; + } +#ifndef Py_LIMITED_API + basicsize = ((PyTypeObject *)result)->tp_basicsize; +#else + py_basicsize = PyObject_GetAttrString(result, "__basicsize__"); + if (!py_basicsize) + goto bad; + basicsize = PyLong_AsSsize_t(py_basicsize); + Py_DECREF(py_basicsize); + py_basicsize = 0; + if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred()) + goto bad; +#endif + if (!strict && (size_t)basicsize 
> size) { + PyOS_snprintf(warning, sizeof(warning), + "%s.%s size changed, may indicate binary incompatibility. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad; + } + else if ((size_t)basicsize != size) { + PyErr_Format(PyExc_ValueError, + "%.200s.%.200s has the wrong size, try recompiling. Expected %zd, got %zd", + module_name, class_name, basicsize, size); + goto bad; + } + return (PyTypeObject *)result; +bad: + Py_XDECREF(py_module); + Py_XDECREF(result); + return NULL; +} +#endif + +/* InitStrings */ + static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + if (PyObject_Hash(*t->p) == -1) + PyErr_Clear(); + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +static CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if CYTHON_COMPILING_IN_CPYTHON && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { +#if PY_VERSION_HEX < 0x03030000 + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +#else + if (__Pyx_PyUnicode_READY(o) == -1) return NULL; +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (PyUnicode_IS_ASCII(o)) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +#endif + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods 
*m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if (PyLong_Check(x)) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } + #else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } + #endif +#else + res = PyNumber_Int(x); +#endif + if (res) { +#if PY_MAJOR_VERSION < 3 + if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(x); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_cython.pyx b/ptvsd/pydevd/_pydevd_bundle/pydevd_cython.pyx new file mode 100644 index 00000000..29e7a471 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_cython.pyx @@ -0,0 +1,1138 @@ +# Important: Autogenerated file. + +# DO NOT edit manually! +# DO NOT edit manually! 
+import sys +from _pydevd_bundle.pydevd_constants import STATE_RUN, PYTHON_SUSPEND, IS_JYTHON, IS_IRONPYTHON +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +# ELSE +# from _pydevd_bundle.pydevd_frame import PyDBFrame +# ENDIF + +version = 4 + +if not hasattr(sys, '_current_frames'): + + # Some versions of Jython don't have it (but we can provide a replacement) + if IS_JYTHON: + from java.lang import NoSuchFieldException + from org.python.core import ThreadStateMapping + try: + cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates') # Dev version + except NoSuchFieldException: + cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState') # Release Jython 2.7.0 + cachedThreadState.accessible = True + thread_states = cachedThreadState.get(ThreadStateMapping) + + def _current_frames(): + as_array = thread_states.entrySet().toArray() + ret = {} + for thread_to_state in as_array: + thread = thread_to_state.getKey() + if thread is None: + continue + thread_state = thread_to_state.getValue() + if thread_state is None: + continue + + frame = thread_state.frame + if frame is None: + continue + + ret[thread.getId()] = frame + return ret + + elif IS_IRONPYTHON: + _tid_to_last_frame = {} + + # IronPython doesn't have it. Let's use our workaround... + def _current_frames(): + return _tid_to_last_frame + + else: + raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).') +else: + _current_frames = sys._current_frames + +#======================================================================================================================= +# PyDBAdditionalThreadInfo +#======================================================================================================================= +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class PyDBAdditionalThreadInfo: +# ELSE +# class PyDBAdditionalThreadInfo(object): +# ENDIF + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef public int pydev_state; + cdef public object pydev_step_stop; # Actually, it's a frame or None + cdef public int pydev_step_cmd; + cdef public bint pydev_notify_kill; + cdef public object pydev_smart_step_stop; # Actually, it's a frame or None + cdef public bint pydev_django_resolve_frame; + cdef public object pydev_call_from_jinja2; + cdef public object pydev_call_inside_jinja2; + cdef public bint is_tracing; + cdef public tuple conditional_breakpoint_exception; + cdef public str pydev_message; + cdef public int suspend_type; + cdef public int pydev_next_line; + cdef public str pydev_func_name; + # ELSE +# __slots__ = [ +# 'pydev_state', +# 'pydev_step_stop', +# 'pydev_step_cmd', +# 'pydev_notify_kill', +# 'pydev_smart_step_stop', +# 'pydev_django_resolve_frame', +# 'pydev_call_from_jinja2', +# 'pydev_call_inside_jinja2', +# 'is_tracing', +# 'conditional_breakpoint_exception', +# 'pydev_message', +# 'suspend_type', +# 'pydev_next_line', +# 'pydev_func_name', +# ] + # ENDIF + + def __init__(self): + self.pydev_state = STATE_RUN + self.pydev_step_stop = None + self.pydev_step_cmd = -1 # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc. 
+ self.pydev_notify_kill = False + self.pydev_smart_step_stop = None + self.pydev_django_resolve_frame = False + self.pydev_call_from_jinja2 = None + self.pydev_call_inside_jinja2 = None + self.is_tracing = False + self.conditional_breakpoint_exception = None + self.pydev_message = '' + self.suspend_type = PYTHON_SUSPEND + self.pydev_next_line = -1 + self.pydev_func_name = '.invalid.' # Must match the type in cython + + + def iter_frames(self, t): + #sys._current_frames(): dictionary with thread id -> topmost frame + current_frames = _current_frames() + v = current_frames.get(t.ident) + if v is not None: + return [v] + return [] + + def __str__(self): + return 'State:%s Stop:%s Cmd: %s Kill:%s' % ( + self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill) + +import linecache +import os.path +import re +import sys +import traceback # @Reimport + +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_dont_trace +from _pydevd_bundle import pydevd_vars +from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint +from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \ + CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE +from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, get_thread_id, STATE_RUN, dict_iter_values, IS_PY3K, \ + RETURN_VALUES_DICT +from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised +from _pydevd_bundle.pydevd_utils import get_clsname_for_code +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +try: + from inspect import CO_GENERATOR +except: + CO_GENERATOR = 0 + +try: + from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace +except ImportError: + def send_signature_call_trace(*args, **kwargs): + pass + +basename = os.path.basename + +IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') +DEBUG_START = ('pydevd.py', 'run') +DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') +TRACE_PROPERTY = 'pydevd_traceproperty.py' +get_file_type = DONT_TRACE.get + + +def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): + condition = breakpoint.condition + try: + val = eval(condition, new_frame.f_globals, new_frame.f_locals) + if not val: + return default_return_value + + except: + if type(condition) != type(''): + if hasattr(condition, 'encode'): + condition = condition.encode('utf-8') + + msg = 'Error while evaluating expression: %s\n' % (condition,) + sys.stderr.write(msg) + traceback.print_exc() + if not py_db.suspend_on_breakpoint_exception: + return default_return_value + else: + stop = True + try: + # add exception_type and stacktrace into thread additional info + etype, value, tb = sys.exc_info() + try: + error = ''.join(traceback.format_exception_only(etype, value)) + stack = traceback.extract_stack(f=tb.tb_frame.f_back) + + # On self.set_suspend(thread, CMD_SET_BREAK) this info will be + # sent to the client. 
+ info.conditional_breakpoint_exception = \ + ('Condition:\n' + condition + '\n\nError:\n' + error, stack) + finally: + etype, value, tb = None, None, None + except: + traceback.print_exc() + + +def handle_breakpoint_expression(breakpoint, info, new_frame): + try: + try: + val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + except: + val = sys.exc_info()[1] + finally: + if val is not None: + info.pydev_message = str(val) + + +#======================================================================================================================= +# PyDBFrame +#======================================================================================================================= +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class PyDBFrame: +# ELSE +# class PyDBFrame: +# '''This makes the tracing for a given frame, so, the trace_dispatch +# is used initially when we enter into a new context ('call') and then +# is reused for the entire context. +# ''' +# ENDIF + + + #Note: class (and not instance) attributes. + + #Same thing in the main debugger but only considering the file contents, while the one in the main debugger + #considers the user input (so, the actual result must be a join of both). + filename_to_lines_where_exceptions_are_ignored = {} + filename_to_stat_info = {} + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef tuple _args + cdef int should_skip + def __init__(self, tuple args): + self._args = args # In the cython version we don't need to pass the frame + self.should_skip = -1 # On cythonized version, put in instance. + # ELSE +# should_skip = -1 # Default value in class (put in instance on set). +# +# def __init__(self, args): +# #args = main_debugger, filename, base, info, t, frame +# #yeap, much faster than putting in self and then getting it from self later on +# self._args = args + # ENDIF + + def set_suspend(self, *args, **kwargs): + self._args[0].set_suspend(*args, **kwargs) + + def do_wait_suspend(self, *args, **kwargs): + self._args[0].do_wait_suspend(*args, **kwargs) + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + def trace_exception(self, frame, str event, arg): + cdef bint flag; + # ELSE +# def trace_exception(self, frame, event, arg): + # ENDIF + if event == 'exception': + flag, frame = self.should_stop_on_exception(frame, event, arg) + + if flag: + self.handle_exception(frame, event, arg) + return self.trace_dispatch + + return self.trace_exception + + def trace_return(self, frame, event, arg): + if event == 'return': + main_debugger, filename = self._args[0], self._args[1] + send_signature_return_trace(main_debugger, frame, filename, arg) + return self.trace_return + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + def should_stop_on_exception(self, frame, str event, arg): + cdef PyDBAdditionalThreadInfo info; + cdef bint flag; + # ELSE +# def should_stop_on_exception(self, frame, event, arg): + # ENDIF + + # main_debugger, _filename, info, _thread = self._args + main_debugger = self._args[0] + info = self._args[2] + flag = False + + # STATE_SUSPEND = 2 + if info.pydev_state != 2: #and breakpoint is not None: + exception, value, trace = arg + + if trace is not None: #on jython trace is None on the first event + exception_breakpoint = get_exception_breakpoint( + exception, main_debugger.break_on_caught_exceptions) + + if exception_breakpoint is not None: + if exception_breakpoint.ignore_libraries: + if 
exception_breakpoint.notify_on_first_raise_only: + if main_debugger.first_appearance_in_scope(trace): + add_exception_to_frame(frame, (exception, value, trace)) + try: + info.pydev_message = exception_breakpoint.qname + except: + info.pydev_message = exception_breakpoint.qname.encode('utf-8') + flag = True + else: + pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename)) + flag = False + else: + if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): + add_exception_to_frame(frame, (exception, value, trace)) + try: + info.pydev_message = exception_breakpoint.qname + except: + info.pydev_message = exception_breakpoint.qname.encode('utf-8') + flag = True + else: + flag = False + else: + try: + if main_debugger.plugin is not None: + result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + if result: + flag, frame = result + except: + flag = False + + return flag, frame + + def handle_exception(self, frame, event, arg): + try: + # print 'handle_exception', frame.f_lineno, frame.f_code.co_name + + # We have 3 things in arg: exception type, description, traceback object + trace_obj = arg[2] + main_debugger = self._args[0] + + if not hasattr(trace_obj, 'tb_next'): + return #Not always there on Jython... + + initial_trace_obj = trace_obj + if trace_obj.tb_next is None and trace_obj.tb_frame is frame: + #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + + if main_debugger.break_on_exceptions_thrown_in_same_context: + #Option: Don't break if an exception is caught in the same function from which it is thrown + return + else: + #Get the trace_obj from where the exception was raised... + while trace_obj.tb_next is not None: + trace_obj = trace_obj.tb_next + + + if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + for check_trace_obj in (initial_trace_obj, trace_obj): + filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1] + + + filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored + + + lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename) + if lines_ignored is None: + lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + + try: + curr_stat = os.stat(filename) + curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + except: + curr_stat = None + + last_stat = self.filename_to_stat_info.get(filename) + if last_stat != curr_stat: + self.filename_to_stat_info[filename] = curr_stat + lines_ignored.clear() + try: + linecache.checkcache(filename) + except: + #Jython 2.1 + linecache.checkcache() + + from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename) + if from_user_input: + merged = {} + merged.update(lines_ignored) + #Override what we have with the related entries that the user entered + merged.update(from_user_input) + else: + merged = lines_ignored + + exc_lineno = check_trace_obj.tb_lineno + + # print ('lines ignored', lines_ignored) + # print ('user input', from_user_input) + # print ('merged', merged, 'curr', exc_lineno) + + if exc_lineno not in merged: #Note: check on merged but update lines_ignored. 
+ try: + line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + except: + #Jython 2.1 + line = linecache.getline(filename, exc_lineno) + + if IGNORE_EXCEPTION_TAG.match(line) is not None: + lines_ignored[exc_lineno] = 1 + return + else: + #Put in the cache saying not to ignore + lines_ignored[exc_lineno] = 0 + else: + #Ok, dict has it already cached, so, let's check it... + if merged.get(exc_lineno, 0): + return + + + thread = self._args[3] + + try: + frame_id_to_frame = {} + frame_id_to_frame[id(frame)] = frame + f = trace_obj.tb_frame + while f is not None: + frame_id_to_frame[id(f)] = f + f = f.f_back + f = None + + thread_id = get_thread_id(thread) + pydevd_vars.add_additional_frame_by_id(thread_id, frame_id_to_frame) + try: + main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) + self.do_wait_suspend(thread, frame, event, arg) + main_debugger.send_caught_exception_stack_proceeded(thread) + + finally: + pydevd_vars.remove_additional_frame_by_id(thread_id) + except: + traceback.print_exc() + + main_debugger.set_trace_for_frame_and_parents(frame) + finally: + #Clear some local variables... + trace_obj = None + initial_trace_obj = None + check_trace_obj = None + f = None + frame_id_to_frame = None + main_debugger = None + thread = None + + def get_func_name(self, frame): + code_obj = frame.f_code + func_name = code_obj.co_name + try: + cls_name = get_clsname_for_code(code_obj, frame) + if cls_name is not None: + return "%s.%s" % (cls_name, func_name) + else: + return func_name + except: + traceback.print_exc() + return func_name + + def show_return_values(self, frame, arg): + try: + try: + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + if return_values_dict is None: + return_values_dict = {} + f_locals_back[RETURN_VALUES_DICT] = return_values_dict + name = self.get_func_name(frame) + return_values_dict[name] = arg + except: + traceback.print_exc() + finally: + f_locals_back = None + + def remove_return_values(self, main_debugger, frame): + try: + try: + # Showing return values was turned off, we should remove them from locals dict. 
+ # The values can be in the current frame or in the back one + frame.f_locals.pop(RETURN_VALUES_DICT, None) + + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + f_locals_back.pop(RETURN_VALUES_DICT, None) + except: + traceback.print_exc() + finally: + f_locals_back = None + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cpdef trace_dispatch(self, frame, str event, arg): + cdef str filename; + cdef bint is_exception_event; + cdef bint has_exception_breakpoints; + cdef bint can_skip; + cdef PyDBAdditionalThreadInfo info; + cdef int step_cmd; + cdef int line; + cdef bint is_line; + cdef bint is_call; + cdef bint is_return; + cdef str curr_func_name; + cdef bint exist_result; + cdef dict frame_skips_cache; + cdef tuple frame_cache_key; + cdef tuple line_cache_key; + cdef int breakpoints_in_line_cache; + cdef int breakpoints_in_frame_cache; + cdef bint has_breakpoint_in_frame; + # ELSE +# def trace_dispatch(self, frame, event, arg): + # ENDIF + + main_debugger, filename, info, thread, frame_skips_cache, frame_cache_key = self._args + # print('frame trace_dispatch', frame.f_lineno, frame.f_code.co_name, event, info.pydev_step_cmd) + try: + info.is_tracing = True + line = frame.f_lineno + line_cache_key = (frame_cache_key, line) + + if main_debugger._finish_debugging_session: + return None + + plugin_manager = main_debugger.plugin + + is_exception_event = event == 'exception' + has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks + + if is_exception_event: + if has_exception_breakpoints: + flag, frame = self.should_stop_on_exception(frame, event, arg) + if flag: + self.handle_exception(frame, event, arg) + return self.trace_dispatch + is_line = False + is_return = False + is_call = False + else: + is_line = event == 'line' + is_return = event == 'return' + is_call = event == 'call' + if not is_line and not is_return and not is_call: + # I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace. + return None + + need_trace_return = False + if is_call and main_debugger.signature_factory: + need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + if is_return and main_debugger.signature_factory: + send_signature_return_trace(main_debugger, frame, filename, arg) + + stop_frame = info.pydev_step_stop + step_cmd = info.pydev_step_cmd + + if is_exception_event: + breakpoints_for_file = None + # CMD_STEP_OVER = 108 + if stop_frame and stop_frame is not frame and step_cmd == 108 and \ + arg[0] in (StopIteration, GeneratorExit) and arg[2] is None: + info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + info.pydev_step_stop = None + else: + # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break + # eventually. Force the step mode to step into and the step stop frame to None. + # I.e.: F6 in the end of a function should stop in the next possible position (instead of forcing the user + # to make a step in or step over at that location). + # Note: this is especially troublesome when we're skipping code with the + # @DontTrace comment. 
+ if stop_frame is frame and is_return and step_cmd in (109, 108): # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) + info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + info.pydev_step_stop = None + + breakpoints_for_file = main_debugger.breakpoints.get(filename) + + can_skip = False + + if info.pydev_state == 1: # STATE_RUN = 1 + #we can skip if: + #- we have no stop marked + #- we should make a step return/step over and we're not in the current frame + # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + can_skip = (step_cmd == -1 and stop_frame is None)\ + or (step_cmd in (109, 108) and stop_frame is not frame) + + if can_skip: + if plugin_manager is not None and main_debugger.has_plugin_line_breaks: + can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) + + # CMD_STEP_OVER = 108 + if can_skip and is_return and main_debugger.show_return_values and info.pydev_step_cmd == 108 and frame.f_back is info.pydev_step_stop: + # trace function for showing return values after step over + can_skip = False + + # Let's check to see if we are in a function that has a breakpoint. If we don't have a breakpoint, + # we will return nothing for the next trace + # also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway, + # so, that's why the additional checks are there. + if not breakpoints_for_file: + if can_skip: + if has_exception_breakpoints: + return self.trace_exception + else: + if need_trace_return: + return self.trace_return + else: + return None + + else: + # When cached, 0 means we don't have a breakpoint and 1 means we have. + if can_skip: + breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + if breakpoints_in_line_cache == 0: + return self.trace_dispatch + + breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + if breakpoints_in_frame_cache != -1: + # Gotten from cache. + has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + + else: + has_breakpoint_in_frame = False + # Checks the breakpoint to see if there is a context match in some function + curr_func_name = frame.f_code.co_name + + #global context is set with an empty name + if curr_func_name in ('?', ''): + curr_func_name = '' + + for breakpoint in dict_iter_values(breakpoints_for_file): #jython does not support itervalues() + #will match either global or some function + if breakpoint.func_name in ('None', curr_func_name): + has_breakpoint_in_frame = True + break + + # Cache the value (1 or 0 or -1 for default because of cython). + if has_breakpoint_in_frame: + frame_skips_cache[frame_cache_key] = 1 + else: + frame_skips_cache[frame_cache_key] = 0 + + + if can_skip and not has_breakpoint_in_frame: + if has_exception_breakpoints: + return self.trace_exception + else: + if need_trace_return: + return self.trace_return + else: + return None + + #We may have hit a breakpoint or we are already in step mode. Either way, let's check what we should do in this frame + # print('NOT skipped', frame.f_lineno, frame.f_code.co_name, event) + + try: + flag = False + #return is not taken into account for breakpoint hit because we'd have a double-hit in this case + #(one for the line and the other for the return). 
+ + stop_info = {} + breakpoint = None + exist_result = False + stop = False + bp_type = None + if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: + breakpoint = breakpoints_for_file[line] + new_frame = frame + stop = True + if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): + stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + if result: + exist_result = True + flag, breakpoint, new_frame, bp_type = result + + if breakpoint: + #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + # lets do the conditional stuff here + if stop or exist_result: + condition = breakpoint.condition + if condition is not None: + result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + self.trace_dispatch) + if result is not None: + return result + + if breakpoint.expression is not None: + handle_breakpoint_expression(breakpoint, info, new_frame) + + if not main_debugger.first_breakpoint_reached: + if is_call: + back = frame.f_back + if back is not None: + # When we start debug session, we call execfile in pydevd run function. It produces an additional + # 'call' event for tracing and we stop on the first line of code twice. + _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ + (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + stop = False + main_debugger.first_breakpoint_reached = True + else: + # if the frame is traced after breakpoint stop, + # but the file should be ignored while stepping because of filters + if step_cmd != -1: + if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename): + # ignore files matching stepping filters + return self.trace_dispatch + if main_debugger.is_filter_libraries and main_debugger.not_in_scope(filename): + # ignore library files while stepping + return self.trace_dispatch + + if main_debugger.show_return_values: + if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop: + self.show_return_values(frame, arg) + + elif main_debugger.remove_return_values_flag: + try: + self.remove_return_values(main_debugger, frame) + finally: + main_debugger.remove_return_values_flag = False + + if stop: + self.set_suspend(thread, CMD_SET_BREAK) + if breakpoint and breakpoint.suspend_policy == "ALL": + main_debugger.suspend_all_other_threads(thread) + elif flag and plugin_manager is not None: + result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + if result: + frame = result + + # if thread has a suspend flag, we suspend with a busy wait + if info.pydev_state == STATE_SUSPEND: + self.do_wait_suspend(thread, frame, event, arg) + return self.trace_dispatch + else: + if not breakpoint and not is_return: + # No stop from anyone and no breakpoint found in line (cache that). + frame_skips_cache[line_cache_key] = 0 + + except: + traceback.print_exc() + raise + + #step handling. We stop when we hit the right frame + try: + should_skip = 0 + if pydevd_dont_trace.should_trace_hook is not None: + if self.should_skip == -1: + # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). 
+ # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + # Which will be handled by this frame is read-only, so, we can cache it safely. + if not pydevd_dont_trace.should_trace_hook(frame, filename): + # -1, 0, 1 to be Cython-friendly + should_skip = self.should_skip = 1 + else: + should_skip = self.should_skip = 0 + else: + should_skip = self.should_skip + + plugin_stop = False + if should_skip: + stop = False + + elif step_cmd == CMD_STEP_INTO: + stop = is_line or is_return + if plugin_manager is not None: + result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd == CMD_STEP_INTO_MY_CODE: + if not main_debugger.not_in_scope(frame.f_code.co_filename): + stop = is_line + + elif step_cmd == CMD_STEP_OVER: + stop = stop_frame is frame and (is_line or is_return) + + if frame.f_code.co_flags & CO_GENERATOR: + if is_return: + stop = False + + if plugin_manager is not None: + result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd == CMD_SMART_STEP_INTO: + stop = False + if info.pydev_smart_step_stop is frame: + info.pydev_func_name = '.invalid.' # Must match the type in cython + info.pydev_smart_step_stop = None + + if is_line or is_exception_event: + curr_func_name = frame.f_code.co_name + + #global context is set with an empty name + if curr_func_name in ('?', '') or curr_func_name is None: + curr_func_name = '' + + if curr_func_name == info.pydev_func_name: + stop = True + + elif step_cmd == CMD_STEP_RETURN: + stop = is_return and stop_frame is frame + + elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT: + stop = False + + if is_line or is_exception_event: + #Yes, we can only act on line events (weird hum?) 
+ #Note: This code is duplicated at pydevd.py + #Acting on exception events after debugger breaks with exception + curr_func_name = frame.f_code.co_name + + #global context is set with an empty name + if curr_func_name in ('?', ''): + curr_func_name = '' + + if curr_func_name == info.pydev_func_name: + line = info.pydev_next_line + if frame.f_lineno == line: + stop = True + else: + if frame.f_trace is None: + frame.f_trace = self.trace_dispatch + frame.f_lineno = line + frame.f_trace = None + stop = True + + else: + stop = False + + if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): + f_code = getattr(frame.f_back, 'f_code', None) + if f_code is not None: + back_filename = os.path.basename(f_code.co_filename) + file_type = get_file_type(back_filename) + if file_type == PYDEV_FILE: + stop = False + + if plugin_stop: + stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + elif stop: + if is_line: + self.set_suspend(thread, step_cmd) + self.do_wait_suspend(thread, frame, event, arg) + else: #return event + back = frame.f_back + if back is not None: + #When we get to the pydevd run function, the debugging has actually finished for the main thread + #(note that it can still go on for other threads, but for this one, we just make it finish) + #So, just setting it to None should be OK + _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + if base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]: + back = None + + elif base == TRACE_PROPERTY: + # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + #if we're in a return, we want it to appear to the user in the previous frame! + return None + + elif pydevd_dont_trace.should_trace_hook is not None: + if not pydevd_dont_trace.should_trace_hook(back, back_filename): + # In this case, we'll have to skip the previous one because it shouldn't be traced. + # Also, we have to reset the tracing, because if the parent's parent (or some + # other parent) has to be traced and it's not currently, we wouldn't stop where + # we should anymore (so, a step in/over/return may not stop anywhere if no parent is traced). + # Related test: _debugger_case17a.py + main_debugger.set_trace_for_frame_and_parents(back, overwrite_prev_trace=True) + return None + + if back is not None: + #if we're in a return, we want it to appear to the user in the previous frame! 
+ self.set_suspend(thread, step_cmd) + self.do_wait_suspend(thread, back, event, arg) + else: + #in jython we may not have a back frame + info.pydev_step_stop = None + info.pydev_step_cmd = -1 + info.pydev_state = STATE_RUN + + except KeyboardInterrupt: + raise + except: + try: + traceback.print_exc() + info.pydev_step_cmd = -1 + except: + return None + + #if we are quitting, let's stop the tracing + retVal = None + if not main_debugger.quitting: + retVal = self.trace_dispatch + + return retVal + finally: + info.is_tracing = False + + #end trace_dispatch + +import traceback + +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_imps._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON +from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE +from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER +from pydevd_tracing import SetTrace +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +# In Cython, PyDBAdditionalThreadInfo is bundled in the file. +from cpython.object cimport PyObject +from cpython.ref cimport Py_INCREF, Py_XDECREF +# ELSE +# from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo +# from _pydevd_bundle.pydevd_frame import PyDBFrame +# ENDIF + +try: + from _pydevd_bundle.pydevd_signature import send_signature_call_trace +except ImportError: + def send_signature_call_trace(*args, **kwargs): + pass + +threadingCurrentThread = threading.currentThread +get_file_type = DONT_TRACE.get + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +# cdef dict global_cache_skips +# cdef dict global_cache_frame_skips +# ELSE +# ENDIF + + +# Cache where we should keep that we completely skipped entering some context. +# It needs to be invalidated when: +# - Breakpoints are changed +# It can be used when running regularly (without step over/step in/step return) +global_cache_skips = {} +global_cache_frame_skips = {} + +def trace_dispatch(py_db, frame, event, arg): + t = threadingCurrentThread() + + if getattr(t, 'pydev_do_not_trace', None): + return None + + try: + additional_info = t.additional_info + if additional_info is None: + raise AttributeError() + except: + additional_info = t.additional_info = PyDBAdditionalThreadInfo() + + thread_tracer = ThreadTracer((py_db, t, additional_info, global_cache_skips, global_cache_frame_skips)) +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + t._tracer = thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). 
+# ELSE +# ENDIF + SetTrace(thread_tracer.__call__) + return thread_tracer.__call__(frame, event, arg) + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +cdef class SafeCallWrapper: + cdef method_object + def __init__(self, method_object): + self.method_object = method_object + def __call__(self, *args): + #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field + #in the frame, and that reference might get destroyed by set trace on frame and parents + cdef PyObject* method_obj = self.method_object + Py_INCREF(method_obj) + ret = (method_obj)(*args) + Py_XDECREF (method_obj) + return SafeCallWrapper(ret) if ret is not None else None +cdef class ThreadTracer: + cdef public tuple _args; + def __init__(self, tuple args): + self._args = args +# ELSE +# class ThreadTracer: +# def __init__(self, args): +# self._args = args +# ENDIF + + + def __call__(self, frame, event, arg): + ''' This is the callback used when we enter some context in the debugger. + + We also decorate the thread we are in with info about the debugging. + The attributes added are: + pydev_state + pydev_step_stop + pydev_step_cmd + pydev_notify_kill + + :param PyDB py_db: + This is the global debugger (this method should actually be added as a method to it). + ''' + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + cdef str filename; + cdef str base; + cdef int pydev_step_cmd; + cdef tuple cache_key; + cdef dict cache_skips; + cdef bint is_stepping; + cdef tuple abs_path_real_path_and_base; + cdef PyDBAdditionalThreadInfo additional_info; + # ENDIF + # print('ENTER: trace_dispatch', frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name) + py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + pydev_step_cmd = additional_info.pydev_step_cmd + is_stepping = pydev_step_cmd != -1 + + try: + if py_db._finish_debugging_session: + if not py_db._termination_event_set: + #that was not working very well because jython gave some socket errors + try: + if py_db.output_checker is None: + kill_all_pydev_threads() + except: + traceback.print_exc() + py_db._termination_event_set = True + return None + + # if thread is not alive, cancel trace_dispatch processing + if not is_thread_alive(t): + py_db._process_thread_not_alive(get_thread_id(t)) + return None # suspend tracing + + try: + # Make fast path faster! + abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + + if py_db.thread_analyser is not None: + py_db.thread_analyser.log_event(frame) + + if py_db.asyncio_analyser is not None: + py_db.asyncio_analyser.log_event(frame) + + filename = abs_path_real_path_and_base[1] + # Note: it's important that the context name is also given because we may hit something once + # in the global context and another in the local context. 
+ cache_key = (frame.f_lineno, frame.f_code.co_name, filename) + if not is_stepping and cache_key in cache_skips: + # print('skipped: trace_dispatch (cache hit)', cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None + + file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd + + if file_type is not None: + if file_type == 1: # inlining LIB_FILE = 1 + if py_db.not_in_scope(filename): + # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[cache_key] = 1 + return None + else: + # print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[cache_key] = 1 + return None + + if is_stepping: + if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): + # ignore files matching stepping filters + return None + if py_db.is_filter_libraries and py_db.not_in_scope(filename): + # ignore library files while stepping + return None + + # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type) + if additional_info.is_tracing: + return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + + if event == 'call' and py_db.signature_factory: + # We can only have a call when entering a context, so, check at this level, not at the PyDBFrame. + send_signature_call_trace(py_db, frame, filename) + + # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak + # reference to the frame). + ret = PyDBFrame((py_db, filename, additional_info, t, frame_skips_cache, (frame.f_code.co_name, frame.f_code.co_firstlineno, filename))).trace_dispatch(frame, event, arg) + if ret is None: + cache_skips[cache_key] = 1 + return None + + # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) + return SafeCallWrapper(ret) + # ELSE +# return ret + # ENDIF + + except SystemExit: + return None + + except Exception: + if py_db._finish_debugging_session: + return None # Don't log errors when we're shutting down. + # Log it + try: + if traceback is not None: + # This can actually happen during the interpreter shutdown in Python 2.7 + traceback.print_exc() + except: + # Error logging? We're really in the interpreter shutdown... + # (https://github.com/fabioz/PyDev.Debugger/issues/8) + pass + return None + + +if IS_IRONPYTHON: + # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + # may live longer... as IronPython is garbage-collected, things should live longer anyways, so, it + # shouldn't be an issue as big as it's in CPython -- it may still be annoying, but this should + # be a reasonable workaround until IronPython itself is able to provide that functionality). 
+ # + # See: https://github.com/IronLanguages/main/issues/1630 + from _pydevd_bundle.pydevd_additional_thread_info_regular import _tid_to_last_frame + + _original_call = ThreadTracer.__call__ + + def __call__(self, frame, event, arg): + _tid_to_last_frame[self._args[1].ident] = frame + return _original_call(self, frame, event, arg) + + ThreadTracer.__call__ = __call__ + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py new file mode 100644 index 00000000..17373b65 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py @@ -0,0 +1,35 @@ +try: + from _pydevd_bundle.pydevd_cython import trace_dispatch, PyDBAdditionalThreadInfo, global_cache_skips, global_cache_frame_skips + import _pydevd_bundle.pydevd_cython + # this version number can be unavailable in old versions of compiled extensions + version = getattr(_pydevd_bundle.pydevd_cython, 'version', 0) +except ImportError: + try: + import struct + import sys + try: + is_python_64bit = (struct.calcsize('P') == 8) + except: + # In Jython this call fails, but this is Ok, we don't support Jython for speedups anyways. + raise ImportError + plat = '32' + if is_python_64bit: + plat = '64' + + # We also accept things as: + # + # _pydevd_bundle.pydevd_cython_win32_27_32 + # _pydevd_bundle.pydevd_cython_win32_34_64 + # + # to have multiple pre-compiled pyds distributed along the IDE + # (generated by build_tools/build_binaries_windows.py). + + mod_name = 'pydevd_cython_%s_%s%s_%s' % (sys.platform, sys.version_info[0], sys.version_info[1], plat) + check_name = '_pydevd_bundle.%s' % (mod_name,) + mod = __import__(check_name) + mod = getattr(mod, mod_name) + trace_dispatch, PyDBAdditionalThreadInfo, global_cache_skips, global_cache_frame_skips = \ + mod.trace_dispatch, mod.PyDBAdditionalThreadInfo, mod.global_cache_skips, mod.global_cache_frame_skips + version = getattr(mod, 'version', 0) + except ImportError: + raise \ No newline at end of file diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace.py new file mode 100644 index 00000000..be73810f --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace.py @@ -0,0 +1,123 @@ +''' +Support for a tag that allows skipping over functions while debugging. +''' +import linecache +import re + +# To suppress tracing a method, add the tag @DontTrace +# to a comment either preceding or on the same line as +# the method definition +# +# E.g.: +# #@DontTrace +# def test1(): +# pass +# +# ... or ... +# +# def test2(): #@DontTrace +# pass +DONT_TRACE_TAG = '@DontTrace' + +# Regular expression to match a decorator (at the beginning +# of a line). +RE_DECORATOR = re.compile(r'^\s*@') + +# Mapping from code object to bool. +# If the key exists, the value is the cached result of should_trace_hook +_filename_to_ignored_lines = {} + +def default_should_trace_hook(frame, filename): + ''' + Return True if this frame should be traced, False if tracing should be blocked. + ''' + # First, check whether this code object has a cached value + ignored_lines = _filename_to_ignored_lines.get(filename) + if ignored_lines is None: + # Now, look up that line of code and check for a @DontTrace + # preceding or on the same line as the method. + # E.g.: + # #@DontTrace + # def test(): + # pass + # ... or ... 
+ # def test(): #@DontTrace + # pass + ignored_lines = {} + lines = linecache.getlines(filename) + for i_line, line in enumerate(lines): + j = line.find('#') + if j >= 0: + comment = line[j:] + if DONT_TRACE_TAG in comment: + ignored_lines[i_line] = 1 + + #Note: when it's found in the comment, mark it up and down for the decorator lines found. + k = i_line - 1 + while k >= 0: + if RE_DECORATOR.match(lines[k]): + ignored_lines[k] = 1 + k -= 1 + else: + break + + k = i_line + 1 + while k <= len(lines): + if RE_DECORATOR.match(lines[k]): + ignored_lines[k] = 1 + k += 1 + else: + break + + + _filename_to_ignored_lines[filename] = ignored_lines + + func_line = frame.f_code.co_firstlineno - 1 # co_firstlineno is 1-based, so -1 is needed + return not ( + func_line - 1 in ignored_lines or #-1 to get line before method + func_line in ignored_lines) #method line + + +should_trace_hook = None + + +def clear_trace_filter_cache(): + ''' + Clear the trace filter cache. + Call this after reloading. + ''' + global should_trace_hook + try: + # Need to temporarily disable a hook because otherwise + # _filename_to_ignored_lines.clear() will never complete. + old_hook = should_trace_hook + should_trace_hook = None + + # Clear the linecache + linecache.clearcache() + _filename_to_ignored_lines.clear() + + finally: + should_trace_hook = old_hook + + +def trace_filter(mode): + ''' + Set the trace filter mode. + + mode: Whether to enable the trace hook. + True: Trace filtering on (skipping methods tagged @DontTrace) + False: Trace filtering off (trace methods tagged @DontTrace) + None/default: Toggle trace filtering. + ''' + global should_trace_hook + if mode is None: + mode = should_trace_hook is None + + if mode: + should_trace_hook = default_should_trace_hook + else: + should_trace_hook = None + + return mode + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py new file mode 100644 index 00000000..37039045 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py @@ -0,0 +1,119 @@ +# Important: Autogenerated file. + +# DO NOT edit manually! +# DO NOT edit manually! 
+ +from _pydevd_bundle.pydevd_constants import IS_PY3K + +LIB_FILE = 1 +PYDEV_FILE = 2 + +DONT_TRACE = { + # commonly used things from the stdlib that we don't want to trace + 'Queue.py':LIB_FILE, + 'queue.py':LIB_FILE, + 'socket.py':LIB_FILE, + 'weakref.py':LIB_FILE, + '_weakrefset.py':LIB_FILE, + 'linecache.py':LIB_FILE, + 'threading.py':LIB_FILE, + 'dis.py':LIB_FILE, + + #things from pydev that we don't want to trace + '_pydev_execfile.py':PYDEV_FILE, + '_pydev_BaseHTTPServer.py': PYDEV_FILE, + '_pydev_SimpleXMLRPCServer.py': PYDEV_FILE, + '_pydev_SocketServer.py': PYDEV_FILE, + '_pydev_calltip_util.py': PYDEV_FILE, + '_pydev_completer.py': PYDEV_FILE, + '_pydev_execfile.py': PYDEV_FILE, + '_pydev_filesystem_encoding.py': PYDEV_FILE, + '_pydev_getopt.py': PYDEV_FILE, + '_pydev_imports_tipper.py': PYDEV_FILE, + '_pydev_inspect.py': PYDEV_FILE, + '_pydev_jy_imports_tipper.py': PYDEV_FILE, + '_pydev_log.py': PYDEV_FILE, + '_pydev_pkgutil_old.py': PYDEV_FILE, + '_pydev_saved_modules.py': PYDEV_FILE, + '_pydev_sys_patch.py': PYDEV_FILE, + '_pydev_tipper_common.py': PYDEV_FILE, + '_pydev_uuid_old.py': PYDEV_FILE, + '_pydev_xmlrpclib.py': PYDEV_FILE, + 'django_debug.py': PYDEV_FILE, + 'fix_getpass.py': PYDEV_FILE, + 'jinja2_debug.py': PYDEV_FILE, + 'pycompletionserver.py': PYDEV_FILE, + 'pydev_app_engine_debug_startup.py': PYDEV_FILE, + 'pydev_console_utils.py': PYDEV_FILE, + 'pydev_import_hook.py': PYDEV_FILE, + 'pydev_imports.py': PYDEV_FILE, + 'pydev_ipython_console.py': PYDEV_FILE, + 'pydev_ipython_console_011.py': PYDEV_FILE, + 'pydev_is_thread_alive.py': PYDEV_FILE, + 'pydev_localhost.py': PYDEV_FILE, + 'pydev_log.py': PYDEV_FILE, + 'pydev_monkey.py': PYDEV_FILE, + 'pydev_monkey_qt.py': PYDEV_FILE, + 'pydev_override.py': PYDEV_FILE, + 'pydev_run_in_console.py': PYDEV_FILE, + 'pydev_umd.py': PYDEV_FILE, + 'pydev_versioncheck.py': PYDEV_FILE, + 'pydevconsole.py': PYDEV_FILE, + 'pydevconsole_code_for_ironpython.py': PYDEV_FILE, + 'pydevd.py': PYDEV_FILE, + 'pydevd_additional_thread_info.py': PYDEV_FILE, + 'pydevd_additional_thread_info_regular.py': PYDEV_FILE, + 'pydevd_breakpoints.py': PYDEV_FILE, + 'pydevd_comm.py': PYDEV_FILE, + 'pydevd_command_line_handling.py': PYDEV_FILE, + 'pydevd_concurrency_logger.py': PYDEV_FILE, + 'pydevd_console.py': PYDEV_FILE, + 'pydevd_constants.py': PYDEV_FILE, + 'pydevd_custom_frames.py': PYDEV_FILE, + 'pydevd_cython_wrapper.py': PYDEV_FILE, + 'pydevd_dont_trace.py': PYDEV_FILE, + 'pydevd_dont_trace_files.py': PYDEV_FILE, + 'pydevd_exec.py': PYDEV_FILE, + 'pydevd_exec2.py': PYDEV_FILE, + 'pydevd_extension_api.py': PYDEV_FILE, + 'pydevd_extension_utils.py': PYDEV_FILE, + 'pydevd_file_utils.py': PYDEV_FILE, + 'pydevd_frame.py': PYDEV_FILE, + 'pydevd_frame_eval_cython_wrapper.py': PYDEV_FILE, + 'pydevd_frame_eval_main.py': PYDEV_FILE, + 'pydevd_frame_tracing.py': PYDEV_FILE, + 'pydevd_frame_utils.py': PYDEV_FILE, + 'pydevd_helpers.py': PYDEV_FILE, + 'pydevd_import_class.py': PYDEV_FILE, + 'pydevd_io.py': PYDEV_FILE, + 'pydevd_kill_all_pydevd_threads.py': PYDEV_FILE, + 'pydevd_modify_bytecode.py': PYDEV_FILE, + 'pydevd_plugin_numpy_types.py': PYDEV_FILE, + 'pydevd_plugin_utils.py': PYDEV_FILE, + 'pydevd_plugins_django_form_str.py': PYDEV_FILE, + 'pydevd_process_net_command.py': PYDEV_FILE, + 'pydevd_referrers.py': PYDEV_FILE, + 'pydevd_reload.py': PYDEV_FILE, + 'pydevd_resolver.py': PYDEV_FILE, + 'pydevd_save_locals.py': PYDEV_FILE, + 'pydevd_signature.py': PYDEV_FILE, + 'pydevd_stackless.py': PYDEV_FILE, + 'pydevd_thread_wrappers.py': PYDEV_FILE, + 
'pydevd_trace_api.py': PYDEV_FILE, + 'pydevd_trace_dispatch.py': PYDEV_FILE, + 'pydevd_trace_dispatch_regular.py': PYDEV_FILE, + 'pydevd_traceproperty.py': PYDEV_FILE, + 'pydevd_tracing.py': PYDEV_FILE, + 'pydevd_utils.py': PYDEV_FILE, + 'pydevd_vars.py': PYDEV_FILE, + 'pydevd_vm_type.py': PYDEV_FILE, + 'pydevd_xml.py': PYDEV_FILE, +} + +if IS_PY3K: + # if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716) + DONT_TRACE['io.py'] = LIB_FILE + + # Don't trace common encodings too + DONT_TRACE['cp1252.py'] = LIB_FILE + DONT_TRACE['utf_8.py'] = LIB_FILE diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_exec.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_exec.py new file mode 100644 index 00000000..9a342ee1 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_exec.py @@ -0,0 +1,5 @@ +def Exec(exp, global_vars, local_vars=None): + if local_vars is not None: + exec exp in global_vars, local_vars + else: + exec exp in global_vars \ No newline at end of file diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_exec2.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_exec2.py new file mode 100644 index 00000000..ee4f37a6 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_exec2.py @@ -0,0 +1,5 @@ +def Exec(exp, global_vars, local_vars=None): + if local_vars is not None: + exec(exp, global_vars, local_vars) + else: + exec(exp, global_vars) \ No newline at end of file diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_extension_api.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_extension_api.py new file mode 100644 index 00000000..aac7c799 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_extension_api.py @@ -0,0 +1,87 @@ +import abc + + +# borrowed from from six +def _with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + + return type.__new__(metaclass, 'temporary_class', (), {}) + + +# ======================================================================================================================= +# AbstractResolver +# ======================================================================================================================= +class _AbstractResolver(_with_metaclass(abc.ABCMeta)): + """ + This class exists only for documentation purposes to explain how to create a resolver. + + Some examples on how to resolve things: + - list: get_dictionary could return a dict with index->item and use the index to resolve it later + - set: get_dictionary could return a dict with id(object)->object and reiterate in that array to resolve it later + - arbitrary instance: get_dictionary could return dict with attr_name->attr and use getattr to resolve it later + """ + + @abc.abstractmethod + def resolve(self, var, attribute): + """ + In this method, we'll resolve some child item given the string representation of the item in the key + representing the previously asked dictionary. + + @param var: this is the actual variable to be resolved. + @param attribute: this is the string representation of a key previously returned in get_dictionary. + """ + raise NotImplementedError + + @abc.abstractmethod + def get_dictionary(self, var): + """ + @param var: this is the variable that should have its children gotten. + + @return: a dictionary where each pair key, value should be shown to the user as children items + in the variables view for the given var. 
+ """ + raise NotImplementedError + + +class _AbstractProvider(_with_metaclass(abc.ABCMeta)): + @abc.abstractmethod + def can_provide(self, type_object, type_name): + raise NotImplementedError + + +# ======================================================================================================================= +# API CLASSES: +# ======================================================================================================================= + +class TypeResolveProvider(_AbstractResolver, _AbstractProvider): + """ + Implement this in an extension to provide a custom resolver, see _AbstractResolver + """ + + +class StrPresentationProvider(_AbstractProvider): + """ + Implement this in an extension to provide a str presentation for a type + """ + + @abc.abstractmethod + def get_str(self, val): + raise NotImplementedError + + +class DebuggerEventHandler(_with_metaclass(abc.ABCMeta)): + """ + Implement this to receive lifecycle events from the debugger + """ + + def on_debugger_modules_loaded(self, **kwargs): + """ + This method invoked after all debugger modules are loaded. Useful for importing and/or patching debugger + modules at a safe time + :param kwargs: This is intended to be flexible dict passed from the debugger. + Currently passes the debugger version + """ diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_extension_utils.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_extension_utils.py new file mode 100644 index 00000000..cc0de9d7 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_extension_utils.py @@ -0,0 +1,61 @@ +import pkgutil +import sys +from _pydev_bundle import pydev_log +import pydevd_plugins.extensions + +class ExtensionManager(object): + + def __init__(self): + self.loaded_extensions = None + self.type_to_instance = {} + + def _load_modules(self): + self.loaded_extensions = [] + for module_loader, name, ispkg in pkgutil.walk_packages(pydevd_plugins.extensions.__path__, + pydevd_plugins.extensions.__name__ + '.'): + mod_name = name.split('.')[-1] + if not ispkg and mod_name.startswith('pydevd_plugin'): + try: + __import__(name) + module = sys.modules[name] + self.loaded_extensions.append(module) + except ImportError: + pydev_log.error('Unable to load extension ' + name) + + def _ensure_loaded(self): + if self.loaded_extensions is None: + self._load_modules() + + def _iter_attr(self): + for extension in self.loaded_extensions: + dunder_all = getattr(extension, '__all__', None) + for attr_name in dir(extension): + if not attr_name.startswith('_'): + if dunder_all is None or attr_name in dunder_all: + yield attr_name, getattr(extension, attr_name) + + def get_extension_classes(self, extension_type): + self._ensure_loaded() + if extension_type in self.type_to_instance: + return self.type_to_instance[extension_type] + handlers = self.type_to_instance.setdefault(extension_type, []) + for attr_name, attr in self._iter_attr(): + if isinstance(attr, type) and issubclass(attr, extension_type) and attr is not extension_type: + try: + handlers.append(attr()) + except: + pydev_log.error('Unable to load extension class' + attr_name, tb=True) + return handlers + + +EXTENSION_MANAGER_INSTANCE = ExtensionManager() + +def extensions_of_type(extension_type): + """ + + :param T extension_type: The type of the extension hook + :rtype: list[T] + """ + return EXTENSION_MANAGER_INSTANCE.get_extension_classes(extension_type) + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_frame.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_frame.py new file mode 100644 index 00000000..5458c330 --- /dev/null 
+++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_frame.py @@ -0,0 +1,781 @@ +import linecache +import os.path +import re +import sys +import traceback # @Reimport + +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_dont_trace +from _pydevd_bundle import pydevd_vars +from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint +from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \ + CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE +from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, get_thread_id, STATE_RUN, dict_iter_values, IS_PY3K, \ + RETURN_VALUES_DICT +from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised +from _pydevd_bundle.pydevd_utils import get_clsname_for_code +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +try: + from inspect import CO_GENERATOR +except: + CO_GENERATOR = 0 + +try: + from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace +except ImportError: + def send_signature_call_trace(*args, **kwargs): + pass + +basename = os.path.basename + +IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException') +DEBUG_START = ('pydevd.py', 'run') +DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile') +TRACE_PROPERTY = 'pydevd_traceproperty.py' +get_file_type = DONT_TRACE.get + + +def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value): + condition = breakpoint.condition + try: + val = eval(condition, new_frame.f_globals, new_frame.f_locals) + if not val: + return default_return_value + + except: + if type(condition) != type(''): + if hasattr(condition, 'encode'): + condition = condition.encode('utf-8') + + msg = 'Error while evaluating expression: %s\n' % (condition,) + sys.stderr.write(msg) + traceback.print_exc() + if not py_db.suspend_on_breakpoint_exception: + return default_return_value + else: + stop = True + try: + # add exception_type and stacktrace into thread additional info + etype, value, tb = sys.exc_info() + try: + error = ''.join(traceback.format_exception_only(etype, value)) + stack = traceback.extract_stack(f=tb.tb_frame.f_back) + + # On self.set_suspend(thread, CMD_SET_BREAK) this info will be + # sent to the client. + info.conditional_breakpoint_exception = \ + ('Condition:\n' + condition + '\n\nError:\n' + error, stack) + finally: + etype, value, tb = None, None, None + except: + traceback.print_exc() + + +def handle_breakpoint_expression(breakpoint, info, new_frame): + try: + try: + val = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals) + except: + val = sys.exc_info()[1] + finally: + if val is not None: + info.pydev_message = str(val) + + +#======================================================================================================================= +# PyDBFrame +#======================================================================================================================= +# IFDEF CYTHON +# cdef class PyDBFrame: +# ELSE +class PyDBFrame: + '''This makes the tracing for a given frame, so, the trace_dispatch + is used initially when we enter into a new context ('call') and then + is reused for the entire context. + ''' +# ENDIF + + + #Note: class (and not instance) attributes. 
+ + #Same thing in the main debugger but only considering the file contents, while the one in the main debugger + #considers the user input (so, the actual result must be a join of both). + filename_to_lines_where_exceptions_are_ignored = {} + filename_to_stat_info = {} + + # IFDEF CYTHON + # cdef tuple _args + # cdef int should_skip + # def __init__(self, tuple args): + # self._args = args # In the cython version we don't need to pass the frame + # self.should_skip = -1 # On cythonized version, put in instance. + # ELSE + should_skip = -1 # Default value in class (put in instance on set). + + def __init__(self, args): + #args = main_debugger, filename, base, info, t, frame + #yeap, much faster than putting in self and then getting it from self later on + self._args = args + # ENDIF + + def set_suspend(self, *args, **kwargs): + self._args[0].set_suspend(*args, **kwargs) + + def do_wait_suspend(self, *args, **kwargs): + self._args[0].do_wait_suspend(*args, **kwargs) + + # IFDEF CYTHON + # def trace_exception(self, frame, str event, arg): + # cdef bint flag; + # ELSE + def trace_exception(self, frame, event, arg): + # ENDIF + if event == 'exception': + flag, frame = self.should_stop_on_exception(frame, event, arg) + + if flag: + self.handle_exception(frame, event, arg) + return self.trace_dispatch + + return self.trace_exception + + def trace_return(self, frame, event, arg): + if event == 'return': + main_debugger, filename = self._args[0], self._args[1] + send_signature_return_trace(main_debugger, frame, filename, arg) + return self.trace_return + + # IFDEF CYTHON + # def should_stop_on_exception(self, frame, str event, arg): + # cdef PyDBAdditionalThreadInfo info; + # cdef bint flag; + # ELSE + def should_stop_on_exception(self, frame, event, arg): + # ENDIF + + # main_debugger, _filename, info, _thread = self._args + main_debugger = self._args[0] + info = self._args[2] + flag = False + + # STATE_SUSPEND = 2 + if info.pydev_state != 2: #and breakpoint is not None: + exception, value, trace = arg + + if trace is not None: #on jython trace is None on the first event + exception_breakpoint = get_exception_breakpoint( + exception, main_debugger.break_on_caught_exceptions) + + if exception_breakpoint is not None: + if exception_breakpoint.ignore_libraries: + if exception_breakpoint.notify_on_first_raise_only: + if main_debugger.first_appearance_in_scope(trace): + add_exception_to_frame(frame, (exception, value, trace)) + try: + info.pydev_message = exception_breakpoint.qname + except: + info.pydev_message = exception_breakpoint.qname.encode('utf-8') + flag = True + else: + pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename)) + flag = False + else: + if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace): + add_exception_to_frame(frame, (exception, value, trace)) + try: + info.pydev_message = exception_breakpoint.qname + except: + info.pydev_message = exception_breakpoint.qname.encode('utf-8') + flag = True + else: + flag = False + else: + try: + if main_debugger.plugin is not None: + result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg) + if result: + flag, frame = result + except: + flag = False + + return flag, frame + + def handle_exception(self, frame, event, arg): + try: + # print 'handle_exception', frame.f_lineno, frame.f_code.co_name + + # We have 3 things in arg: exception type, description, traceback object + trace_obj = arg[2] + main_debugger = self._args[0] + + if not 
hasattr(trace_obj, 'tb_next'): + return #Not always there on Jython... + + initial_trace_obj = trace_obj + if trace_obj.tb_next is None and trace_obj.tb_frame is frame: + #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check). + + if main_debugger.break_on_exceptions_thrown_in_same_context: + #Option: Don't break if an exception is caught in the same function from which it is thrown + return + else: + #Get the trace_obj from where the exception was raised... + while trace_obj.tb_next is not None: + trace_obj = trace_obj.tb_next + + + if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception: + for check_trace_obj in (initial_trace_obj, trace_obj): + filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1] + + + filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored + + + lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename) + if lines_ignored is None: + lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {} + + try: + curr_stat = os.stat(filename) + curr_stat = (curr_stat.st_size, curr_stat.st_mtime) + except: + curr_stat = None + + last_stat = self.filename_to_stat_info.get(filename) + if last_stat != curr_stat: + self.filename_to_stat_info[filename] = curr_stat + lines_ignored.clear() + try: + linecache.checkcache(filename) + except: + #Jython 2.1 + linecache.checkcache() + + from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename) + if from_user_input: + merged = {} + merged.update(lines_ignored) + #Override what we have with the related entries that the user entered + merged.update(from_user_input) + else: + merged = lines_ignored + + exc_lineno = check_trace_obj.tb_lineno + + # print ('lines ignored', lines_ignored) + # print ('user input', from_user_input) + # print ('merged', merged, 'curr', exc_lineno) + + if exc_lineno not in merged: #Note: check on merged but update lines_ignored. + try: + line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals) + except: + #Jython 2.1 + line = linecache.getline(filename, exc_lineno) + + if IGNORE_EXCEPTION_TAG.match(line) is not None: + lines_ignored[exc_lineno] = 1 + return + else: + #Put in the cache saying not to ignore + lines_ignored[exc_lineno] = 0 + else: + #Ok, dict has it already cached, so, let's check it... + if merged.get(exc_lineno, 0): + return + + + thread = self._args[3] + + try: + frame_id_to_frame = {} + frame_id_to_frame[id(frame)] = frame + f = trace_obj.tb_frame + while f is not None: + frame_id_to_frame[id(f)] = f + f = f.f_back + f = None + + thread_id = get_thread_id(thread) + pydevd_vars.add_additional_frame_by_id(thread_id, frame_id_to_frame) + try: + main_debugger.send_caught_exception_stack(thread, arg, id(frame)) + self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION) + self.do_wait_suspend(thread, frame, event, arg) + main_debugger.send_caught_exception_stack_proceeded(thread) + + finally: + pydevd_vars.remove_additional_frame_by_id(thread_id) + except: + traceback.print_exc() + + main_debugger.set_trace_for_frame_and_parents(frame) + finally: + #Clear some local variables... 
+ trace_obj = None + initial_trace_obj = None + check_trace_obj = None + f = None + frame_id_to_frame = None + main_debugger = None + thread = None + + def get_func_name(self, frame): + code_obj = frame.f_code + func_name = code_obj.co_name + try: + cls_name = get_clsname_for_code(code_obj, frame) + if cls_name is not None: + return "%s.%s" % (cls_name, func_name) + else: + return func_name + except: + traceback.print_exc() + return func_name + + def show_return_values(self, frame, arg): + try: + try: + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None) + if return_values_dict is None: + return_values_dict = {} + f_locals_back[RETURN_VALUES_DICT] = return_values_dict + name = self.get_func_name(frame) + return_values_dict[name] = arg + except: + traceback.print_exc() + finally: + f_locals_back = None + + def remove_return_values(self, main_debugger, frame): + try: + try: + # Showing return values was turned off, we should remove them from locals dict. + # The values can be in the current frame or in the back one + frame.f_locals.pop(RETURN_VALUES_DICT, None) + + f_locals_back = getattr(frame.f_back, "f_locals", None) + if f_locals_back is not None: + f_locals_back.pop(RETURN_VALUES_DICT, None) + except: + traceback.print_exc() + finally: + f_locals_back = None + + # IFDEF CYTHON + # cpdef trace_dispatch(self, frame, str event, arg): + # cdef str filename; + # cdef bint is_exception_event; + # cdef bint has_exception_breakpoints; + # cdef bint can_skip; + # cdef PyDBAdditionalThreadInfo info; + # cdef int step_cmd; + # cdef int line; + # cdef bint is_line; + # cdef bint is_call; + # cdef bint is_return; + # cdef str curr_func_name; + # cdef bint exist_result; + # cdef dict frame_skips_cache; + # cdef tuple frame_cache_key; + # cdef tuple line_cache_key; + # cdef int breakpoints_in_line_cache; + # cdef int breakpoints_in_frame_cache; + # cdef bint has_breakpoint_in_frame; + # ELSE + def trace_dispatch(self, frame, event, arg): + # ENDIF + + main_debugger, filename, info, thread, frame_skips_cache, frame_cache_key = self._args + # print('frame trace_dispatch', frame.f_lineno, frame.f_code.co_name, event, info.pydev_step_cmd) + try: + info.is_tracing = True + line = frame.f_lineno + line_cache_key = (frame_cache_key, line) + + if main_debugger._finish_debugging_session: + return None + + plugin_manager = main_debugger.plugin + + is_exception_event = event == 'exception' + has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks + + if is_exception_event: + if has_exception_breakpoints: + flag, frame = self.should_stop_on_exception(frame, event, arg) + if flag: + self.handle_exception(frame, event, arg) + return self.trace_dispatch + is_line = False + is_return = False + is_call = False + else: + is_line = event == 'line' + is_return = event == 'return' + is_call = event == 'call' + if not is_line and not is_return and not is_call: + # I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace. 
+ return None + + need_trace_return = False + if is_call and main_debugger.signature_factory: + need_trace_return = send_signature_call_trace(main_debugger, frame, filename) + if is_return and main_debugger.signature_factory: + send_signature_return_trace(main_debugger, frame, filename, arg) + + stop_frame = info.pydev_step_stop + step_cmd = info.pydev_step_cmd + + if is_exception_event: + breakpoints_for_file = None + # CMD_STEP_OVER = 108 + if stop_frame and stop_frame is not frame and step_cmd == 108 and \ + arg[0] in (StopIteration, GeneratorExit) and arg[2] is None: + info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + info.pydev_step_stop = None + else: + # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break + # eventually. Force the step mode to step into and the step stop frame to None. + # I.e.: F6 in the end of a function should stop in the next possible position (instead of forcing the user + # to make a step in or step over at that location). + # Note: this is especially troublesome when we're skipping code with the + # @DontTrace comment. + if stop_frame is frame and is_return and step_cmd in (109, 108): # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + if not frame.f_code.co_flags & 0x20: # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR) + info.pydev_step_cmd = 107 # CMD_STEP_INTO = 107 + info.pydev_step_stop = None + + breakpoints_for_file = main_debugger.breakpoints.get(filename) + + can_skip = False + + if info.pydev_state == 1: # STATE_RUN = 1 + #we can skip if: + #- we have no stop marked + #- we should make a step return/step over and we're not in the current frame + # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108 + can_skip = (step_cmd == -1 and stop_frame is None)\ + or (step_cmd in (109, 108) and stop_frame is not frame) + + if can_skip: + if plugin_manager is not None and main_debugger.has_plugin_line_breaks: + can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame) + + # CMD_STEP_OVER = 108 + if can_skip and is_return and main_debugger.show_return_values and info.pydev_step_cmd == 108 and frame.f_back is info.pydev_step_stop: + # trace function for showing return values after step over + can_skip = False + + # Let's check to see if we are in a function that has a breakpoint. If we don't have a breakpoint, + # we will return nothing for the next trace + # also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway, + # so, that's why the additional checks are there. + if not breakpoints_for_file: + if can_skip: + if has_exception_breakpoints: + return self.trace_exception + else: + if need_trace_return: + return self.trace_return + else: + return None + + else: + # When cached, 0 means we don't have a breakpoint and 1 means we have. + if can_skip: + breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1) + if breakpoints_in_line_cache == 0: + return self.trace_dispatch + + breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1) + if breakpoints_in_frame_cache != -1: + # Gotten from cache. 
+ has_breakpoint_in_frame = breakpoints_in_frame_cache == 1 + + else: + has_breakpoint_in_frame = False + # Checks the breakpoint to see if there is a context match in some function + curr_func_name = frame.f_code.co_name + + #global context is set with an empty name + if curr_func_name in ('?', ''): + curr_func_name = '' + + for breakpoint in dict_iter_values(breakpoints_for_file): #jython does not support itervalues() + #will match either global or some function + if breakpoint.func_name in ('None', curr_func_name): + has_breakpoint_in_frame = True + break + + # Cache the value (1 or 0 or -1 for default because of cython). + if has_breakpoint_in_frame: + frame_skips_cache[frame_cache_key] = 1 + else: + frame_skips_cache[frame_cache_key] = 0 + + + if can_skip and not has_breakpoint_in_frame: + if has_exception_breakpoints: + return self.trace_exception + else: + if need_trace_return: + return self.trace_return + else: + return None + + #We may have hit a breakpoint or we are already in step mode. Either way, let's check what we should do in this frame + # print('NOT skipped', frame.f_lineno, frame.f_code.co_name, event) + + try: + flag = False + #return is not taken into account for breakpoint hit because we'd have a double-hit in this case + #(one for the line and the other for the return). + + stop_info = {} + breakpoint = None + exist_result = False + stop = False + bp_type = None + if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file: + breakpoint = breakpoints_for_file[line] + new_frame = frame + stop = True + if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return): + stop = False #we don't stop on breakpoint if we have to stop by step-over (it will be processed later) + elif plugin_manager is not None and main_debugger.has_plugin_line_breaks: + result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args) + if result: + exist_result = True + flag, breakpoint, new_frame, bp_type = result + + if breakpoint: + #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + # lets do the conditional stuff here + if stop or exist_result: + condition = breakpoint.condition + if condition is not None: + result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame, + self.trace_dispatch) + if result is not None: + return result + + if breakpoint.expression is not None: + handle_breakpoint_expression(breakpoint, info, new_frame) + + if not main_debugger.first_breakpoint_reached: + if is_call: + back = frame.f_back + if back is not None: + # When we start debug session, we call execfile in pydevd run function. It produces an additional + # 'call' event for tracing and we stop on the first line of code twice. 
+ _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \ + (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]): + stop = False + main_debugger.first_breakpoint_reached = True + else: + # if the frame is traced after breakpoint stop, + # but the file should be ignored while stepping because of filters + if step_cmd != -1: + if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename): + # ignore files matching stepping filters + return self.trace_dispatch + if main_debugger.is_filter_libraries and main_debugger.not_in_scope(filename): + # ignore library files while stepping + return self.trace_dispatch + + if main_debugger.show_return_values: + if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop: + self.show_return_values(frame, arg) + + elif main_debugger.remove_return_values_flag: + try: + self.remove_return_values(main_debugger, frame) + finally: + main_debugger.remove_return_values_flag = False + + if stop: + self.set_suspend(thread, CMD_SET_BREAK) + if breakpoint and breakpoint.suspend_policy == "ALL": + main_debugger.suspend_all_other_threads(thread) + elif flag and plugin_manager is not None: + result = plugin_manager.suspend(main_debugger, thread, frame, bp_type) + if result: + frame = result + + # if thread has a suspend flag, we suspend with a busy wait + if info.pydev_state == STATE_SUSPEND: + self.do_wait_suspend(thread, frame, event, arg) + return self.trace_dispatch + else: + if not breakpoint and not is_return: + # No stop from anyone and no breakpoint found in line (cache that). + frame_skips_cache[line_cache_key] = 0 + + except: + traceback.print_exc() + raise + + #step handling. We stop when we hit the right frame + try: + should_skip = 0 + if pydevd_dont_trace.should_trace_hook is not None: + if self.should_skip == -1: + # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times). + # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code + # Which will be handled by this frame is read-only, so, we can cache it safely. + if not pydevd_dont_trace.should_trace_hook(frame, filename): + # -1, 0, 1 to be Cython-friendly + should_skip = self.should_skip = 1 + else: + should_skip = self.should_skip = 0 + else: + should_skip = self.should_skip + + plugin_stop = False + if should_skip: + stop = False + + elif step_cmd == CMD_STEP_INTO: + stop = is_line or is_return + if plugin_manager is not None: + result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd == CMD_STEP_INTO_MY_CODE: + if not main_debugger.not_in_scope(frame.f_code.co_filename): + stop = is_line + + elif step_cmd == CMD_STEP_OVER: + stop = stop_frame is frame and (is_line or is_return) + + if frame.f_code.co_flags & CO_GENERATOR: + if is_return: + stop = False + + if plugin_manager is not None: + result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop) + if result: + stop, plugin_stop = result + + elif step_cmd == CMD_SMART_STEP_INTO: + stop = False + if info.pydev_smart_step_stop is frame: + info.pydev_func_name = '.invalid.' 
# Must match the type in cython + info.pydev_smart_step_stop = None + + if is_line or is_exception_event: + curr_func_name = frame.f_code.co_name + + #global context is set with an empty name + if curr_func_name in ('?', '') or curr_func_name is None: + curr_func_name = '' + + if curr_func_name == info.pydev_func_name: + stop = True + + elif step_cmd == CMD_STEP_RETURN: + stop = is_return and stop_frame is frame + + elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT: + stop = False + + if is_line or is_exception_event: + #Yes, we can only act on line events (weird hum?) + #Note: This code is duplicated at pydevd.py + #Acting on exception events after debugger breaks with exception + curr_func_name = frame.f_code.co_name + + #global context is set with an empty name + if curr_func_name in ('?', ''): + curr_func_name = '' + + if curr_func_name == info.pydev_func_name: + line = info.pydev_next_line + if frame.f_lineno == line: + stop = True + else: + if frame.f_trace is None: + frame.f_trace = self.trace_dispatch + frame.f_lineno = line + frame.f_trace = None + stop = True + + else: + stop = False + + if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"): + f_code = getattr(frame.f_back, 'f_code', None) + if f_code is not None: + back_filename = os.path.basename(f_code.co_filename) + file_type = get_file_type(back_filename) + if file_type == PYDEV_FILE: + stop = False + + if plugin_stop: + stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd) + elif stop: + if is_line: + self.set_suspend(thread, step_cmd) + self.do_wait_suspend(thread, frame, event, arg) + else: #return event + back = frame.f_back + if back is not None: + #When we get to the pydevd run function, the debugging has actually finished for the main thread + #(note that it can still go on for other threads, but for this one, we just make it finish) + #So, just setting it to None should be OK + _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back) + if base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]: + back = None + + elif base == TRACE_PROPERTY: + # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging) + #if we're in a return, we want it to appear to the user in the previous frame! + return None + + elif pydevd_dont_trace.should_trace_hook is not None: + if not pydevd_dont_trace.should_trace_hook(back, back_filename): + # In this case, we'll have to skip the previous one because it shouldn't be traced. + # Also, we have to reset the tracing, because if the parent's parent (or some + # other parent) has to be traced and it's not currently, we wouldn't stop where + # we should anymore (so, a step in/over/return may not stop anywhere if no parent is traced). + # Related test: _debugger_case17a.py + main_debugger.set_trace_for_frame_and_parents(back, overwrite_prev_trace=True) + return None + + if back is not None: + #if we're in a return, we want it to appear to the user in the previous frame! 
+ self.set_suspend(thread, step_cmd) + self.do_wait_suspend(thread, back, event, arg) + else: + #in jython we may not have a back frame + info.pydev_step_stop = None + info.pydev_step_cmd = -1 + info.pydev_state = STATE_RUN + + except KeyboardInterrupt: + raise + except: + try: + traceback.print_exc() + info.pydev_step_cmd = -1 + except: + return None + + #if we are quitting, let's stop the tracing + retVal = None + if not main_debugger.quitting: + retVal = self.trace_dispatch + + return retVal + finally: + info.is_tracing = False + + #end trace_dispatch + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_frame_utils.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_frame_utils.py new file mode 100644 index 00000000..fbefd843 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_frame_utils.py @@ -0,0 +1,59 @@ +from _pydevd_bundle.pydevd_constants import IS_PY3K + +class Frame(object): + def __init__( + self, + f_back, + f_fileno, + f_code, + f_locals, + f_globals=None, + f_trace=None): + self.f_back = f_back + self.f_lineno = f_fileno + self.f_code = f_code + self.f_locals = f_locals + self.f_globals = f_globals + self.f_trace = f_trace + + if self.f_globals is None: + self.f_globals = {} + + +class FCode(object): + def __init__(self, name, filename): + self.co_name = name + self.co_filename = filename + + +def add_exception_to_frame(frame, exception_info): + frame.f_locals['__exception__'] = exception_info + +FILES_WITH_IMPORT_HOOKS = ['pydev_monkey_qt.py', 'pydev_import_hook.py'] + +def just_raised(trace): + if trace is None: + return False + if trace.tb_next is None: + if IS_PY3K: + if trace.tb_frame.f_code.co_filename != '': + # Do not stop on inner exceptions in py3 while importing + return True + else: + return True + if trace.tb_next is not None: + filename = trace.tb_next.tb_frame.f_code.co_filename + # ImportError should appear in a user's code, not inside debugger + for file in FILES_WITH_IMPORT_HOOKS: + if filename.endswith(file): + return True + return False + +def cached_call(obj, func, *args): + cached_name = '_cached_' + func.__name__ + if not hasattr(obj, cached_name): + setattr(obj, cached_name, func(*args)) + + return getattr(obj, cached_name) + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_import_class.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_import_class.py new file mode 100644 index 00000000..ee3527c5 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_import_class.py @@ -0,0 +1,68 @@ +#Note: code gotten from _pydev_imports_tipper. + +import sys + +def _imp(name, log=None): + try: + return __import__(name) + except: + if '.' in name: + sub = name[0:name.rfind('.')] + + if log is not None: + log.add_content('Unable to import', name, 'trying with', sub) + log.add_exception() + + return _imp(sub, log) + else: + s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path) + if log is not None: + log.add_content(s) + log.add_exception() + + raise ImportError(s) + + +IS_IPY = False +if sys.platform == 'cli': + IS_IPY = True + _old_imp = _imp + def _imp(name, log=None): + #We must add a reference in clr for .Net + import clr #@UnresolvedImport + initial_name = name + while '.' in name: + try: + clr.AddReference(name) + break #If it worked, that's OK. + except: + name = name[0:name.rfind('.')] + else: + try: + clr.AddReference(name) + except: + pass #That's OK (not dot net module). 
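+        # Whether or not an assembly reference was registered by the
+        # clr.AddReference attempts above, the Python-level import of the
+        # originally requested dotted name is still delegated to the regular
+        # _imp implementation below.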
+ + return _old_imp(initial_name, log) + + +def import_name(name, log=None): + mod = _imp(name, log) + + components = name.split('.') + + old_comp = None + for comp in components[1:]: + try: + #this happens in the following case: + #we have mx.DateTime.mxDateTime.mxDateTime.pyd + #but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd + mod = getattr(mod, comp) + except AttributeError: + if old_comp != comp: + raise + + old_comp = comp + + return mod + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_io.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_io.py new file mode 100644 index 00000000..197f72c2 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_io.py @@ -0,0 +1,101 @@ +from _pydevd_bundle import pydevd_constants + +IS_PY3K = pydevd_constants.IS_PY3K + +class IORedirector: + '''This class works to redirect the write function to many streams + ''' + + def __init__(self, *args): + self._redirectTo = args + + def write(self, s): + for r in self._redirectTo: + try: + r.write(s) + except: + pass + + def isatty(self): + return False + + def flush(self): + for r in self._redirectTo: + r.flush() + + def __getattr__(self, name): + for r in self._redirectTo: + if hasattr(r, name): + return getattr(r, name) + raise AttributeError(name) + +class IOBuf: + '''This class works as a replacement for stdio and stderr. + It is a buffer and when its contents are requested, it will erase what + + it has so far so that the next return will not return the same contents again. + ''' + def __init__(self): + self.buflist = [] + import os + self.encoding = os.environ.get('PYTHONIOENCODING', 'utf-8') + + def getvalue(self): + b = self.buflist + self.buflist = [] #clear it + return ''.join(b) + + def write(self, s): + if not IS_PY3K: + if isinstance(s, unicode): + s = s.encode(self.encoding) + self.buflist.append(s) + + def isatty(self): + return False + + def flush(self): + pass + + def empty(self): + return len(self.buflist) == 0 + +class _RedirectionsHolder: + _stack_stdout = [] + _stack_stderr = [] + + +def start_redirect(keep_original_redirection=False, std='stdout'): + ''' + @param std: 'stdout', 'stderr', or 'both' + ''' + import sys + buf = IOBuf() + + if std == 'both': + config_stds = ['stdout', 'stderr'] + else: + config_stds = [std] + + for std in config_stds: + original = getattr(sys, std) + stack = getattr(_RedirectionsHolder, '_stack_%s' % std) + stack.append(original) + + if keep_original_redirection: + setattr(sys, std, IORedirector(buf, getattr(sys, std))) + else: + setattr(sys, std, buf) + return buf + + +def end_redirect(std='stdout'): + import sys + if std == 'both': + config_stds = ['stdout', 'stderr'] + else: + config_stds = [std] + for std in config_stds: + stack = getattr(_RedirectionsHolder, '_stack_%s' % std) + setattr(sys, std, stack.pop()) + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_kill_all_pydevd_threads.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_kill_all_pydevd_threads.py new file mode 100644 index 00000000..1ae81e91 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_kill_all_pydevd_threads.py @@ -0,0 +1,8 @@ +from _pydevd_bundle.pydevd_comm import PyDBDaemonThread +from _pydevd_bundle.pydevd_constants import dict_keys + +def kill_all_pydev_threads(): + threads = dict_keys(PyDBDaemonThread.created_pydb_daemon_threads) + for t in threads: + if hasattr(t, 'do_kill_pydev_thread'): + t.do_kill_pydev_thread() diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_plugin_utils.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_plugin_utils.py new file mode 100644 index 
00000000..0cd0d761 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_plugin_utils.py @@ -0,0 +1,91 @@ +import types + +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_trace_api + +try: + from pydevd_plugins import django_debug +except: + django_debug = None + pydev_log.debug('Unable to load django_debug plugin') + +try: + from pydevd_plugins import jinja2_debug +except: + jinja2_debug = None + pydev_log.debug('Unable to load jinja2_debug plugin') + +def load_plugins(): + plugins = [] + if django_debug is not None: + plugins.append(django_debug) + + if jinja2_debug is not None: + plugins.append(jinja2_debug) + return plugins + + +def bind_func_to_method(func, obj, method_name): + bound_method = types.MethodType(func, obj) + + setattr(obj, method_name, bound_method) + return bound_method + + +class PluginManager(object): + + def __init__(self, main_debugger): + self.plugins = load_plugins() + self.active_plugins = [] + self.main_debugger = main_debugger + self.rebind_methods() + + def add_breakpoint(self, func_name, *args, **kwargs): + # add breakpoint for plugin and remember which plugin to use in tracing + for plugin in self.plugins: + if hasattr(plugin, func_name): + func = getattr(plugin, func_name) + result = func(self, *args, **kwargs) + if result: + self.activate(plugin) + + return result + return None + + def activate(self, plugin): + if plugin not in self.active_plugins: + self.active_plugins.append(plugin) + self.rebind_methods() + + def rebind_methods(self): + if len(self.active_plugins) == 0: + self.bind_functions(pydevd_trace_api, getattr, pydevd_trace_api) + elif len(self.active_plugins) == 1: + self.bind_functions(pydevd_trace_api, getattr, self.active_plugins[0]) + else: + self.bind_functions(pydevd_trace_api, create_dispatch, self.active_plugins) + + def bind_functions(self, interface, function_factory, arg): + for name in dir(interface): + func = function_factory(arg, name) + if type(func) == types.FunctionType: + bind_func_to_method(func, self, name) + + +def create_dispatch(obj, name): + def dispatch(self, *args, **kwargs): + result = None + for p in self.active_plugins: + r = getattr(p, name)(self, *args, **kwargs) + if not result: + result = r + return result + return dispatch + + + + + + + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_process_net_command.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_process_net_command.py new file mode 100644 index 00000000..5931e3e5 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_process_net_command.py @@ -0,0 +1,709 @@ +import os +import sys +import traceback + +from _pydev_bundle import pydev_log +from _pydevd_bundle import pydevd_traceproperty, pydevd_dont_trace +import pydevd_tracing +import pydevd_file_utils +from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, update_exception_hook +from _pydevd_bundle.pydevd_comm import CMD_RUN, CMD_VERSION, CMD_LIST_THREADS, CMD_THREAD_KILL, InternalTerminateThread, \ + CMD_THREAD_SUSPEND, pydevd_find_thread_by_id, CMD_THREAD_RUN, InternalRunThread, CMD_STEP_INTO, CMD_STEP_OVER, \ + CMD_STEP_RETURN, CMD_STEP_INTO_MY_CODE, InternalStepThread, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, \ + CMD_SMART_STEP_INTO, InternalSetNextStatementThread, CMD_RELOAD_CODE, ReloadCodeCommand, CMD_CHANGE_VARIABLE, \ + InternalChangeVariable, CMD_GET_VARIABLE, InternalGetVariable, CMD_GET_ARRAY, InternalGetArray, CMD_GET_COMPLETIONS, \ + InternalGetCompletions, CMD_GET_FRAME, InternalGetFrame, CMD_SET_BREAK, file_system_encoding, CMD_REMOVE_BREAK, \ + 
CMD_EVALUATE_EXPRESSION, CMD_EXEC_EXPRESSION, InternalEvaluateExpression, CMD_CONSOLE_EXEC, InternalConsoleExec, \ + CMD_SET_PY_EXCEPTION, CMD_GET_FILE_CONTENTS, CMD_SET_PROPERTY_TRACE, CMD_ADD_EXCEPTION_BREAK, \ + CMD_REMOVE_EXCEPTION_BREAK, CMD_LOAD_SOURCE, CMD_ADD_DJANGO_EXCEPTION_BREAK, CMD_REMOVE_DJANGO_EXCEPTION_BREAK, \ + CMD_EVALUATE_CONSOLE_EXPRESSION, InternalEvaluateConsoleExpression, InternalConsoleGetCompletions, \ + CMD_RUN_CUSTOM_OPERATION, InternalRunCustomOperation, CMD_IGNORE_THROWN_EXCEPTION_AT, CMD_ENABLE_DONT_TRACE, \ + CMD_SHOW_RETURN_VALUES, ID_TO_MEANING, CMD_GET_DESCRIPTION, InternalGetDescription +from _pydevd_bundle.pydevd_constants import get_thread_id, IS_PY3K, DebugInfoHolder, dict_keys, \ + STATE_RUN + + +def process_net_command(py_db, cmd_id, seq, text): + '''Processes a command received from the Java side + + @param cmd_id: the id of the command + @param seq: the sequence of the command + @param text: the text received in the command + + @note: this method is run as a big switch... after doing some tests, it's not clear whether changing it for + a dict id --> function call will have better performance result. A simple test with xrange(10000000) showed + that the gains from having a fast access to what should be executed are lost because of the function call in + a way that if we had 10 elements in the switch the if..elif are better -- but growing the number of choices + makes the solution with the dispatch look better -- so, if this gets more than 20-25 choices at some time, + it may be worth refactoring it (actually, reordering the ifs so that the ones used mostly come before + probably will give better performance). + ''' + # print(ID_TO_MEANING[str(cmd_id)], repr(text)) + + py_db._main_lock.acquire() + try: + try: + cmd = None + if cmd_id == CMD_RUN: + py_db.ready_to_run = True + + elif cmd_id == CMD_VERSION: + # response is version number + # ide_os should be 'WINDOWS' or 'UNIX'. + ide_os = 'WINDOWS' + + # Breakpoints can be grouped by 'LINE' or by 'ID'. + breakpoints_by = 'LINE' + + splitted = text.split('\t') + if len(splitted) == 1: + _local_version = splitted + + elif len(splitted) == 2: + _local_version, ide_os = splitted + + elif len(splitted) == 3: + _local_version, ide_os, breakpoints_by = splitted + + if breakpoints_by == 'ID': + py_db._set_breakpoints_with_id = True + else: + py_db._set_breakpoints_with_id = False + + pydevd_file_utils.set_ide_os(ide_os) + + cmd = py_db.cmd_factory.make_version_message(seq) + + elif cmd_id == CMD_LIST_THREADS: + # response is a list of threads + cmd = py_db.cmd_factory.make_list_threads_message(seq) + + elif cmd_id == CMD_THREAD_KILL: + int_cmd = InternalTerminateThread(text) + py_db.post_internal_command(int_cmd, text) + + elif cmd_id == CMD_THREAD_SUSPEND: + # Yes, thread suspend is still done at this point, not through an internal command! 
+ t = pydevd_find_thread_by_id(text) + if t and not hasattr(t, 'pydev_do_not_trace'): + additional_info = None + try: + additional_info = t.additional_info + except AttributeError: + pass # that's ok, no info currently set + + if additional_info is not None: + for frame in additional_info.iter_frames(t): + py_db.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True) + del frame + + py_db.set_suspend(t, CMD_THREAD_SUSPEND) + elif text.startswith('__frame__:'): + sys.stderr.write("Can't suspend tasklet: %s\n" % (text,)) + + elif cmd_id == CMD_THREAD_RUN: + t = pydevd_find_thread_by_id(text) + if t: + t.additional_info.pydev_step_cmd = -1 + t.additional_info.pydev_step_stop = None + t.additional_info.pydev_state = STATE_RUN + + elif text.startswith('__frame__:'): + sys.stderr.write("Can't make tasklet run: %s\n" % (text,)) + + + elif cmd_id == CMD_STEP_INTO or cmd_id == CMD_STEP_OVER or cmd_id == CMD_STEP_RETURN or \ + cmd_id == CMD_STEP_INTO_MY_CODE: + # we received some command to make a single step + t = pydevd_find_thread_by_id(text) + if t: + thread_id = get_thread_id(t) + int_cmd = InternalStepThread(thread_id, cmd_id) + py_db.post_internal_command(int_cmd, thread_id) + + elif text.startswith('__frame__:'): + sys.stderr.write("Can't make tasklet step command: %s\n" % (text,)) + + + elif cmd_id == CMD_RUN_TO_LINE or cmd_id == CMD_SET_NEXT_STATEMENT or cmd_id == CMD_SMART_STEP_INTO: + # we received some command to make a single step + thread_id, line, func_name = text.split('\t', 2) + t = pydevd_find_thread_by_id(thread_id) + if t: + int_cmd = InternalSetNextStatementThread(thread_id, cmd_id, line, func_name) + py_db.post_internal_command(int_cmd, thread_id) + elif thread_id.startswith('__frame__:'): + sys.stderr.write("Can't set next statement in tasklet: %s\n" % (thread_id,)) + + + elif cmd_id == CMD_RELOAD_CODE: + # we received some command to make a reload of a module + module_name = text.strip() + + thread_id = '*' # Any thread + + # Note: not going for the main thread because in this case it'd only do the load + # when we stopped on a breakpoint. 
+ # for tid, t in py_db._running_thread_ids.items(): #Iterate in copy + # thread_name = t.getName() + # + # print thread_name, get_thread_id(t) + # #Note: if possible, try to reload on the main thread + # if thread_name == 'MainThread': + # thread_id = tid + + int_cmd = ReloadCodeCommand(module_name, thread_id) + py_db.post_internal_command(int_cmd, thread_id) + + + elif cmd_id == CMD_CHANGE_VARIABLE: + # the text is: thread\tstackframe\tFRAME|GLOBAL\tattribute_to_change\tvalue_to_change + try: + thread_id, frame_id, scope, attr_and_value = text.split('\t', 3) + + tab_index = attr_and_value.rindex('\t') + attr = attr_and_value[0:tab_index].replace('\t', '.') + value = attr_and_value[tab_index + 1:] + int_cmd = InternalChangeVariable(seq, thread_id, frame_id, scope, attr, value) + py_db.post_internal_command(int_cmd, thread_id) + + except: + traceback.print_exc() + + elif cmd_id == CMD_GET_VARIABLE: + # we received some command to get a variable + # the text is: thread_id\tframe_id\tFRAME|GLOBAL\tattributes* + try: + thread_id, frame_id, scopeattrs = text.split('\t', 2) + + if scopeattrs.find('\t') != -1: # there are attributes beyond scope + scope, attrs = scopeattrs.split('\t', 1) + else: + scope, attrs = (scopeattrs, None) + + int_cmd = InternalGetVariable(seq, thread_id, frame_id, scope, attrs) + py_db.post_internal_command(int_cmd, thread_id) + + except: + traceback.print_exc() + + elif cmd_id == CMD_GET_ARRAY: + # we received some command to get an array variable + # the text is: thread_id\tframe_id\tFRAME|GLOBAL\tname\ttemp\troffs\tcoffs\trows\tcols\tformat + try: + roffset, coffset, rows, cols, format, thread_id, frame_id, scopeattrs = text.split('\t', 7) + + if scopeattrs.find('\t') != -1: # there are attributes beyond scope + scope, attrs = scopeattrs.split('\t', 1) + else: + scope, attrs = (scopeattrs, None) + + int_cmd = InternalGetArray(seq, roffset, coffset, rows, cols, format, thread_id, frame_id, scope, attrs) + py_db.post_internal_command(int_cmd, thread_id) + + except: + traceback.print_exc() + + elif cmd_id == CMD_SHOW_RETURN_VALUES: + try: + show_return_values = text.split('\t')[1] + if int(show_return_values) == 1: + py_db.show_return_values = True + else: + if py_db.show_return_values: + # We should remove saved return values + py_db.remove_return_values_flag = True + py_db.show_return_values = False + pydev_log.debug("Show return values: %s\n" % py_db.show_return_values) + except: + traceback.print_exc() + + elif cmd_id == CMD_GET_COMPLETIONS: + # we received some command to get a variable + # the text is: thread_id\tframe_id\tactivation token + try: + thread_id, frame_id, scope, act_tok = text.split('\t', 3) + + int_cmd = InternalGetCompletions(seq, thread_id, frame_id, act_tok) + py_db.post_internal_command(int_cmd, thread_id) + + except: + traceback.print_exc() + elif cmd_id == CMD_GET_DESCRIPTION: + try: + + thread_id, frame_id, expression = text.split('\t', 2) + int_cmd = InternalGetDescription(seq, thread_id, frame_id, expression) + py_db.post_internal_command(int_cmd, thread_id) + except: + traceback.print_exc() + + elif cmd_id == CMD_GET_FRAME: + thread_id, frame_id, scope = text.split('\t', 2) + + int_cmd = InternalGetFrame(seq, thread_id, frame_id) + py_db.post_internal_command(int_cmd, thread_id) + + elif cmd_id == CMD_SET_BREAK: + # func name: 'None': match anything. Empty: match global, specified: only method context. + # command to add some breakpoint. + # text is file\tline. 
Add to breakpoints dictionary + suspend_policy = "NONE" + if py_db._set_breakpoints_with_id: + breakpoint_id, type, file, line, func_name, condition, expression = text.split('\t', 6) + + breakpoint_id = int(breakpoint_id) + line = int(line) + + # We must restore new lines and tabs as done in + # AbstractDebugTarget.breakpointAdded + condition = condition.replace("@_@NEW_LINE_CHAR@_@", '\n').\ + replace("@_@TAB_CHAR@_@", '\t').strip() + + expression = expression.replace("@_@NEW_LINE_CHAR@_@", '\n').\ + replace("@_@TAB_CHAR@_@", '\t').strip() + else: + #Note: this else should be removed after PyCharm migrates to setting + #breakpoints by id (and ideally also provides func_name). + type, file, line, func_name, suspend_policy, condition, expression = text.split('\t', 6) + # If we don't have an id given for each breakpoint, consider + # the id to be the line. + breakpoint_id = line = int(line) + + condition = condition.replace("@_@NEW_LINE_CHAR@_@", '\n'). \ + replace("@_@TAB_CHAR@_@", '\t').strip() + + expression = expression.replace("@_@NEW_LINE_CHAR@_@", '\n'). \ + replace("@_@TAB_CHAR@_@", '\t').strip() + + if not IS_PY3K: # In Python 3, the frame object will have unicode for the file, whereas on python 2 it has a byte-array encoded with the filesystem encoding. + file = file.encode(file_system_encoding) + + file = pydevd_file_utils.norm_file_to_server(file) + + if not pydevd_file_utils.exists(file): + sys.stderr.write('pydev debugger: warning: trying to add breakpoint'\ + ' to file that does not exist: %s (will have no effect)\n' % (file,)) + sys.stderr.flush() + + + if len(condition) <= 0 or condition is None or condition == "None": + condition = None + + if len(expression) <= 0 or expression is None or expression == "None": + expression = None + + if type == 'python-line': + breakpoint = LineBreakpoint(line, condition, func_name, expression, suspend_policy) + breakpoints = py_db.breakpoints + file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint + supported_type = True + else: + result = None + plugin = py_db.get_plugin_lazy_init() + if plugin is not None: + result = plugin.add_breakpoint('add_line_breakpoint', py_db, type, file, line, condition, expression, func_name) + if result is not None: + supported_type = True + breakpoint, breakpoints = result + file_to_id_to_breakpoint = py_db.file_to_id_to_plugin_breakpoint + else: + supported_type = False + + if not supported_type: + raise NameError(type) + + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + pydev_log.debug('Added breakpoint:%s - line:%s - func_name:%s\n' % (file, line, func_name.encode('utf-8'))) + sys.stderr.flush() + + if file in file_to_id_to_breakpoint: + id_to_pybreakpoint = file_to_id_to_breakpoint[file] + else: + id_to_pybreakpoint = file_to_id_to_breakpoint[file] = {} + + id_to_pybreakpoint[breakpoint_id] = breakpoint + py_db.consolidate_breakpoints(file, id_to_pybreakpoint, breakpoints) + if py_db.plugin is not None: + py_db.has_plugin_line_breaks = py_db.plugin.has_line_breaks() + + py_db.set_tracing_for_untraced_contexts_if_not_frame_eval(overwrite_prev_trace=True) + py_db.enable_tracing_in_frames_while_running_if_frame_eval() + + elif cmd_id == CMD_REMOVE_BREAK: + #command to remove some breakpoint + #text is type\file\tid. Remove from breakpoints dictionary + breakpoint_type, file, breakpoint_id = text.split('\t', 2) + + if not IS_PY3K: # In Python 3, the frame object will have unicode for the file, whereas on python 2 it has a byte-array encoded with the filesystem encoding. 
+ file = file.encode(file_system_encoding) + + file = pydevd_file_utils.norm_file_to_server(file) + + try: + breakpoint_id = int(breakpoint_id) + except ValueError: + pydev_log.error('Error removing breakpoint. Expected breakpoint_id to be an int. Found: %s' % (breakpoint_id,)) + + else: + file_to_id_to_breakpoint = None + if breakpoint_type == 'python-line': + breakpoints = py_db.breakpoints + file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint + elif py_db.get_plugin_lazy_init() is not None: + result = py_db.plugin.get_breakpoints(py_db, breakpoint_type) + if result is not None: + file_to_id_to_breakpoint = py_db.file_to_id_to_plugin_breakpoint + breakpoints = result + + if file_to_id_to_breakpoint is None: + pydev_log.error('Error removing breakpoint. Cant handle breakpoint of type %s' % breakpoint_type) + else: + try: + id_to_pybreakpoint = file_to_id_to_breakpoint.get(file, {}) + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + existing = id_to_pybreakpoint[breakpoint_id] + sys.stderr.write('Removed breakpoint:%s - line:%s - func_name:%s (id: %s)\n' % ( + file, existing.line, existing.func_name.encode('utf-8'), breakpoint_id)) + + del id_to_pybreakpoint[breakpoint_id] + py_db.consolidate_breakpoints(file, id_to_pybreakpoint, breakpoints) + if py_db.plugin is not None: + py_db.has_plugin_line_breaks = py_db.plugin.has_line_breaks() + + except KeyError: + pydev_log.error("Error removing breakpoint: Breakpoint id not found: %s id: %s. Available ids: %s\n" % ( + file, breakpoint_id, dict_keys(id_to_pybreakpoint))) + + + elif cmd_id == CMD_EVALUATE_EXPRESSION or cmd_id == CMD_EXEC_EXPRESSION: + #command to evaluate the given expression + #text is: thread\tstackframe\tLOCAL\texpression + temp_name = "" + try: + thread_id, frame_id, scope, expression, trim, temp_name = text.split('\t', 5) + except ValueError: + thread_id, frame_id, scope, expression, trim = text.split('\t', 4) + int_cmd = InternalEvaluateExpression(seq, thread_id, frame_id, expression, + cmd_id == CMD_EXEC_EXPRESSION, int(trim) == 1, temp_name) + py_db.post_internal_command(int_cmd, thread_id) + + elif cmd_id == CMD_CONSOLE_EXEC: + #command to exec expression in console, in case expression is only partially valid 'False' is returned + #text is: thread\tstackframe\tLOCAL\texpression + + thread_id, frame_id, scope, expression = text.split('\t', 3) + + int_cmd = InternalConsoleExec(seq, thread_id, frame_id, expression) + py_db.post_internal_command(int_cmd, thread_id) + + elif cmd_id == CMD_SET_PY_EXCEPTION: + # Command which receives set of exceptions on which user wants to break the debugger + # text is: break_on_uncaught;break_on_caught;TypeError;ImportError;zipimport.ZipImportError; + # This API is optional and works 'in bulk' -- it's possible + # to get finer-grained control with CMD_ADD_EXCEPTION_BREAK/CMD_REMOVE_EXCEPTION_BREAK + # which allows setting caught/uncaught per exception. 
+ # + splitted = text.split(';') + py_db.break_on_uncaught_exceptions = {} + py_db.break_on_caught_exceptions = {} + added = [] + if len(splitted) >= 4: + if splitted[0] == 'true': + break_on_uncaught = True + else: + break_on_uncaught = False + + if splitted[1] == 'true': + break_on_caught = True + else: + break_on_caught = False + + if splitted[2] == 'true': + py_db.break_on_exceptions_thrown_in_same_context = True + else: + py_db.break_on_exceptions_thrown_in_same_context = False + + if splitted[3] == 'true': + py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = True + else: + py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = False + + for exception_type in splitted[4:]: + exception_type = exception_type.strip() + if not exception_type: + continue + + exception_breakpoint = py_db.add_break_on_exception( + exception_type, + notify_always=break_on_caught, + notify_on_terminate=break_on_uncaught, + notify_on_first_raise_only=False, + ) + if exception_breakpoint is None: + continue + added.append(exception_breakpoint) + + py_db.update_after_exceptions_added(added) + if break_on_caught: + py_db.enable_tracing_in_frames_while_running_if_frame_eval() + + else: + sys.stderr.write("Error when setting exception list. Received: %s\n" % (text,)) + + elif cmd_id == CMD_GET_FILE_CONTENTS: + + if not IS_PY3K: # In Python 3, the frame object will have unicode for the file, whereas on python 2 it has a byte-array encoded with the filesystem encoding. + text = text.encode(file_system_encoding) + + if os.path.exists(text): + f = open(text, 'r') + try: + source = f.read() + finally: + f.close() + cmd = py_db.cmd_factory.make_get_file_contents(seq, source) + + elif cmd_id == CMD_SET_PROPERTY_TRACE: + # Command which receives whether to trace property getter/setter/deleter + # text is feature_state(true/false);disable_getter/disable_setter/disable_deleter + if text != "": + splitted = text.split(';') + if len(splitted) >= 3: + if py_db.disable_property_trace is False and splitted[0] == 'true': + # Replacing property by custom property only when the debugger starts + pydevd_traceproperty.replace_builtin_property() + py_db.disable_property_trace = True + # Enable/Disable tracing of the property getter + if splitted[1] == 'true': + py_db.disable_property_getter_trace = True + else: + py_db.disable_property_getter_trace = False + # Enable/Disable tracing of the property setter + if splitted[2] == 'true': + py_db.disable_property_setter_trace = True + else: + py_db.disable_property_setter_trace = False + # Enable/Disable tracing of the property deleter + if splitted[3] == 'true': + py_db.disable_property_deleter_trace = True + else: + py_db.disable_property_deleter_trace = False + else: + # User hasn't configured any settings for property tracing + pass + + elif cmd_id == CMD_ADD_EXCEPTION_BREAK: + if text.find('\t') != -1: + exception, notify_always, notify_on_terminate, ignore_libraries = text.split('\t', 3) + else: + exception, notify_always, notify_on_terminate, ignore_libraries = text, 0, 0, 0 + + if exception.find('-') != -1: + breakpoint_type, exception = exception.split('-') + else: + breakpoint_type = 'python' + + if breakpoint_type == 'python': + if int(notify_always) == 1: + pydev_log.warn("Deprecated parameter: 'notify always' policy removed in PyCharm\n") + exception_breakpoint = py_db.add_break_on_exception( + exception, + notify_always=int(notify_always) > 0, + notify_on_terminate = int(notify_on_terminate) == 1, + notify_on_first_raise_only=int(notify_always) == 2, + 
ignore_libraries=int(ignore_libraries) > 0 + ) + + if exception_breakpoint is not None: + py_db.update_after_exceptions_added([exception_breakpoint]) + if notify_always: + py_db.enable_tracing_in_frames_while_running_if_frame_eval() + else: + supported_type = False + plugin = py_db.get_plugin_lazy_init() + if plugin is not None: + supported_type = plugin.add_breakpoint('add_exception_breakpoint', py_db, breakpoint_type, exception) + + if supported_type: + py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks() + py_db.enable_tracing_in_frames_while_running_if_frame_eval() + else: + raise NameError(breakpoint_type) + + + + elif cmd_id == CMD_REMOVE_EXCEPTION_BREAK: + exception = text + if exception.find('-') != -1: + exception_type, exception = exception.split('-') + else: + exception_type = 'python' + + if exception_type == 'python': + try: + cp = py_db.break_on_uncaught_exceptions.copy() + cp.pop(exception, None) + py_db.break_on_uncaught_exceptions = cp + + cp = py_db.break_on_caught_exceptions.copy() + cp.pop(exception, None) + py_db.break_on_caught_exceptions = cp + except: + pydev_log.debug("Error while removing exception %s"%sys.exc_info()[0]) + update_exception_hook(py_db) + else: + supported_type = False + + # I.e.: no need to initialize lazy (if we didn't have it in the first place, we can't remove + # anything from it anyways). + plugin = py_db.plugin + if plugin is not None: + supported_type = plugin.remove_exception_breakpoint(py_db, exception_type, exception) + + if supported_type: + py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks() + else: + raise NameError(exception_type) + if len(py_db.break_on_caught_exceptions) == 0 and not py_db.has_plugin_exception_breaks: + py_db.disable_tracing_while_running_if_frame_eval() + + elif cmd_id == CMD_LOAD_SOURCE: + path = text + try: + f = open(path, 'r') + source = f.read() + py_db.cmd_factory.make_load_source_message(seq, source, py_db) + except: + return py_db.cmd_factory.make_error_message(seq, pydevd_tracing.get_exception_traceback_str()) + + elif cmd_id == CMD_ADD_DJANGO_EXCEPTION_BREAK: + exception = text + plugin = py_db.get_plugin_lazy_init() + if plugin is not None: + plugin.add_breakpoint('add_exception_breakpoint', py_db, 'django', exception) + py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks() + py_db.enable_tracing_in_frames_while_running_if_frame_eval() + + elif cmd_id == CMD_REMOVE_DJANGO_EXCEPTION_BREAK: + exception = text + + # I.e.: no need to initialize lazy (if we didn't have it in the first place, we can't remove + # anything from it anyways). 
+ plugin = py_db.plugin + if plugin is not None: + plugin.remove_exception_breakpoint(py_db, 'django', exception) + py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks() + if len(py_db.break_on_caught_exceptions) == 0 and not py_db.has_plugin_exception_breaks: + py_db.disable_tracing_while_running_if_frame_eval() + + elif cmd_id == CMD_EVALUATE_CONSOLE_EXPRESSION: + # Command which takes care for the debug console communication + if text != "": + thread_id, frame_id, console_command = text.split('\t', 2) + console_command, line = console_command.split('\t') + + if console_command == 'EVALUATE': + int_cmd = InternalEvaluateConsoleExpression( + seq, thread_id, frame_id, line, buffer_output=True) + + elif console_command == 'EVALUATE_UNBUFFERED': + int_cmd = InternalEvaluateConsoleExpression( + seq, thread_id, frame_id, line, buffer_output=False) + + elif console_command == 'GET_COMPLETIONS': + int_cmd = InternalConsoleGetCompletions(seq, thread_id, frame_id, line) + + else: + raise ValueError('Unrecognized command: %s' % (console_command,)) + + py_db.post_internal_command(int_cmd, thread_id) + + elif cmd_id == CMD_RUN_CUSTOM_OPERATION: + # Command which runs a custom operation + if text != "": + try: + location, custom = text.split('||', 1) + except: + sys.stderr.write('Custom operation now needs a || separator. Found: %s\n' % (text,)) + raise + + thread_id, frame_id, scopeattrs = location.split('\t', 2) + + if scopeattrs.find('\t') != -1: # there are attributes beyond scope + scope, attrs = scopeattrs.split('\t', 1) + else: + scope, attrs = (scopeattrs, None) + + # : style: EXECFILE or EXEC + # : encoded_code_or_file: file to execute or code + # : fname: name of function to be executed in the resulting namespace + style, encoded_code_or_file, fnname = custom.split('\t', 3) + int_cmd = InternalRunCustomOperation(seq, thread_id, frame_id, scope, attrs, + style, encoded_code_or_file, fnname) + py_db.post_internal_command(int_cmd, thread_id) + + elif cmd_id == CMD_IGNORE_THROWN_EXCEPTION_AT: + if text: + replace = 'REPLACE:' # Not all 3.x versions support u'REPLACE:', so, doing workaround. + if not IS_PY3K: + replace = unicode(replace) + + if text.startswith(replace): + text = text[8:] + py_db.filename_to_lines_where_exceptions_are_ignored.clear() + + if text: + for line in text.split('||'): # Can be bulk-created (one in each line) + filename, line_number = line.split('|') + if not IS_PY3K: + filename = filename.encode(file_system_encoding) + + filename = pydevd_file_utils.norm_file_to_server(filename) + + if os.path.exists(filename): + lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored.get(filename) + if lines_ignored is None: + lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored[filename] = {} + lines_ignored[int(line_number)] = 1 + else: + sys.stderr.write('pydev debugger: warning: trying to ignore exception thrown'\ + ' on file that does not exist: %s (will have no effect)\n' % (filename,)) + + elif cmd_id == CMD_ENABLE_DONT_TRACE: + if text: + true_str = 'true' # Not all 3.x versions support u'str', so, doing workaround. 
+ if not IS_PY3K: + true_str = unicode(true_str) + + mode = text.strip() == true_str + pydevd_dont_trace.trace_filter(mode) + + else: + #I have no idea what this is all about + cmd = py_db.cmd_factory.make_error_message(seq, "unexpected command " + str(cmd_id)) + + if cmd is not None: + py_db.writer.add_command(cmd) + del cmd + + except Exception: + traceback.print_exc() + try: + from StringIO import StringIO + except ImportError: + from io import StringIO + stream = StringIO() + traceback.print_exc(file=stream) + cmd = py_db.cmd_factory.make_error_message( + seq, + "Unexpected exception in process_net_command.\nInitial params: %s. Exception: %s" % ( + ((cmd_id, seq, text), stream.getvalue()) + ) + ) + + py_db.writer.add_command(cmd) + finally: + py_db._main_lock.release() + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_referrers.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_referrers.py new file mode 100644 index 00000000..a4d502c9 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_referrers.py @@ -0,0 +1,236 @@ +import sys +from _pydevd_bundle import pydevd_xml +from os.path import basename +import traceback +try: + from urllib import quote, quote_plus, unquote, unquote_plus +except: + from urllib.parse import quote, quote_plus, unquote, unquote_plus #@Reimport @UnresolvedImport + +#=================================================================================================== +# print_var_node +#=================================================================================================== +def print_var_node(xml_node, stream): + name = xml_node.getAttribute('name') + value = xml_node.getAttribute('value') + val_type = xml_node.getAttribute('type') + + found_as = xml_node.getAttribute('found_as') + stream.write('Name: ') + stream.write(unquote_plus(name)) + stream.write(', Value: ') + stream.write(unquote_plus(value)) + stream.write(', Type: ') + stream.write(unquote_plus(val_type)) + if found_as: + stream.write(', Found as: %s' % (unquote_plus(found_as),)) + stream.write('\n') + +#=================================================================================================== +# print_referrers +#=================================================================================================== +def print_referrers(obj, stream=None): + if stream is None: + stream = sys.stdout + result = get_referrer_info(obj) + from xml.dom.minidom import parseString + dom = parseString(result) + + xml = dom.getElementsByTagName('xml')[0] + for node in xml.childNodes: + if node.nodeType == node.TEXT_NODE: + continue + + if node.localName == 'for': + stream.write('Searching references for: ') + for child in node.childNodes: + if child.nodeType == node.TEXT_NODE: + continue + print_var_node(child, stream) + + elif node.localName == 'var': + stream.write('Referrer found: ') + print_var_node(node, stream) + + else: + sys.stderr.write('Unhandled node: %s\n' % (node,)) + + return result + + +#=================================================================================================== +# get_referrer_info +#=================================================================================================== +def get_referrer_info(searched_obj): + DEBUG = 0 + if DEBUG: + sys.stderr.write('Getting referrers info.\n') + try: + try: + if searched_obj is None: + ret = ['\n'] + + ret.append('\n') + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Skipping getting referrers for None', + additional_in_xml=' id="%s"' % (id(searched_obj),))) + ret.append('\n') + ret.append('') + ret = ''.join(ret) 
+ return ret + + obj_id = id(searched_obj) + + try: + if DEBUG: + sys.stderr.write('Getting referrers...\n') + import gc + referrers = gc.get_referrers(searched_obj) + except: + traceback.print_exc() + ret = ['\n'] + + ret.append('\n') + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Exception raised while trying to get_referrers.', + additional_in_xml=' id="%s"' % (id(searched_obj),))) + ret.append('\n') + ret.append('') + ret = ''.join(ret) + return ret + + if DEBUG: + sys.stderr.write('Found %s referrers.\n' % (len(referrers),)) + + curr_frame = sys._getframe() + frame_type = type(curr_frame) + + #Ignore this frame and any caller frame of this frame + + ignore_frames = {} #Should be a set, but it's not available on all python versions. + while curr_frame is not None: + if basename(curr_frame.f_code.co_filename).startswith('pydev'): + ignore_frames[curr_frame] = 1 + curr_frame = curr_frame.f_back + + + ret = ['\n'] + + ret.append('\n') + if DEBUG: + sys.stderr.write('Searching Referrers of obj with id="%s"\n' % (obj_id,)) + + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Referrers of obj with id="%s"' % (obj_id,))) + ret.append('\n') + + all_objects = None + + for r in referrers: + try: + if r in ignore_frames: + continue #Skip the references we may add ourselves + except: + pass #Ok: unhashable type checked... + + if r is referrers: + continue + + r_type = type(r) + r_id = str(id(r)) + + representation = str(r_type) + + found_as = '' + if r_type == frame_type: + if DEBUG: + sys.stderr.write('Found frame referrer: %r\n' % (r,)) + for key, val in r.f_locals.items(): + if val is searched_obj: + found_as = key + break + + elif r_type == dict: + if DEBUG: + sys.stderr.write('Found dict referrer: %r\n' % (r,)) + + # Try to check if it's a value in the dict (and under which key it was found) + for key, val in r.items(): + if val is searched_obj: + found_as = key + if DEBUG: + sys.stderr.write(' Found as %r in dict\n' % (found_as,)) + break + + #Ok, there's one annoying thing: many times we find it in a dict from an instance, + #but with this we don't directly have the class, only the dict, so, to workaround that + #we iterate over all reachable objects ad check if one of those has the given dict. + if all_objects is None: + all_objects = gc.get_objects() + + for x in all_objects: + try: + if getattr(x, '__dict__', None) is r: + r = x + r_type = type(x) + r_id = str(id(r)) + representation = str(r_type) + break + except: + pass #Just ignore any error here (i.e.: ReferenceError, etc.) + + elif r_type in (tuple, list): + if DEBUG: + sys.stderr.write('Found tuple referrer: %r\n' % (r,)) + + for i, x in enumerate(r): + if x is searched_obj: + found_as = '%s[%s]' % (r_type.__name__, i) + if DEBUG: + sys.stderr.write(' Found as %s in tuple: \n' % (found_as,)) + break + + if found_as: + if not isinstance(found_as, str): + found_as = str(found_as) + found_as = ' found_as="%s"' % (pydevd_xml.make_valid_xml_value(found_as),) + + ret.append(pydevd_xml.var_to_xml( + r, + representation, + additional_in_xml=' id="%s"%s' % (r_id, found_as))) + finally: + if DEBUG: + sys.stderr.write('Done searching for references.\n') + + #If we have any exceptions, don't keep dangling references from this frame to any of our objects. 
+ all_objects = None + referrers = None + searched_obj = None + r = None + x = None + key = None + val = None + curr_frame = None + ignore_frames = None + except: + traceback.print_exc() + ret = ['\n'] + + ret.append('\n') + ret.append(pydevd_xml.var_to_xml( + searched_obj, + 'Error getting referrers for:', + additional_in_xml=' id="%s"' % (id(searched_obj),))) + ret.append('\n') + ret.append('') + ret = ''.join(ret) + return ret + + ret.append('') + ret = ''.join(ret) + return ret + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_reload.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_reload.py new file mode 100644 index 00000000..be89da0b --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_reload.py @@ -0,0 +1,453 @@ +""" +Based on the python xreload. + +Changes +====================== + +1. we don't recreate the old namespace from new classes. Rather, we keep the existing namespace, +load a new version of it and update only some of the things we can inplace. That way, we don't break +things such as singletons or end up with a second representation of the same class in memory. + +2. If we find it to be a __metaclass__, we try to update it as a regular class. + +3. We don't remove old attributes (and leave them lying around even if they're no longer used). + +4. Reload hooks were changed + +These changes make it more stable, especially in the common case (where in a debug session only the +contents of a function are changed), besides providing flexibility for users that want to extend +on it. + + + +Hooks +====================== + +Classes/modules can be specially crafted to work with the reload (so that it can, for instance, +update some constant which was changed). + +1. To participate in the change of some attribute: + + In a module: + + __xreload_old_new__(namespace, name, old, new) + + in a class: + + @classmethod + __xreload_old_new__(cls, name, old, new) + + A class or module may include a method called '__xreload_old_new__' which is called when we're + unable to reload a given attribute. + + + +2. To do something after the whole reload is finished: + + In a module: + + __xreload_after_reload_update__(namespace): + + In a class: + + @classmethod + __xreload_after_reload_update__(cls): + + + A class or module may include a method called '__xreload_after_reload_update__' which is called + after the reload finishes. + + +Important: when providing a hook, always use the namespace or cls provided and not anything in the global +namespace, as the global namespace are only temporarily created during the reload and may not reflect the +actual application state (while the cls and namespace passed are). + + +Current limitations +====================== + + +- Attributes/constants are added, but not changed (so singletons and the application state is not + broken -- use provided hooks to workaround it). + +- Code using metaclasses may not always work. + +- Functions and methods using decorators (other than classmethod and staticmethod) are not handled + correctly. + +- Renamings are not handled correctly. + +- Dependent modules are not reloaded. + +- New __slots__ can't be added to existing classes. + + +Info +====================== + +Original: http://svn.python.org/projects/sandbox/trunk/xreload/xreload.py +Note: it seems https://github.com/plone/plone.reload/blob/master/plone/reload/xreload.py enhances it (to check later) + +Interesting alternative: https://code.google.com/p/reimport/ + +Alternative to reload(). 
+ +This works by executing the module in a scratch namespace, and then patching classes, methods and +functions in place. This avoids the need to patch instances. New objects are copied into the +target namespace. + +""" + +import imp +from _pydev_bundle.pydev_imports import Exec +from _pydevd_bundle import pydevd_dont_trace +import sys +import traceback +import types + +NO_DEBUG = 0 +LEVEL1 = 1 +LEVEL2 = 2 + +DEBUG = NO_DEBUG + +def write(*args): + new_lst = [] + for a in args: + new_lst.append(str(a)) + + msg = ' '.join(new_lst) + sys.stdout.write('%s\n' % (msg,)) + +def write_err(*args): + new_lst = [] + for a in args: + new_lst.append(str(a)) + + msg = ' '.join(new_lst) + sys.stderr.write('pydev debugger: %s\n' % (msg,)) + +def notify_info0(*args): + write_err(*args) + +def notify_info(*args): + if DEBUG >= LEVEL1: + write(*args) + +def notify_info2(*args): + if DEBUG >= LEVEL2: + write(*args) + +def notify_error(*args): + write_err(*args) + + + +#======================================================================================================================= +# code_objects_equal +#======================================================================================================================= +def code_objects_equal(code0, code1): + for d in dir(code0): + if d.startswith('_') or 'lineno' in d: + continue + if getattr(code0, d) != getattr(code1, d): + return False + return True + + +#======================================================================================================================= +# xreload +#======================================================================================================================= +def xreload(mod): + """Reload a module in place, updating classes, methods and functions. + + mod: a module object + + Returns a boolean indicating whether a change was done. + """ + r = Reload(mod) + r.apply() + found_change = r.found_change + r = None + pydevd_dont_trace.clear_trace_filter_cache() + return found_change + + +# This isn't actually used... Initially I planned to reload variables which are immutable on the +# namespace, but this can destroy places where we're saving state, which may not be what we want, +# so, we're being conservative and giving the user hooks if he wants to do a reload. +# +# immutable_types = [int, str, float, tuple] #That should be common to all Python versions +# +# for name in 'long basestr unicode frozenset'.split(): +# try: +# immutable_types.append(__builtins__[name]) +# except: +# pass #Just ignore: not all python versions are created equal. +# immutable_types = tuple(immutable_types) + + +#======================================================================================================================= +# Reload +#======================================================================================================================= +class Reload: + + def __init__(self, mod): + self.mod = mod + self.found_change = False + + def apply(self): + mod = self.mod + self._on_finish_callbacks = [] + try: + # Get the module name, e.g. 'foo.bar.whatever' + modname = mod.__name__ + # Get the module namespace (dict) early; this is part of the type check + modns = mod.__dict__ + # Parse it into package name and module name, e.g. 
'foo.bar' and 'whatever' + i = modname.rfind(".") + if i >= 0: + pkgname, modname = modname[:i], modname[i + 1:] + else: + pkgname = None + # Compute the search path + if pkgname: + # We're not reloading the package, only the module in it + pkg = sys.modules[pkgname] + path = pkg.__path__ # Search inside the package + else: + # Search the top-level module path + pkg = None + path = None # Make find_module() uses the default search path + # Find the module; may raise ImportError + (stream, filename, (suffix, mode, kind)) = imp.find_module(modname, path) + # Turn it into a code object + try: + # Is it Python source code or byte code read from a file? + if kind not in (imp.PY_COMPILED, imp.PY_SOURCE): + # Fall back to built-in reload() + notify_error('Could not find source to reload (mod: %s)' % (modname,)) + return + if kind == imp.PY_SOURCE: + source = stream.read() + code = compile(source, filename, "exec") + else: + import marshal + code = marshal.load(stream) + finally: + if stream: + stream.close() + # Execute the code. We copy the module dict to a temporary; then + # clear the module dict; then execute the new code in the module + # dict; then swap things back and around. This trick (due to + # Glyph Lefkowitz) ensures that the (readonly) __globals__ + # attribute of methods and functions is set to the correct dict + # object. + new_namespace = modns.copy() + new_namespace.clear() + new_namespace["__name__"] = modns["__name__"] + Exec(code, new_namespace) + # Now we get to the hard part + oldnames = set(modns) + newnames = set(new_namespace) + + # Create new tokens (note: not deleting existing) + for name in newnames - oldnames: + notify_info0('Added:', name, 'to namespace') + self.found_change = True + modns[name] = new_namespace[name] + + # Update in-place what we can + for name in oldnames & newnames: + self._update(modns, name, modns[name], new_namespace[name]) + + self._handle_namespace(modns) + + for c in self._on_finish_callbacks: + c() + del self._on_finish_callbacks[:] + except: + traceback.print_exc() + + + def _handle_namespace(self, namespace, is_class_namespace=False): + on_finish = None + if is_class_namespace: + xreload_after_update = getattr(namespace, '__xreload_after_reload_update__', None) + if xreload_after_update is not None: + self.found_change = True + on_finish = lambda: xreload_after_update() + + elif '__xreload_after_reload_update__' in namespace: + xreload_after_update = namespace['__xreload_after_reload_update__'] + self.found_change = True + on_finish = lambda: xreload_after_update(namespace) + + + if on_finish is not None: + # If a client wants to know about it, give him a chance. + self._on_finish_callbacks.append(on_finish) + + + + def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False): + """Update oldobj, if possible in place, with newobj. + + If oldobj is immutable, this simply returns newobj. + + Args: + oldobj: the object to be updated + newobj: the object used as the source for the update + """ + try: + notify_info2('Updating: ', oldobj) + if oldobj is newobj: + # Probably something imported + return + + if type(oldobj) is not type(newobj): + # Cop-out: if the type changed, give up + notify_error('Type of: %s changed... Skipping.' 
% (oldobj,)) + return + + if isinstance(newobj, types.FunctionType): + self._update_function(oldobj, newobj) + return + + if isinstance(newobj, types.MethodType): + self._update_method(oldobj, newobj) + return + + if isinstance(newobj, classmethod): + self._update_classmethod(oldobj, newobj) + return + + if isinstance(newobj, staticmethod): + self._update_staticmethod(oldobj, newobj) + return + + if hasattr(types, 'ClassType'): + classtype = (types.ClassType, type) #object is not instance of types.ClassType. + else: + classtype = type + + if isinstance(newobj, classtype): + self._update_class(oldobj, newobj) + return + + # New: dealing with metaclasses. + if hasattr(newobj, '__metaclass__') and hasattr(newobj, '__class__') and newobj.__metaclass__ == newobj.__class__: + self._update_class(oldobj, newobj) + return + + if namespace is not None: + + if oldobj != newobj and str(oldobj) != str(newobj) and repr(oldobj) != repr(newobj): + xreload_old_new = None + if is_class_namespace: + xreload_old_new = getattr(namespace, '__xreload_old_new__', None) + if xreload_old_new is not None: + self.found_change = True + xreload_old_new(name, oldobj, newobj) + + elif '__xreload_old_new__' in namespace: + xreload_old_new = namespace['__xreload_old_new__'] + xreload_old_new(namespace, name, oldobj, newobj) + self.found_change = True + + # Too much information to the user... + # else: + # notify_info0('%s NOT updated. Create __xreload_old_new__(name, old, new) for custom reload' % (name,)) + + except: + notify_error('Exception found when updating %s. Proceeding for other items.' % (name,)) + traceback.print_exc() + + + # All of the following functions have the same signature as _update() + + + def _update_function(self, oldfunc, newfunc): + """Update a function object.""" + oldfunc.__doc__ = newfunc.__doc__ + oldfunc.__dict__.update(newfunc.__dict__) + + try: + newfunc.__code__ + attr_name = '__code__' + except AttributeError: + newfunc.func_code + attr_name = 'func_code' + + old_code = getattr(oldfunc, attr_name) + new_code = getattr(newfunc, attr_name) + if not code_objects_equal(old_code, new_code): + notify_info0('Updated function code:', oldfunc) + setattr(oldfunc, attr_name, new_code) + self.found_change = True + + try: + oldfunc.__defaults__ = newfunc.__defaults__ + except AttributeError: + oldfunc.func_defaults = newfunc.func_defaults + + return oldfunc + + + def _update_method(self, oldmeth, newmeth): + """Update a method object.""" + # XXX What if im_func is not a function? + if hasattr(oldmeth, 'im_func') and hasattr(newmeth, 'im_func'): + self._update(None, None, oldmeth.im_func, newmeth.im_func) + elif hasattr(oldmeth, '__func__') and hasattr(newmeth, '__func__'): + self._update(None, None, oldmeth.__func__, newmeth.__func__) + return oldmeth + + + def _update_class(self, oldclass, newclass): + """Update a class object.""" + olddict = oldclass.__dict__ + newdict = newclass.__dict__ + + oldnames = set(olddict) + newnames = set(newdict) + + for name in newnames - oldnames: + setattr(oldclass, name, newdict[name]) + notify_info0('Added:', name, 'to', oldclass) + self.found_change = True + + # Note: not removing old things... 
+ # for name in oldnames - newnames: + # notify_info('Removed:', name, 'from', oldclass) + # delattr(oldclass, name) + + for name in (oldnames & newnames) - set(['__dict__', '__doc__']): + self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True) + + old_bases = getattr(oldclass, '__bases__', None) + new_bases = getattr(newclass, '__bases__', None) + if str(old_bases) != str(new_bases): + notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,)) + + self._handle_namespace(oldclass, is_class_namespace=True) + + + def _update_classmethod(self, oldcm, newcm): + """Update a classmethod update.""" + # While we can't modify the classmethod object itself (it has no + # mutable attributes), we *can* extract the underlying function + # (by calling __get__(), which returns a method object) and update + # it in-place. We don't have the class available to pass to + # __get__() but any object except None will do. + self._update(None, None, oldcm.__get__(0), newcm.__get__(0)) + + + def _update_staticmethod(self, oldsm, newsm): + """Update a staticmethod update.""" + # While we can't modify the staticmethod object itself (it has no + # mutable attributes), we *can* extract the underlying function + # (by calling __get__(), which returns it) and update it in-place. + # We don't have the class available to pass to __get__() but any + # object except None will do. + self._update(None, None, oldsm.__get__(0), newsm.__get__(0)) diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_resolver.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_resolver.py new file mode 100644 index 00000000..ae80d158 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_resolver.py @@ -0,0 +1,488 @@ +try: + import StringIO +except: + import io as StringIO +import traceback +from os.path import basename + +from _pydevd_bundle import pydevd_constants +from _pydevd_bundle.pydevd_constants import dict_iter_items, dict_keys, xrange + + +# Note: 300 is already a lot to see in the outline (after that the user should really use the shell to get things) +# and this also means we'll pass less information to the client side (which makes debugging faster). +MAX_ITEMS_TO_HANDLE = 300 + +TOO_LARGE_MSG = 'Too large to show contents. 
Max items to show: ' + str(MAX_ITEMS_TO_HANDLE) +TOO_LARGE_ATTR = 'Unable to handle:' + +#======================================================================================================================= +# UnableToResolveVariableException +#======================================================================================================================= +class UnableToResolveVariableException(Exception): + pass + + +#======================================================================================================================= +# InspectStub +#======================================================================================================================= +class InspectStub: + def isbuiltin(self, _args): + return False + def isroutine(self, object): + return False + +try: + import inspect +except: + inspect = InspectStub() + +try: + import java.lang #@UnresolvedImport +except: + pass + +#types does not include a MethodWrapperType +try: + MethodWrapperType = type([].__str__) +except: + MethodWrapperType = None + + +#======================================================================================================================= +# See: pydevd_extension_api module for resolver interface +#======================================================================================================================= + + +#======================================================================================================================= +# DefaultResolver +#======================================================================================================================= +class DefaultResolver: + ''' + DefaultResolver is the class that'll actually resolve how to show some variable. + ''' + + def resolve(self, var, attribute): + return getattr(var, attribute) + + def get_dictionary(self, var, names=None): + if MethodWrapperType: + return self._getPyDictionary(var, names) + else: + return self._getJyDictionary(var) + + def _getJyDictionary(self, obj): + ret = {} + found = java.util.HashMap() + + original = obj + if hasattr(obj, '__class__') and obj.__class__ == java.lang.Class: + + #get info about superclasses + classes = [] + classes.append(obj) + c = obj.getSuperclass() + while c != None: + classes.append(c) + c = c.getSuperclass() + + #get info about interfaces + interfs = [] + for obj in classes: + interfs.extend(obj.getInterfaces()) + classes.extend(interfs) + + #now is the time when we actually get info on the declared methods and fields + for obj in classes: + + declaredMethods = obj.getDeclaredMethods() + declaredFields = obj.getDeclaredFields() + for i in xrange(len(declaredMethods)): + name = declaredMethods[i].getName() + ret[name] = declaredMethods[i].toString() + found.put(name, 1) + + for i in xrange(len(declaredFields)): + name = declaredFields[i].getName() + found.put(name, 1) + #if declaredFields[i].isAccessible(): + declaredFields[i].setAccessible(True) + #ret[name] = declaredFields[i].get( declaredFields[i] ) + try: + ret[name] = declaredFields[i].get(original) + except: + ret[name] = declaredFields[i].toString() + + #this simple dir does not always get all the info, that's why we have the part before + #(e.g.: if we do a dir on String, some methods that are from other interfaces such as + #charAt don't appear) + try: + d = dir(original) + for name in d: + if found.get(name) is not 1: + ret[name] = getattr(original, name) + except: + #sometimes we're unable to do a dir + pass + + return ret + + def get_names(self, var): + names = dir(var) + if 
not names and hasattr(var, '__members__'): + names = var.__members__ + return names + + def _getPyDictionary(self, var, names=None): + filterPrivate = False + filterSpecial = True + filterFunction = True + filterBuiltIn = True + + if not names: + names = self.get_names(var) + d = {} + + #Be aware that the order in which the filters are applied attempts to + #optimize the operation by removing as many items as possible in the + #first filters, leaving fewer items for later filters + + if filterBuiltIn or filterFunction: + for n in names: + if filterSpecial: + if n.startswith('__') and n.endswith('__'): + continue + + if filterPrivate: + if n.startswith('_') or n.endswith('__'): + continue + + try: + attr = getattr(var, n) + + #filter builtins? + if filterBuiltIn: + if inspect.isbuiltin(attr): + continue + + #filter functions? + if filterFunction: + if inspect.isroutine(attr) or isinstance(attr, MethodWrapperType): + continue + except: + #if some error occurs getting it, let's put it to the user. + strIO = StringIO.StringIO() + traceback.print_exc(file=strIO) + attr = strIO.getvalue() + + d[ n ] = attr + + return d + + +#======================================================================================================================= +# DictResolver +#======================================================================================================================= +class DictResolver: + + def resolve(self, dict, key): + if key in ('__len__', TOO_LARGE_ATTR): + return None + + if '(' not in key: + #we have to treat that because the dict resolver is also used to directly resolve the global and local + #scopes (which already have the items directly) + try: + return dict[key] + except: + return getattr(dict, key) + + #ok, we have to iterate over the items to find the one that matches the id, because that's the only way + #to actually find the reference from the string we have before. + expected_id = int(key.split('(')[-1][:-1]) + for key, val in dict_iter_items(dict): + if id(key) == expected_id: + return val + + raise UnableToResolveVariableException() + + def key_to_str(self, key): + if isinstance(key, str): + return '%r' % key + else: + if not pydevd_constants.IS_PY3K: + if isinstance(key, unicode): + return "u'%s'" % key + return key + + def get_dictionary(self, dict): + ret = {} + + i = 0 + for key, val in dict_iter_items(dict): + i += 1 + #we need to add the id because otherwise we cannot find the real object to get its contents later on. 
+ key = '%s (%s)' % (self.key_to_str(key), id(key)) + ret[key] = val + if i > MAX_ITEMS_TO_HANDLE: + ret[TOO_LARGE_ATTR] = TOO_LARGE_MSG + break + + ret['__len__'] = len(dict) + # in case if the class extends built-in type and has some additional fields + additional_fields = defaultResolver.get_dictionary(dict) + ret.update(additional_fields) + return ret + + +#======================================================================================================================= +# TupleResolver +#======================================================================================================================= +class TupleResolver: #to enumerate tuples and lists + + def resolve(self, var, attribute): + ''' + @param var: that's the original attribute + @param attribute: that's the key passed in the dict (as a string) + ''' + if attribute in ('__len__', TOO_LARGE_ATTR): + return None + try: + return var[int(attribute)] + except: + return getattr(var, attribute) + + def get_dictionary(self, var): + l = len(var) + d = {} + + format_str = '%0' + str(int(len(str(l)))) + 'd' + + i = 0 + for item in var: + d[format_str % i] = item + i += 1 + + if i > MAX_ITEMS_TO_HANDLE: + d[TOO_LARGE_ATTR] = TOO_LARGE_MSG + break + + d['__len__'] = len(var) + # in case if the class extends built-in type and has some additional fields + additional_fields = defaultResolver.get_dictionary(var) + d.update(additional_fields) + return d + + + +#======================================================================================================================= +# SetResolver +#======================================================================================================================= +class SetResolver: + ''' + Resolves a set as dict id(object)->object + ''' + + def resolve(self, var, attribute): + if attribute in ('__len__', TOO_LARGE_ATTR): + return None + + try: + attribute = int(attribute) + except: + return getattr(var, attribute) + + for v in var: + if id(v) == attribute: + return v + + raise UnableToResolveVariableException('Unable to resolve %s in %s' % (attribute, var)) + + def get_dictionary(self, var): + d = {} + i = 0 + for item in var: + i+= 1 + d[id(item)] = item + + if i > MAX_ITEMS_TO_HANDLE: + d[TOO_LARGE_ATTR] = TOO_LARGE_MSG + break + + + d['__len__'] = len(var) + # in case if the class extends built-in type and has some additional fields + additional_fields = defaultResolver.get_dictionary(var) + d.update(additional_fields) + return d + + +#======================================================================================================================= +# InstanceResolver +#======================================================================================================================= +class InstanceResolver: + + def resolve(self, var, attribute): + field = var.__class__.getDeclaredField(attribute) + field.setAccessible(True) + return field.get(var) + + def get_dictionary(self, obj): + ret = {} + + declaredFields = obj.__class__.getDeclaredFields() + for i in xrange(len(declaredFields)): + name = declaredFields[i].getName() + try: + declaredFields[i].setAccessible(True) + ret[name] = declaredFields[i].get(obj) + except: + traceback.print_exc() + + return ret + + +#======================================================================================================================= +# JyArrayResolver +#======================================================================================================================= +class JyArrayResolver: + ''' + This resolves a 
regular Object[] array from java + ''' + + def resolve(self, var, attribute): + if attribute == '__len__': + return None + return var[int(attribute)] + + def get_dictionary(self, obj): + ret = {} + + for i in xrange(len(obj)): + ret[ i ] = obj[i] + + ret['__len__'] = len(obj) + return ret + + + + +#======================================================================================================================= +# MultiValueDictResolver +#======================================================================================================================= +class MultiValueDictResolver(DictResolver): + + def resolve(self, dict, key): + if key in ('__len__', TOO_LARGE_ATTR): + return None + + #ok, we have to iterate over the items to find the one that matches the id, because that's the only way + #to actually find the reference from the string we have before. + expected_id = int(key.split('(')[-1][:-1]) + for key in dict_keys(dict): + val = dict.getlist(key) + if id(key) == expected_id: + return val + + raise UnableToResolveVariableException() + + + +#======================================================================================================================= +# DjangoFormResolver +#======================================================================================================================= +class DjangoFormResolver(DefaultResolver): + has_errors_attr = False + + def get_names(self, var): + names = dir(var) + if not names and hasattr(var, '__members__'): + names = var.__members__ + + if "errors" in names: + self.has_errors_attr = True + names.remove("errors") + return names + + def get_dictionary(self, var, names=None): + # Do not call self.errors because it is property and has side effects + d = defaultResolver.get_dictionary(var, self.get_names(var)) + if self.has_errors_attr: + try: + errors_attr = getattr(var, "_errors") + except: + errors_attr = None + d["errors"] = errors_attr + return d + + +#======================================================================================================================= +# DequeResolver +#======================================================================================================================= +class DequeResolver(TupleResolver): + def get_dictionary(self, var): + d = TupleResolver.get_dictionary(self, var) + d['maxlen'] = getattr(var, 'maxlen', None) + return d + + +#======================================================================================================================= +# FrameResolver +#======================================================================================================================= +class FrameResolver: + ''' + This resolves a frame. 
+ ''' + + def resolve(self, obj, attribute): + if attribute == '__internals__': + return defaultResolver.get_dictionary(obj) + + if attribute == 'stack': + return self.get_frame_stack(obj) + + if attribute == 'f_locals': + return obj.f_locals + + return None + + + def get_dictionary(self, obj): + ret = {} + ret['__internals__'] = defaultResolver.get_dictionary(obj) + ret['stack'] = self.get_frame_stack(obj) + ret['f_locals'] = obj.f_locals + return ret + + + def get_frame_stack(self, frame): + ret = [] + if frame is not None: + ret.append(self.get_frame_name(frame)) + + while frame.f_back: + frame = frame.f_back + ret.append(self.get_frame_name(frame)) + + return ret + + def get_frame_name(self, frame): + if frame is None: + return 'None' + try: + name = basename(frame.f_code.co_filename) + return 'frame: %s [%s:%s] id:%s' % (frame.f_code.co_name, name, frame.f_lineno, id(frame)) + except: + return 'frame object' + + +defaultResolver = DefaultResolver() +dictResolver = DictResolver() +tupleResolver = TupleResolver() +instanceResolver = InstanceResolver() +jyArrayResolver = JyArrayResolver() +setResolver = SetResolver() +multiValueDictResolver = MultiValueDictResolver() +djangoFormResolver = DjangoFormResolver() +dequeResolver = DequeResolver() +frameResolver = FrameResolver() diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_save_locals.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_save_locals.py new file mode 100644 index 00000000..3c6b0d60 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_save_locals.py @@ -0,0 +1,69 @@ +""" +Utility for saving locals. +""" +import sys + +try: + import types + + frame_type = types.FrameType +except: + frame_type = type(sys._getframe()) + + +def is_save_locals_available(): + return save_locals_impl is not None + + +def save_locals(frame): + """ + Copy values from locals_dict into the fast stack slots in the given frame. + + Note: the 'save_locals' branch had a different approach wrapping the frame (much more code, but it gives ideas + on how to save things partially, not the 'whole' locals). + """ + if not isinstance(frame, frame_type): + # Fix exception when changing Django variable (receiving DjangoTemplateFrame) + return + + if save_locals_impl is not None: + try: + save_locals_impl(frame) + except: + pass + + +def make_save_locals_impl(): + """ + Factory for the 'save_locals_impl' method. This may seem like a complicated pattern but it is essential that the method is created at + module load time. Inner imports after module load time would cause an occasional debugger deadlock due to the importer lock and debugger + lock being taken in different order in different threads. 
+ """ + try: + if '__pypy__' in sys.builtin_module_names: + import __pypy__ # @UnresolvedImport + save_locals = __pypy__.locals_to_fast + except: + pass + else: + if '__pypy__' in sys.builtin_module_names: + def save_locals_pypy_impl(frame): + save_locals(frame) + + return save_locals_pypy_impl + + try: + import ctypes + locals_to_fast = ctypes.pythonapi.PyFrame_LocalsToFast + except: + pass + else: + def save_locals_ctypes_impl(frame): + locals_to_fast(ctypes.py_object(frame), ctypes.c_int(0)) + + return save_locals_ctypes_impl + + return None + + +save_locals_impl = make_save_locals_impl() diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_signature.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_signature.py new file mode 100644 index 00000000..6cc1e6f1 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_signature.py @@ -0,0 +1,206 @@ + +try: + import trace +except ImportError: + pass +else: + trace._warn = lambda *args: None # workaround for http://bugs.python.org/issue17143 (PY-8706) + +import os +from _pydevd_bundle.pydevd_comm import CMD_SIGNATURE_CALL_TRACE, NetCommand +from _pydevd_bundle import pydevd_xml +from _pydevd_bundle.pydevd_constants import xrange, dict_iter_items +from _pydevd_bundle import pydevd_utils +from _pydevd_bundle.pydevd_utils import get_clsname_for_code + + +class Signature(object): + def __init__(self, file, name): + self.file = file + self.name = name + self.args = [] + self.args_str = [] + self.return_type = None + + def add_arg(self, name, type): + self.args.append((name, type)) + self.args_str.append("%s:%s"%(name, type)) + + def set_args(self, frame, recursive=False): + self.args = [] + + code = frame.f_code + locals = frame.f_locals + + for i in xrange(0, code.co_argcount): + name = code.co_varnames[i] + class_name = get_type_of_value(locals[name], recursive=recursive) + + self.add_arg(name, class_name) + + def __str__(self): + return "%s %s(%s)"%(self.file, self.name, ", ".join(self.args_str)) + + +def get_type_of_value(value, ignore_module_name=('__main__', '__builtin__', 'builtins'), recursive=False): + tp = type(value) + class_name = tp.__name__ + if class_name == 'instance': # old-style classes + tp = value.__class__ + class_name = tp.__name__ + + if hasattr(tp, '__module__') and tp.__module__ and tp.__module__ not in ignore_module_name: + class_name = "%s.%s"%(tp.__module__, class_name) + + if class_name == 'list': + class_name = 'List' + if len(value) > 0 and recursive: + class_name += '[%s]' % get_type_of_value(value[0], recursive=recursive) + return class_name + + if class_name == 'dict': + class_name = 'Dict' + if len(value) > 0 and recursive: + for (k, v) in dict_iter_items(value): + class_name += '[%s, %s]' % (get_type_of_value(k, recursive=recursive), + get_type_of_value(v, recursive=recursive)) + break + return class_name + + if class_name == 'tuple': + class_name = 'Tuple' + if len(value) > 0 and recursive: + class_name += '[' + class_name += ', '.join(get_type_of_value(v, recursive=recursive) for v in value) + class_name += ']' + + return class_name + + +def _modname(path): + """Return a plausible module name for the path""" + base = os.path.basename(path) + filename, ext = os.path.splitext(base) + return filename + + +class SignatureFactory(object): + def __init__(self): + self._caller_cache = {} + self.cache = CallSignatureCache() + + def is_in_scope(self, filename): + return not pydevd_utils.not_in_project_roots(filename) + + def create_signature(self, frame, filename, with_args=True): + try: + _, modulename, funcname = 
self.file_module_function_of(frame) + signature = Signature(filename, funcname) + if with_args: + signature.set_args(frame, recursive=True) + return signature + except: + import traceback + traceback.print_exc() + + + def file_module_function_of(self, frame): #this code is take from trace module and fixed to work with new-style classes + code = frame.f_code + filename = code.co_filename + if filename: + modulename = _modname(filename) + else: + modulename = None + + funcname = code.co_name + clsname = None + if code in self._caller_cache: + if self._caller_cache[code] is not None: + clsname = self._caller_cache[code] + else: + self._caller_cache[code] = None + clsname = get_clsname_for_code(code, frame) + if clsname is not None: + # cache the result - assumption is that new.* is + # not called later to disturb this relationship + # _caller_cache could be flushed if functions in + # the new module get called. + self._caller_cache[code] = clsname + + if clsname is not None: + funcname = "%s.%s" % (clsname, funcname) + + return filename, modulename, funcname + + +def get_signature_info(signature): + return signature.file, signature.name, ' '.join([arg[1] for arg in signature.args]) + + +def get_frame_info(frame): + co = frame.f_code + return co.co_name, frame.f_lineno, co.co_filename + + +class CallSignatureCache(object): + def __init__(self): + self.cache = {} + + def add(self, signature): + filename, name, args_type = get_signature_info(signature) + calls_from_file = self.cache.setdefault(filename, {}) + name_calls = calls_from_file.setdefault(name, {}) + name_calls[args_type] = None + + def is_in_cache(self, signature): + filename, name, args_type = get_signature_info(signature) + if args_type in self.cache.get(filename, {}).get(name, {}): + return True + return False + + +def create_signature_message(signature): + cmdTextList = [""] + + cmdTextList.append('' % (pydevd_xml.make_valid_xml_value(signature.file), pydevd_xml.make_valid_xml_value(signature.name))) + + for arg in signature.args: + cmdTextList.append('' % (pydevd_xml.make_valid_xml_value(arg[0]), pydevd_xml.make_valid_xml_value(arg[1]))) + + if signature.return_type is not None: + cmdTextList.append('' % (pydevd_xml.make_valid_xml_value(signature.return_type))) + + cmdTextList.append("") + cmdText = ''.join(cmdTextList) + return NetCommand(CMD_SIGNATURE_CALL_TRACE, 0, cmdText) + + +def send_signature_call_trace(dbg, frame, filename): + if dbg.signature_factory and dbg.signature_factory.is_in_scope(filename): + signature = dbg.signature_factory.create_signature(frame, filename) + if signature is not None: + if dbg.signature_factory.cache is not None: + if not dbg.signature_factory.cache.is_in_cache(signature): + dbg.signature_factory.cache.add(signature) + dbg.writer.add_command(create_signature_message(signature)) + return True + else: + # we don't send signature if it is cached + return False + else: + dbg.writer.add_command(create_signature_message(signature)) + return True + return False + + +def send_signature_return_trace(dbg, frame, filename, return_value): + if dbg.signature_factory and dbg.signature_factory.is_in_scope(filename): + signature = dbg.signature_factory.create_signature(frame, filename, with_args=False) + signature.return_type = get_type_of_value(return_value, recursive=True) + dbg.writer.add_command(create_signature_message(signature)) + return True + + return False + + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_stackless.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_stackless.py new file mode 100644 index 
00000000..04fc09b2 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_stackless.py @@ -0,0 +1,416 @@ +from __future__ import nested_scopes + +import weakref +import sys + +from _pydevd_bundle.pydevd_comm import get_global_debugger +from _pydevd_bundle.pydevd_constants import threading, call_only_once +from _pydevd_bundle.pydevd_constants import dict_items +from _pydevd_bundle.pydevd_custom_frames import update_custom_frame, remove_custom_frame, add_custom_frame +from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame +from pydevd_tracing import SetTrace +import stackless # @UnresolvedImport + + +# Used so that we don't loose the id (because we'll remove when it's not alive and would generate a new id for the +# same tasklet). +class TaskletToLastId: + ''' + So, why not a WeakKeyDictionary? + The problem is that removals from the WeakKeyDictionary will create a new tasklet (as it adds a callback to + remove the key when it's garbage-collected), so, we can get into a recursion. + ''' + + def __init__(self): + self.tasklet_ref_to_last_id = {} + self._i = 0 + + + def get(self, tasklet): + return self.tasklet_ref_to_last_id.get(weakref.ref(tasklet)) + + + def __setitem__(self, tasklet, last_id): + self.tasklet_ref_to_last_id[weakref.ref(tasklet)] = last_id + self._i += 1 + if self._i % 100 == 0: #Collect at each 100 additions to the dict (no need to rush). + for tasklet_ref in list(self.tasklet_ref_to_last_id.keys()): + if tasklet_ref() is None: + del self.tasklet_ref_to_last_id[tasklet_ref] + + +_tasklet_to_last_id = TaskletToLastId() + +#======================================================================================================================= +# _TaskletInfo +#======================================================================================================================= +class _TaskletInfo: + + _last_id = 0 + + def __init__(self, tasklet_weakref, tasklet): + self.frame_id = None + self.tasklet_weakref = tasklet_weakref + + last_id = _tasklet_to_last_id.get(tasklet) + if last_id is None: + _TaskletInfo._last_id += 1 + last_id = _TaskletInfo._last_id + _tasklet_to_last_id[tasklet] = last_id + + self._tasklet_id = last_id + + self.update_name() + + def update_name(self): + tasklet = self.tasklet_weakref() + if tasklet: + if tasklet.blocked: + state = 'blocked' + elif tasklet.paused: + state = 'paused' + elif tasklet.scheduled: + state = 'scheduled' + else: + state = '' + + try: + name = tasklet.name + except AttributeError: + if tasklet.is_main: + name = 'MainTasklet' + else: + name = 'Tasklet-%s' % (self._tasklet_id,) + + thread_id = tasklet.thread_id + if thread_id != -1: + for thread in threading.enumerate(): + if thread.ident == thread_id: + if thread.name: + thread_name = "of %s" % (thread.name,) + else: + thread_name = "of Thread-%s" % (thread.name or str(thread_id),) + break + else: + # should not happen. + thread_name = "of Thread-%s" % (str(thread_id),) + thread = None + else: + # tasklet is no longer bound to a thread, because its thread ended + thread_name = "without thread" + + tid = id(tasklet) + tasklet = None + else: + state = 'dead' + name = 'Tasklet-%s' % (self._tasklet_id,) + thread_name = "" + tid = '-' + self.tasklet_name = '%s %s %s (%s)' % (state, name, thread_name, tid) + + if not hasattr(stackless.tasklet, "trace_function"): + # bug https://bitbucket.org/stackless-dev/stackless/issue/42 + # is not fixed. 
Stackless releases before 2014 + def update_name(self): + tasklet = self.tasklet_weakref() + if tasklet: + try: + name = tasklet.name + except AttributeError: + if tasklet.is_main: + name = 'MainTasklet' + else: + name = 'Tasklet-%s' % (self._tasklet_id,) + + thread_id = tasklet.thread_id + for thread in threading.enumerate(): + if thread.ident == thread_id: + if thread.name: + thread_name = "of %s" % (thread.name,) + else: + thread_name = "of Thread-%s" % (thread.name or str(thread_id),) + break + else: + # should not happen. + thread_name = "of Thread-%s" % (str(thread_id),) + thread = None + + tid = id(tasklet) + tasklet = None + else: + name = 'Tasklet-%s' % (self._tasklet_id,) + thread_name = "" + tid = '-' + self.tasklet_name = '%s %s (%s)' % (name, thread_name, tid) + +_weak_tasklet_registered_to_info = {} + +#======================================================================================================================= +# get_tasklet_info +#======================================================================================================================= +def get_tasklet_info(tasklet): + return register_tasklet_info(tasklet) + + +#======================================================================================================================= +# register_tasklet_info +#======================================================================================================================= +def register_tasklet_info(tasklet): + r = weakref.ref(tasklet) + info = _weak_tasklet_registered_to_info.get(r) + if info is None: + info = _weak_tasklet_registered_to_info[r] = _TaskletInfo(r, tasklet) + + return info + + +_application_set_schedule_callback = None + +#======================================================================================================================= +# _schedule_callback +#======================================================================================================================= +def _schedule_callback(prev, next): + ''' + Called when a context is stopped or a new context is made runnable. + ''' + try: + if not prev and not next: + return + + current_frame = sys._getframe() + + if next: + register_tasklet_info(next) + + # Ok, making next runnable: set the tracing facility in it. + debugger = get_global_debugger() + if debugger is not None: + next.trace_function = debugger.trace_dispatch + frame = next.frame + if frame is current_frame: + frame = frame.f_back + if hasattr(frame, 'f_trace'): # Note: can be None (but hasattr should cover for that too). + frame.f_trace = debugger.trace_dispatch + + debugger = None + + if prev: + register_tasklet_info(prev) + + try: + for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info): # Make sure it's a copy! + tasklet = tasklet_ref() + if tasklet is None or not tasklet.alive: + # Garbage-collected already! 
+ try: + del _weak_tasklet_registered_to_info[tasklet_ref] + except KeyError: + pass + if tasklet_info.frame_id is not None: + remove_custom_frame(tasklet_info.frame_id) + else: + is_running = stackless.get_thread_info(tasklet.thread_id)[1] is tasklet + if tasklet is prev or (tasklet is not next and not is_running): + # the tasklet won't run after this scheduler action: + # - the tasklet is the previous tasklet + # - it is not the next tasklet and it is not an already running tasklet + frame = tasklet.frame + if frame is current_frame: + frame = frame.f_back + if frame is not None: + base = get_abs_path_real_path_and_base_from_frame(frame)[-1] + # print >>sys.stderr, "SchedCB: %r, %d, '%s', '%s'" % (tasklet, frame.f_lineno, _filename, base) + is_file_to_ignore = base in DONT_TRACE + if not is_file_to_ignore: + tasklet_info.update_name() + if tasklet_info.frame_id is None: + tasklet_info.frame_id = add_custom_frame(frame, tasklet_info.tasklet_name, tasklet.thread_id) + else: + update_custom_frame(tasklet_info.frame_id, frame, tasklet.thread_id, name=tasklet_info.tasklet_name) + + elif tasklet is next or is_running: + if tasklet_info.frame_id is not None: + # Remove info about stackless suspended when it starts to run. + remove_custom_frame(tasklet_info.frame_id) + tasklet_info.frame_id = None + + + finally: + tasklet = None + tasklet_info = None + frame = None + + except: + import traceback;traceback.print_exc() + + if _application_set_schedule_callback is not None: + return _application_set_schedule_callback(prev, next) + +if not hasattr(stackless.tasklet, "trace_function"): + # Older versions of Stackless, released before 2014 + # This code does not work reliable! It is affected by several + # stackless bugs: Stackless issues #44, #42, #40 + def _schedule_callback(prev, next): + ''' + Called when a context is stopped or a new context is made runnable. + ''' + try: + if not prev and not next: + return + + if next: + register_tasklet_info(next) + + # Ok, making next runnable: set the tracing facility in it. + debugger = get_global_debugger() + if debugger is not None and next.frame: + if hasattr(next.frame, 'f_trace'): + next.frame.f_trace = debugger.trace_dispatch + debugger = None + + if prev: + register_tasklet_info(prev) + + try: + for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info): # Make sure it's a copy! + tasklet = tasklet_ref() + if tasklet is None or not tasklet.alive: + # Garbage-collected already! + try: + del _weak_tasklet_registered_to_info[tasklet_ref] + except KeyError: + pass + if tasklet_info.frame_id is not None: + remove_custom_frame(tasklet_info.frame_id) + else: + if tasklet.paused or tasklet.blocked or tasklet.scheduled: + if tasklet.frame and tasklet.frame.f_back: + f_back = tasklet.frame.f_back + base = get_abs_path_real_path_and_base_from_frame(f_back)[-1] + is_file_to_ignore = base in DONT_TRACE + if not is_file_to_ignore: + if tasklet_info.frame_id is None: + tasklet_info.frame_id = add_custom_frame(f_back, tasklet_info.tasklet_name, tasklet.thread_id) + else: + update_custom_frame(tasklet_info.frame_id, f_back, tasklet.thread_id) + + elif tasklet.is_current: + if tasklet_info.frame_id is not None: + # Remove info about stackless suspended when it starts to run. 
+ remove_custom_frame(tasklet_info.frame_id) + tasklet_info.frame_id = None + + finally: + tasklet = None + tasklet_info = None + f_back = None + + except: + import traceback;traceback.print_exc() + + if _application_set_schedule_callback is not None: + return _application_set_schedule_callback(prev, next) + + + _original_setup = stackless.tasklet.setup + + #======================================================================================================================= + # setup + #======================================================================================================================= + def setup(self, *args, **kwargs): + ''' + Called to run a new tasklet: rebind the creation so that we can trace it. + ''' + + f = self.tempval + def new_f(old_f, args, kwargs): + + debugger = get_global_debugger() + if debugger is not None: + SetTrace(debugger.trace_dispatch) + + debugger = None + + # Remove our own traces :) + self.tempval = old_f + register_tasklet_info(self) + + # Hover old_f to see the stackless being created and *args and **kwargs to see its parameters. + return old_f(*args, **kwargs) + + # This is the way to tell stackless that the function it should execute is our function, not the original one. Note: + # setting tempval is the same as calling bind(new_f), but it seems that there's no other way to get the currently + # bound function, so, keeping on using tempval instead of calling bind (which is actually the same thing in a better + # API). + + self.tempval = new_f + + return _original_setup(self, f, args, kwargs) + + #======================================================================================================================= + # __call__ + #======================================================================================================================= + def __call__(self, *args, **kwargs): + ''' + Called to run a new tasklet: rebind the creation so that we can trace it. + ''' + + return setup(self, *args, **kwargs) + + + _original_run = stackless.run + + + #======================================================================================================================= + # run + #======================================================================================================================= + def run(*args, **kwargs): + debugger = get_global_debugger() + if debugger is not None: + SetTrace(debugger.trace_dispatch) + debugger = None + + return _original_run(*args, **kwargs) + + + +#======================================================================================================================= +# patch_stackless +#======================================================================================================================= +def patch_stackless(): + ''' + This function should be called to patch the stackless module so that new tasklets are properly tracked in the + debugger. 
+ ''' + global _application_set_schedule_callback + _application_set_schedule_callback = stackless.set_schedule_callback(_schedule_callback) + + def set_schedule_callback(callable): + global _application_set_schedule_callback + old = _application_set_schedule_callback + _application_set_schedule_callback = callable + return old + + def get_schedule_callback(): + global _application_set_schedule_callback + return _application_set_schedule_callback + + set_schedule_callback.__doc__ = stackless.set_schedule_callback.__doc__ + if hasattr(stackless, "get_schedule_callback"): + get_schedule_callback.__doc__ = stackless.get_schedule_callback.__doc__ + stackless.set_schedule_callback = set_schedule_callback + stackless.get_schedule_callback = get_schedule_callback + + if not hasattr(stackless.tasklet, "trace_function"): + # Older versions of Stackless, released before 2014 + __call__.__doc__ = stackless.tasklet.__call__.__doc__ + stackless.tasklet.__call__ = __call__ + + setup.__doc__ = stackless.tasklet.setup.__doc__ + stackless.tasklet.setup = setup + + run.__doc__ = stackless.run.__doc__ + stackless.run = run + +patch_stackless = call_only_once(patch_stackless) diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_api.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_api.py new file mode 100644 index 00000000..b2bdaff4 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_api.py @@ -0,0 +1,41 @@ +def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name): + return None + +def add_exception_breakpoint(plugin, pydb, type, exception): + return False + +def remove_exception_breakpoint(plugin, pydb, type, exception): + return False + +def get_breakpoints(plugin, pydb): + return None + +def can_not_skip(plugin, pydb, pydb_frame, frame): + return False + +def has_exception_breaks(plugin): + return False + +def has_line_breaks(plugin): + return False + +def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop): + return False + +def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop): + return False + +def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd): + return False + +def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args): + return None + +def suspend(plugin, pydb, thread, frame): + return None + +def exception_break(plugin, pydb, pydb_frame, frame, args, arg): + return None + +def change_variable(plugin, frame, attr, expression): + return False diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py new file mode 100644 index 00000000..a9047bda --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py @@ -0,0 +1,74 @@ +# Defines which version of the trace_dispatch we'll use. +# Should give warning only here if cython is not available but supported. 
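The module that begins above selects a trace-dispatch implementation at import time from the PYDEVD_USE_CYTHON environment variable (read just below). A minimal usage sketch, assuming only that env-var check; the idea of importing pydevd directly from a user script is an illustration, not part of this patch:

```python
import os

# Hedged sketch: force the pure-Python dispatcher by setting the variable this
# module reads *before* pydevd is imported. Accepted values per the code below
# are 'YES' and 'NO'; leaving it unset lets the module prefer the compiled
# Cython extension and fall back to pure Python with a one-time warning.
os.environ['PYDEVD_USE_CYTHON'] = 'NO'

import pydevd  # the dispatch implementation is chosen when this import runs
```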
+ +import os +import sys +from _pydevd_bundle.pydevd_constants import CYTHON_SUPPORTED + + +use_cython = os.getenv('PYDEVD_USE_CYTHON', None) +dirname = os.path.dirname(os.path.dirname(__file__)) +# Do not show incorrect warning for .egg files for Remote debugger +if not CYTHON_SUPPORTED or dirname.endswith('.egg'): + # Do not try to import cython extensions if cython isn't supported + use_cython = 'NO' + + +def delete_old_compiled_extensions(): + pydev_dir = os.path.dirname(os.path.dirname(__file__)) + _pydevd_bundle_dir = os.path.dirname(__file__) + _pydevd_frame_eval_dir = os.path.join(pydev_dir, '_pydevd_frame_eval') + try: + import shutil + for file in os.listdir(_pydevd_bundle_dir): + if file.startswith("pydevd") and file.endswith(".so"): + os.remove(os.path.join(_pydevd_bundle_dir, file)) + for file in os.listdir(_pydevd_frame_eval_dir): + if file.startswith("pydevd") and file.endswith(".so"): + os.remove(os.path.join(_pydevd_frame_eval_dir, file)) + build_dir = os.path.join(pydev_dir, "build") + if os.path.exists(build_dir): + shutil.rmtree(os.path.join(pydev_dir, "build")) + except OSError: + from _pydev_bundle.pydev_monkey import log_error_once + log_error_once("warning: failed to delete old cython speedups. Please delete all *.so files from the directories " + "\"%s\" and \"%s\"" % (_pydevd_bundle_dir, _pydevd_frame_eval_dir)) + + +if use_cython == 'YES': + # We must import the cython version if forcing cython + from _pydevd_bundle.pydevd_cython_wrapper import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips + def trace_dispatch(py_db, frame, event, arg): + return _trace_dispatch(py_db, frame, event, arg) + +elif use_cython == 'NO': + # Use the regular version if not forcing cython + from _pydevd_bundle.pydevd_trace_dispatch_regular import trace_dispatch, global_cache_skips, global_cache_frame_skips # @UnusedImport + +elif use_cython is None: + # Regular: use fallback if not found and give message to user + try: + from _pydevd_bundle.pydevd_cython_wrapper import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips + def trace_dispatch(py_db, frame, event, arg): + return _trace_dispatch(py_db, frame, event, arg) + + # This version number is always available + from _pydevd_bundle.pydevd_additional_thread_info_regular import version as regular_version + # This version number from the already compiled cython extension + from _pydevd_bundle.pydevd_cython_wrapper import version as cython_version + if cython_version != regular_version: + delete_old_compiled_extensions() + raise ImportError() + + except ImportError: + from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo # @UnusedImport + from _pydevd_bundle.pydevd_trace_dispatch_regular import trace_dispatch, global_cache_skips, global_cache_frame_skips # @UnusedImport + from _pydev_bundle.pydev_monkey import log_error_once + + log_error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build." 
% ( + sys.executable, os.path.join(dirname, 'setup_cython.py'))) + +else: + raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (accepted: YES, NO)' % (use_cython,)) + + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py new file mode 100644 index 00000000..06388ccf --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py @@ -0,0 +1,229 @@ +import traceback + +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_imps._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON +from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE +from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER +from pydevd_tracing import SetTrace +# IFDEF CYTHON +# # In Cython, PyDBAdditionalThreadInfo is bundled in the file. +# from cpython.object cimport PyObject +# from cpython.ref cimport Py_INCREF, Py_XDECREF +# ELSE +from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo +from _pydevd_bundle.pydevd_frame import PyDBFrame +# ENDIF + +try: + from _pydevd_bundle.pydevd_signature import send_signature_call_trace +except ImportError: + def send_signature_call_trace(*args, **kwargs): + pass + +threadingCurrentThread = threading.currentThread +get_file_type = DONT_TRACE.get + +# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated) +# cdef dict global_cache_skips +# cdef dict global_cache_frame_skips +# ELSE +# ENDIF + + +# Cache where we should keep that we completely skipped entering some context. +# It needs to be invalidated when: +# - Breakpoints are changed +# It can be used when running regularly (without step over/step in/step return) +global_cache_skips = {} +global_cache_frame_skips = {} + +def trace_dispatch(py_db, frame, event, arg): + t = threadingCurrentThread() + + if getattr(t, 'pydev_do_not_trace', None): + return None + + try: + additional_info = t.additional_info + if additional_info is None: + raise AttributeError() + except: + additional_info = t.additional_info = PyDBAdditionalThreadInfo() + + thread_tracer = ThreadTracer((py_db, t, additional_info, global_cache_skips, global_cache_frame_skips)) +# IFDEF CYTHON +# t._tracer = thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough). +# ELSE +# ENDIF + SetTrace(thread_tracer.__call__) + return thread_tracer.__call__(frame, event, arg) + +# IFDEF CYTHON +# cdef class SafeCallWrapper: +# cdef method_object +# def __init__(self, method_object): +# self.method_object = method_object +# def __call__(self, *args): +# #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field +# #in the frame, and that reference might get destroyed by set trace on frame and parents +# cdef PyObject* method_obj = self.method_object +# Py_INCREF(method_obj) +# ret = (method_obj)(*args) +# Py_XDECREF (method_obj) +# return SafeCallWrapper(ret) if ret is not None else None +# cdef class ThreadTracer: +# cdef public tuple _args; +# def __init__(self, tuple args): +# self._args = args +# ELSE +class ThreadTracer: + def __init__(self, args): + self._args = args +# ENDIF + + + def __call__(self, frame, event, arg): + ''' This is the callback used when we enter some context in the debugger. 
+ + We also decorate the thread we are in with info about the debugging. + The attributes added are: + pydev_state + pydev_step_stop + pydev_step_cmd + pydev_notify_kill + + :param PyDB py_db: + This is the global debugger (this method should actually be added as a method to it). + ''' + # IFDEF CYTHON + # cdef str filename; + # cdef str base; + # cdef int pydev_step_cmd; + # cdef tuple cache_key; + # cdef dict cache_skips; + # cdef bint is_stepping; + # cdef tuple abs_path_real_path_and_base; + # cdef PyDBAdditionalThreadInfo additional_info; + # ENDIF + # print('ENTER: trace_dispatch', frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name) + py_db, t, additional_info, cache_skips, frame_skips_cache = self._args + pydev_step_cmd = additional_info.pydev_step_cmd + is_stepping = pydev_step_cmd != -1 + + try: + if py_db._finish_debugging_session: + if not py_db._termination_event_set: + #that was not working very well because jython gave some socket errors + try: + if py_db.output_checker is None: + kill_all_pydev_threads() + except: + traceback.print_exc() + py_db._termination_event_set = True + return None + + # if thread is not alive, cancel trace_dispatch processing + if not is_thread_alive(t): + py_db._process_thread_not_alive(get_thread_id(t)) + return None # suspend tracing + + try: + # Make fast path faster! + abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + + if py_db.thread_analyser is not None: + py_db.thread_analyser.log_event(frame) + + if py_db.asyncio_analyser is not None: + py_db.asyncio_analyser.log_event(frame) + + filename = abs_path_real_path_and_base[1] + # Note: it's important that the context name is also given because we may hit something once + # in the global context and another in the local context. + cache_key = (frame.f_lineno, frame.f_code.co_name, filename) + if not is_stepping and cache_key in cache_skips: + # print('skipped: trace_dispatch (cache hit)', cache_key, frame.f_lineno, event, frame.f_code.co_name) + return None + + file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd + + if file_type is not None: + if file_type == 1: # inlining LIB_FILE = 1 + if py_db.not_in_scope(filename): + # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[cache_key] = 1 + return None + else: + # print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type) + cache_skips[cache_key] = 1 + return None + + if is_stepping: + if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename): + # ignore files matching stepping filters + return None + if py_db.is_filter_libraries and py_db.not_in_scope(filename): + # ignore library files while stepping + return None + + # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type) + if additional_info.is_tracing: + return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch + + if event == 'call' and py_db.signature_factory: + # We can only have a call when entering a context, so, check at this level, not at the PyDBFrame. + send_signature_call_trace(py_db, frame, filename) + + # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak + # reference to the frame). 
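+            # Descriptive note on the call below: PyDBFrame receives a single args tuple of
+            # (py_db, filename, additional_info, thread, frame_skips_cache, frame_cache_key),
+            # where frame_cache_key is (co_name, co_firstlineno, filename).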
+ ret = PyDBFrame((py_db, filename, additional_info, t, frame_skips_cache, (frame.f_code.co_name, frame.f_code.co_firstlineno, filename))).trace_dispatch(frame, event, arg) + if ret is None: + cache_skips[cache_key] = 1 + return None + + # IFDEF CYTHON + # return SafeCallWrapper(ret) + # ELSE + return ret + # ENDIF + + except SystemExit: + return None + + except Exception: + if py_db._finish_debugging_session: + return None # Don't log errors when we're shutting down. + # Log it + try: + if traceback is not None: + # This can actually happen during the interpreter shutdown in Python 2.7 + traceback.print_exc() + except: + # Error logging? We're really in the interpreter shutdown... + # (https://github.com/fabioz/PyDev.Debugger/issues/8) + pass + return None + + +if IS_IRONPYTHON: + # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really + # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things + # may live longer... as IronPython is garbage-collected, things should live longer anyways, so, it + # shouldn't be an issue as big as it's in CPython -- it may still be annoying, but this should + # be a reasonable workaround until IronPython itself is able to provide that functionality). + # + # See: https://github.com/IronLanguages/main/issues/1630 + from _pydevd_bundle.pydevd_additional_thread_info_regular import _tid_to_last_frame + + _original_call = ThreadTracer.__call__ + + def __call__(self, frame, event, arg): + _tid_to_last_frame[self._args[1].ident] = frame + return _original_call(self, frame, event, arg) + + ThreadTracer.__call__ = __call__ + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_traceproperty.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_traceproperty.py new file mode 100644 index 00000000..d5d1fb91 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_traceproperty.py @@ -0,0 +1,108 @@ +'''For debug purpose we are replacing actual builtin property by the debug property +''' +from _pydevd_bundle.pydevd_comm import get_global_debugger +from _pydevd_bundle.pydevd_constants import DebugInfoHolder, IS_PY2 +import pydevd_tracing + +#======================================================================================================================= +# replace_builtin_property +#======================================================================================================================= +def replace_builtin_property(new_property=None): + if new_property is None: + new_property = DebugProperty + original = property + if IS_PY2: + try: + import __builtin__ + __builtin__.__dict__['property'] = new_property + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL: + import traceback;traceback.print_exc() #@Reimport + else: + try: + import builtins #Python 3.0 does not have the __builtin__ module @UnresolvedImport + builtins.__dict__['property'] = new_property + except: + if DebugInfoHolder.DEBUG_TRACE_LEVEL: + import traceback;traceback.print_exc() #@Reimport + return original + + +#======================================================================================================================= +# DebugProperty +#======================================================================================================================= +class DebugProperty(object): + """A custom property which allows python property to get + controlled by the debugger and selectively disable/re-enable + the tracing. 
+ """ + + + def __init__(self, fget=None, fset=None, fdel=None, doc=None): + self.fget = fget + self.fset = fset + self.fdel = fdel + self.__doc__ = doc + + + def __get__(self, obj, objtype=None): + if obj is None: + return self + global_debugger = get_global_debugger() + try: + if global_debugger is not None and global_debugger.disable_property_getter_trace: + pydevd_tracing.SetTrace(None) + if self.fget is None: + raise AttributeError("unreadable attribute") + return self.fget(obj) + finally: + if global_debugger is not None: + pydevd_tracing.SetTrace(global_debugger.trace_dispatch) + + + def __set__(self, obj, value): + global_debugger = get_global_debugger() + try: + if global_debugger is not None and global_debugger.disable_property_setter_trace: + pydevd_tracing.SetTrace(None) + if self.fset is None: + raise AttributeError("can't set attribute") + self.fset(obj, value) + finally: + if global_debugger is not None: + pydevd_tracing.SetTrace(global_debugger.trace_dispatch) + + + def __delete__(self, obj): + global_debugger = get_global_debugger() + try: + if global_debugger is not None and global_debugger.disable_property_deleter_trace: + pydevd_tracing.SetTrace(None) + if self.fdel is None: + raise AttributeError("can't delete attribute") + self.fdel(obj) + finally: + if global_debugger is not None: + pydevd_tracing.SetTrace(global_debugger.trace_dispatch) + + + def getter(self, fget): + """Overriding getter decorator for the property + """ + self.fget = fget + return self + + + def setter(self, fset): + """Overriding setter decorator for the property + """ + self.fset = fset + return self + + + def deleter(self, fdel): + """Overriding deleter decorator for the property + """ + self.fdel = fdel + return self + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_utils.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_utils.py new file mode 100644 index 00000000..27df7b3c --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_utils.py @@ -0,0 +1,205 @@ +from __future__ import nested_scopes +import traceback +import os + +try: + from urllib import quote +except: + from urllib.parse import quote # @UnresolvedImport + +import inspect +from _pydevd_bundle.pydevd_constants import IS_PY3K +import sys +from _pydev_bundle import pydev_log + +def save_main_module(file, module_name): + # patch provided by: Scott Schlesier - when script is run, it does not + # use globals from pydevd: + # This will prevent the pydevd script from contaminating the namespace for the script to be debugged + # pretend pydevd is not the main module, and + # convince the file to be debugged that it was loaded as main + sys.modules[module_name] = sys.modules['__main__'] + sys.modules[module_name].__name__ = module_name + from imp import new_module + + m = new_module('__main__') + sys.modules['__main__'] = m + if hasattr(sys.modules[module_name], '__loader__'): + m.__loader__ = getattr(sys.modules[module_name], '__loader__') + m.__file__ = file + + return m + + +def to_number(x): + if is_string(x): + try: + n = float(x) + return n + except ValueError: + pass + + l = x.find('(') + if l != -1: + y = x[0:l-1] + #print y + try: + n = float(y) + return n + except ValueError: + pass + return None + +def compare_object_attrs_key(x): + if '__len__' == x: + as_number = to_number(x) + if as_number is None: + as_number = 99999999 + # __len__ should appear after other attributes in a list. 
+ return (1, as_number) + else: + return (-1, to_string(x)) + +if IS_PY3K: + def is_string(x): + return isinstance(x, str) + +else: + def is_string(x): + return isinstance(x, basestring) + +def to_string(x): + if is_string(x): + return x + else: + return str(x) + +def print_exc(): + if traceback: + traceback.print_exc() + +if IS_PY3K: + def quote_smart(s, safe='/'): + return quote(s, safe) +else: + def quote_smart(s, safe='/'): + if isinstance(s, unicode): + s = s.encode('utf-8') + + return quote(s, safe) + + +def get_clsname_for_code(code, frame): + clsname = None + if len(code.co_varnames) > 0: + # We are checking the first argument of the function + # (`self` or `cls` for methods). + first_arg_name = code.co_varnames[0] + if first_arg_name in frame.f_locals: + first_arg_obj = frame.f_locals[first_arg_name] + if inspect.isclass(first_arg_obj): # class method + first_arg_class = first_arg_obj + else: # instance method + first_arg_class = first_arg_obj.__class__ + func_name = code.co_name + if hasattr(first_arg_class, func_name): + method = getattr(first_arg_class, func_name) + func_code = None + if hasattr(method, 'func_code'): # Python2 + func_code = method.func_code + elif hasattr(method, '__code__'): # Python3 + func_code = method.__code__ + if func_code and func_code == code: + clsname = first_arg_class.__name__ + + return clsname + + +def _get_project_roots(project_roots_cache=[]): + # Note: the project_roots_cache is the same instance among the many calls to the method + if not project_roots_cache: + roots = os.getenv('IDE_PROJECT_ROOTS', '').split(os.pathsep) + pydev_log.debug("IDE_PROJECT_ROOTS %s\n" % roots) + new_roots = [] + for root in roots: + new_roots.append(os.path.normcase(root)) + project_roots_cache.append(new_roots) + return project_roots_cache[-1] # returns the project roots with case normalized + + +def _get_library_roots(library_roots_cache=[]): + # Note: the project_roots_cache is the same instance among the many calls to the method + if not library_roots_cache: + roots = os.getenv('LIBRARY_ROOTS', '').split(os.pathsep) + pydev_log.debug("LIBRARY_ROOTS %s\n" % roots) + new_roots = [] + for root in roots: + new_roots.append(os.path.normcase(root)) + library_roots_cache.append(new_roots) + return library_roots_cache[-1] # returns the project roots with case normalized + + +def not_in_project_roots(filename, filename_to_not_in_scope_cache={}): + # Note: the filename_to_not_in_scope_cache is the same instance among the many calls to the method + try: + return filename_to_not_in_scope_cache[filename] + except: + project_roots = _get_project_roots() + original_filename = filename + if not os.path.isabs(filename) and not filename.startswith('<'): + filename = os.path.abspath(filename) + filename = os.path.normcase(filename) + for root in project_roots: + if len(root) > 0 and filename.startswith(root): + filename_to_not_in_scope_cache[original_filename] = False + break + else: # for else (only called if the break wasn't reached). + filename_to_not_in_scope_cache[original_filename] = True + + if not filename_to_not_in_scope_cache[original_filename]: + # additional check if interpreter is situated in a project directory + library_roots = _get_library_roots() + for root in library_roots: + if root != '' and filename.startswith(root): + filename_to_not_in_scope_cache[original_filename] = True + + # at this point it must be loaded. 
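+        # Descriptive note: the cached value ends up True when the file is outside every
+        # IDE_PROJECT_ROOTS entry, or when it is inside a project root but also under one
+        # of the LIBRARY_ROOTS entries (e.g. an interpreter installed inside the project).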
+ return filename_to_not_in_scope_cache[original_filename] + + +def is_filter_enabled(): + return os.getenv('PYDEVD_FILTERS') is not None + + +def is_filter_libraries(): + is_filter = os.getenv('PYDEVD_FILTER_LIBRARIES') is not None + pydev_log.debug("PYDEVD_FILTER_LIBRARIES %s\n" % is_filter) + return is_filter + + +def _get_stepping_filters(filters_cache=[]): + if not filters_cache: + filters = os.getenv('PYDEVD_FILTERS', '').split(';') + pydev_log.debug("PYDEVD_FILTERS %s\n" % filters) + new_filters = [] + for new_filter in filters: + new_filters.append(new_filter) + filters_cache.append(new_filters) + return filters_cache[-1] + + +def is_ignored_by_filter(filename, filename_to_ignored_by_filters_cache={}): + try: + return filename_to_ignored_by_filters_cache[filename] + except: + import fnmatch + for stepping_filter in _get_stepping_filters(): + if fnmatch.fnmatch(filename, stepping_filter): + pydev_log.debug("File %s ignored by filter %s" % (filename, stepping_filter)) + filename_to_ignored_by_filters_cache[filename] = True + break + else: + filename_to_ignored_by_filters_cache[filename] = False + + return filename_to_ignored_by_filters_cache[filename] + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_vars.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_vars.py new file mode 100644 index 00000000..206ece0d --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_vars.py @@ -0,0 +1,608 @@ +""" pydevd_vars deals with variables: + resolution/conversion to XML. +""" +import pickle +from _pydevd_bundle.pydevd_constants import get_frame, get_thread_id, xrange + +from _pydevd_bundle.pydevd_custom_frames import get_custom_frame +from _pydevd_bundle.pydevd_xml import ExceptionOnEvaluate, get_type, var_to_xml +from _pydev_imps._pydev_saved_modules import thread + +try: + from StringIO import StringIO +except ImportError: + from io import StringIO +import sys # @Reimport + +from _pydev_imps._pydev_saved_modules import threading +import traceback +from _pydevd_bundle import pydevd_save_locals +from _pydev_bundle.pydev_imports import Exec, quote, execfile +from _pydevd_bundle.pydevd_utils import to_string + +SENTINEL_VALUE = [] + +# ------------------------------------------------------------------------------------------------------ class for errors + +class VariableError(RuntimeError): pass + + +class FrameNotFoundError(RuntimeError): pass + + +def _iter_frames(initialFrame): + '''NO-YIELD VERSION: Iterates through all the frames starting at the specified frame (which will be the first returned item)''' + # cannot use yield + frames = [] + + while initialFrame is not None: + frames.append(initialFrame) + initialFrame = initialFrame.f_back + + return frames + + +def dump_frames(thread_id): + sys.stdout.write('dumping frames\n') + if thread_id != get_thread_id(threading.currentThread()): + raise VariableError("find_frame: must execute on same thread") + + curFrame = get_frame() + for frame in _iter_frames(curFrame): + sys.stdout.write('%s\n' % pickle.dumps(frame)) + + +# =============================================================================== +# AdditionalFramesContainer +# =============================================================================== +class AdditionalFramesContainer: + lock = thread.allocate_lock() + additional_frames = {} # dict of dicts + + +def add_additional_frame_by_id(thread_id, frames_by_id): + AdditionalFramesContainer.additional_frames[thread_id] = frames_by_id + + +addAdditionalFrameById = add_additional_frame_by_id # Backward compatibility + + +def 
remove_additional_frame_by_id(thread_id): + del AdditionalFramesContainer.additional_frames[thread_id] + + +removeAdditionalFrameById = remove_additional_frame_by_id # Backward compatibility + + +def has_additional_frames_by_id(thread_id): + return thread_id in AdditionalFramesContainer.additional_frames + + +def get_additional_frames_by_id(thread_id): + return AdditionalFramesContainer.additional_frames.get(thread_id) + + +def find_frame(thread_id, frame_id): + """ returns a frame on the thread that has a given frame_id """ + try: + curr_thread_id = get_thread_id(threading.currentThread()) + if thread_id != curr_thread_id: + try: + return get_custom_frame(thread_id, frame_id) # I.e.: thread_id could be a stackless frame id + thread_id. + except: + pass + + raise VariableError("find_frame: must execute on same thread (%s != %s)" % (thread_id, curr_thread_id)) + + lookingFor = int(frame_id) + + if AdditionalFramesContainer.additional_frames: + if thread_id in AdditionalFramesContainer.additional_frames: + frame = AdditionalFramesContainer.additional_frames[thread_id].get(lookingFor) + + if frame is not None: + return frame + + curFrame = get_frame() + if frame_id == "*": + return curFrame # any frame is specified with "*" + + frameFound = None + + for frame in _iter_frames(curFrame): + if lookingFor == id(frame): + frameFound = frame + del frame + break + + del frame + + # Important: python can hold a reference to the frame from the current context + # if an exception is raised, so, if we don't explicitly add those deletes + # we might have those variables living much more than we'd want to. + + # I.e.: sys.exc_info holding reference to frame that raises exception (so, other places + # need to call sys.exc_clear()) + del curFrame + + if frameFound is None: + msgFrames = '' + i = 0 + + for frame in _iter_frames(get_frame()): + i += 1 + msgFrames += str(id(frame)) + if i % 5 == 0: + msgFrames += '\n' + else: + msgFrames += ' - ' + + errMsg = '''find_frame: frame not found. + Looking for thread_id:%s, frame_id:%s + Current thread_id:%s, available frames: + %s\n + ''' % (thread_id, lookingFor, curr_thread_id, msgFrames) + + sys.stderr.write(errMsg) + return None + + return frameFound + except: + import traceback + traceback.print_exc() + return None + + +def getVariable(thread_id, frame_id, scope, attrs): + """ + returns the value of a variable + + :scope: can be BY_ID, EXPRESSION, GLOBAL, LOCAL, FRAME + + BY_ID means we'll traverse the list of all objects alive to get the object. + + :attrs: after reaching the proper scope, we have to get the attributes until we find + the proper location (i.e.: obj\tattr1\tattr2) + + :note: when BY_ID is used, the frame_id is considered the id of the object to find and + not the frame (as we don't care about the frame in this case). + """ + if scope == 'BY_ID': + if thread_id != get_thread_id(threading.currentThread()): + raise VariableError("getVariable: must execute on same thread") + + try: + import gc + objects = gc.get_objects() + except: + pass # Not all python variants have it. + else: + frame_id = int(frame_id) + for var in objects: + if id(var) == frame_id: + if attrs is not None: + attrList = attrs.split('\t') + for k in attrList: + _type, _typeName, resolver = get_type(var) + var = resolver.resolve(var, k) + + return var + + # If it didn't return previously, we coudn't find it by id (i.e.: alrceady garbage collected). 
+ sys.stderr.write('Unable to find object with id: %s\n' % (frame_id,)) + return None + + frame = find_frame(thread_id, frame_id) + if frame is None: + return {} + + if attrs is not None: + attrList = attrs.split('\t') + else: + attrList = [] + + for attr in attrList: + attr.replace("@_@TAB_CHAR@_@", '\t') + + if scope == 'EXPRESSION': + for count in xrange(len(attrList)): + if count == 0: + # An Expression can be in any scope (globals/locals), therefore it needs to evaluated as an expression + var = evaluate_expression(thread_id, frame_id, attrList[count], False) + else: + _type, _typeName, resolver = get_type(var) + var = resolver.resolve(var, attrList[count]) + else: + if scope == "GLOBAL": + var = frame.f_globals + del attrList[0] # globals are special, and they get a single dummy unused attribute + else: + # in a frame access both locals and globals as Python does + var = {} + var.update(frame.f_globals) + var.update(frame.f_locals) + + for k in attrList: + _type, _typeName, resolver = get_type(var) + var = resolver.resolve(var, k) + + return var + + +def resolve_compound_variable(thread_id, frame_id, scope, attrs): + """ returns the value of the compound variable as a dictionary""" + + var = getVariable(thread_id, frame_id, scope, attrs) + + try: + _type, _typeName, resolver = get_type(var) + return resolver.get_dictionary(var) + except: + sys.stderr.write('Error evaluating: thread_id: %s\nframe_id: %s\nscope: %s\nattrs: %s\n' % ( + thread_id, frame_id, scope, attrs,)) + traceback.print_exc() + + +def resolve_var(var, attrs): + attrList = attrs.split('\t') + + for k in attrList: + type, _typeName, resolver = get_type(var) + + var = resolver.resolve(var, k) + + try: + type, _typeName, resolver = get_type(var) + return resolver.get_dictionary(var) + except: + traceback.print_exc() + + +def custom_operation(thread_id, frame_id, scope, attrs, style, code_or_file, operation_fn_name): + """ + We'll execute the code_or_file and then search in the namespace the operation_fn_name to execute with the given var. + + code_or_file: either some code (i.e.: from pprint import pprint) or a file to be executed. + operation_fn_name: the name of the operation to execute after the exec (i.e.: pprint) + """ + expressionValue = getVariable(thread_id, frame_id, scope, attrs) + + try: + namespace = {'__name__': ''} + if style == "EXECFILE": + namespace['__file__'] = code_or_file + execfile(code_or_file, namespace, namespace) + else: # style == EXEC + namespace['__file__'] = '' + Exec(code_or_file, namespace, namespace) + + return str(namespace[operation_fn_name](expressionValue)) + except: + traceback.print_exc() + + +def eval_in_context(expression, globals, locals): + result = None + try: + result = eval(expression, globals, locals) + except Exception: + s = StringIO() + traceback.print_exc(file=s) + result = s.getvalue() + + try: + try: + etype, value, tb = sys.exc_info() + result = value + finally: + etype = value = tb = None + except: + pass + + result = ExceptionOnEvaluate(result) + + # Ok, we have the initial error message, but let's see if we're dealing with a name mangling error... + try: + if '__' in expression: + # Try to handle '__' name mangling... 
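+            # Illustrative example: evaluating "self.__count" on an instance of class Foo
+            # fails on the private name, so the loop below retries with the mangled form
+            # "_Foo__count" built from the object's class name.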
+ split = expression.split('.') + curr = locals.get(split[0]) + for entry in split[1:]: + if entry.startswith('__') and not hasattr(curr, entry): + entry = '_%s%s' % (curr.__class__.__name__, entry) + curr = getattr(curr, entry) + + result = curr + except: + pass + return result + + +def evaluate_expression(thread_id, frame_id, expression, doExec): + '''returns the result of the evaluated expression + @param doExec: determines if we should do an exec or an eval + ''' + frame = find_frame(thread_id, frame_id) + if frame is None: + return + + # Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329 + # (Names not resolved in generator expression in method) + # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html + updated_globals = {} + updated_globals.update(frame.f_globals) + updated_globals.update(frame.f_locals) # locals later because it has precedence over the actual globals + + try: + expression = str(expression.replace('@LINE@', '\n')) + + if doExec: + try: + # try to make it an eval (if it is an eval we can print it, otherwise we'll exec it and + # it will have whatever the user actually did) + compiled = compile(expression, '', 'eval') + except: + Exec(expression, updated_globals, frame.f_locals) + pydevd_save_locals.save_locals(frame) + else: + result = eval(compiled, updated_globals, frame.f_locals) + if result is not None: # Only print if it's not None (as python does) + sys.stdout.write('%s\n' % (result,)) + return + + else: + return eval_in_context(expression, updated_globals, frame.f_locals) + finally: + # Should not be kept alive if an exception happens and this frame is kept in the stack. + del updated_globals + del frame + + +def change_attr_expression(thread_id, frame_id, attr, expression, dbg, value=SENTINEL_VALUE): + '''Changes some attribute in a given frame. + ''' + frame = find_frame(thread_id, frame_id) + if frame is None: + return + + try: + expression = expression.replace('@LINE@', '\n') + + if dbg.plugin and value is SENTINEL_VALUE: + result = dbg.plugin.change_variable(frame, attr, expression) + if result: + return result + + if attr[:7] == "Globals": + attr = attr[8:] + if attr in frame.f_globals: + if value is SENTINEL_VALUE: + value = eval(expression, frame.f_globals, frame.f_locals) + frame.f_globals[attr] = value + return frame.f_globals[attr] + else: + if '.' not in attr: # i.e.: if we have a '.', we're changing some attribute of a local var. 
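+                # Descriptive note: this branch only deals with a plain local name (e.g. "x");
+                # dotted targets such as "obj.attr" skip it and use the generic Exec path below.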
+ if pydevd_save_locals.is_save_locals_available(): + if value is SENTINEL_VALUE: + value = eval(expression, frame.f_globals, frame.f_locals) + frame.f_locals[attr] = value + pydevd_save_locals.save_locals(frame) + return frame.f_locals[attr] + + # default way (only works for changing it in the topmost frame) + if value is SENTINEL_VALUE: + value = eval(expression, frame.f_globals, frame.f_locals) + result = value + Exec('%s=%s' % (attr, expression), frame.f_globals, frame.f_locals) + return result + + + except Exception: + traceback.print_exc() + + +MAXIMUM_ARRAY_SIZE = 100 +MAX_SLICE_SIZE = 1000 + + +def table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format): + _, type_name, _ = get_type(array) + if type_name == 'ndarray': + array, metaxml, r, c, f = array_to_meta_xml(array, name, format) + xml = metaxml + format = '%' + f + if rows == -1 and cols == -1: + rows = r + cols = c + xml += array_to_xml(array, roffset, coffset, rows, cols, format) + elif type_name == 'DataFrame': + xml = dataframe_to_xml(array, name, roffset, coffset, rows, cols, format) + else: + raise VariableError("Do not know how to convert type %s to table" % (type_name)) + + return "%s" % xml + + +def array_to_xml(array, roffset, coffset, rows, cols, format): + xml = "" + rows = min(rows, MAXIMUM_ARRAY_SIZE) + cols = min(cols, MAXIMUM_ARRAY_SIZE) + + # there is no obvious rule for slicing (at least 5 choices) + if len(array) == 1 and (rows > 1 or cols > 1): + array = array[0] + if array.size > len(array): + array = array[roffset:, coffset:] + rows = min(rows, len(array)) + cols = min(cols, len(array[0])) + if len(array) == 1: + array = array[0] + elif array.size == len(array): + if roffset == 0 and rows == 1: + array = array[coffset:] + cols = min(cols, len(array)) + elif coffset == 0 and cols == 1: + array = array[roffset:] + rows = min(rows, len(array)) + + xml += "" % (rows, cols) + for row in xrange(rows): + xml += "" % to_string(row) + for col in xrange(cols): + value = array + if rows == 1 or cols == 1: + if rows == 1 and cols == 1: + value = array[0] + else: + if rows == 1: + dim = col + else: + dim = row + value = array[dim] + if "ndarray" in str(type(value)): + value = value[0] + else: + value = array[row][col] + value = format % value + xml += var_to_xml(value, '') + return xml + + +def array_to_meta_xml(array, name, format): + type = array.dtype.kind + slice = name + l = len(array.shape) + + # initial load, compute slice + if format == '%': + if l > 2: + slice += '[0]' * (l - 2) + for r in xrange(l - 2): + array = array[0] + if type == 'f': + format = '.5f' + elif type == 'i' or type == 'u': + format = 'd' + else: + format = 's' + else: + format = format.replace('%', '') + + l = len(array.shape) + reslice = "" + if l > 2: + raise Exception("%s has more than 2 dimensions." 
% slice) + elif l == 1: + # special case with 1D arrays arr[i, :] - row, but arr[:, i] - column with equal shape and ndim + # http://stackoverflow.com/questions/16837946/numpy-a-2-rows-1-column-file-loadtxt-returns-1row-2-columns + # explanation: http://stackoverflow.com/questions/15165170/how-do-i-maintain-row-column-orientation-of-vectors-in-numpy?rq=1 + # we use kind of a hack - get information about memory from C_CONTIGUOUS + is_row = array.flags['C_CONTIGUOUS'] + + if is_row: + rows = 1 + cols = min(len(array), MAX_SLICE_SIZE) + if cols < len(array): + reslice = '[0:%s]' % (cols) + array = array[0:cols] + else: + cols = 1 + rows = min(len(array), MAX_SLICE_SIZE) + if rows < len(array): + reslice = '[0:%s]' % (rows) + array = array[0:rows] + elif l == 2: + rows = min(array.shape[-2], MAX_SLICE_SIZE) + cols = min(array.shape[-1], MAX_SLICE_SIZE) + if cols < array.shape[-1] or rows < array.shape[-2]: + reslice = '[0:%s, 0:%s]' % (rows, cols) + array = array[0:rows, 0:cols] + + # avoid slice duplication + if not slice.endswith(reslice): + slice += reslice + + bounds = (0, 0) + if type in "biufc": + bounds = (array.min(), array.max()) + xml = '' % \ + (slice, rows, cols, format, type, bounds[1], bounds[0]) + return array, xml, rows, cols, format + + + +def dataframe_to_xml(df, name, roffset, coffset, rows, cols, format): + """ + :type df: pandas.core.frame.DataFrame + :type name: str + :type coffset: int + :type roffset: int + :type rows: int + :type cols: int + :type format: str + + + """ + num_rows = min(df.shape[0], MAX_SLICE_SIZE) + num_cols = min(df.shape[1], MAX_SLICE_SIZE) + if (num_rows, num_cols) != df.shape: + df = df.iloc[0:num_rows, 0: num_cols] + slice = '.iloc[0:%s, 0:%s]' % (num_rows, num_cols) + else: + slice = '' + slice = name + slice + xml = '\n' % \ + (slice, num_rows, num_cols) + + if (rows, cols) == (-1, -1): + rows, cols = num_rows, num_cols + + rows = min(rows, MAXIMUM_ARRAY_SIZE) + cols = min(min(cols, MAXIMUM_ARRAY_SIZE), num_cols) + # need to precompute column bounds here before slicing! 
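+    # Descriptive note: the min/max taken below use the full (size-capped) column,
+    # before df is narrowed to the requested roffset/coffset window further down.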
+ col_bounds = [None] * cols + for col in xrange(cols): + dtype = df.dtypes.iloc[coffset + col].kind + if dtype in "biufc": + cvalues = df.iloc[:, coffset + col] + bounds = (cvalues.min(), cvalues.max()) + else: + bounds = (0, 0) + col_bounds[col] = bounds + + df = df.iloc[roffset: roffset + rows, coffset: coffset + cols] + rows, cols = df.shape + + xml += "\n" % (rows, cols) + format = format.replace('%', '') + col_formats = [] + + get_label = lambda label: str(label) if not isinstance(label, tuple) else '/'.join(map(str, label)) + + for col in xrange(cols): + dtype = df.dtypes.iloc[col].kind + if dtype == 'f' and format: + fmt = format + elif dtype == 'f': + fmt = '.5f' + elif dtype == 'i' or dtype == 'u': + fmt= 'd' + else: + fmt= 's' + col_formats.append('%' + fmt) + bounds = col_bounds[col] + + xml += '\n' % \ + (str(col), get_label(df.axes[1].values[col]), dtype, fmt, bounds[1], bounds[0]) + for row, label in enumerate(iter(df.axes[0])): + xml += "\n" % \ + (str(row), get_label(label)) + xml += "\n" + xml += "\n" % (rows, cols) + for row in xrange(rows): + xml += "\n" % str(row) + for col in xrange(cols): + value = df.iat[row, col] + value = col_formats[col] % value + xml += var_to_xml(value, '') + return xml diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_vm_type.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_vm_type.py new file mode 100644 index 00000000..d2cf5b67 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_vm_type.py @@ -0,0 +1,41 @@ +import sys + +#======================================================================================================================= +# PydevdVmType +#======================================================================================================================= +class PydevdVmType: + + PYTHON = 'python' + JYTHON = 'jython' + vm_type = None + + +#======================================================================================================================= +# set_vm_type +#======================================================================================================================= +def set_vm_type(vm_type): + PydevdVmType.vm_type = vm_type + + +#======================================================================================================================= +# get_vm_type +#======================================================================================================================= +def get_vm_type(): + if PydevdVmType.vm_type is None: + setup_type() + return PydevdVmType.vm_type + + +#======================================================================================================================= +# setup_type +#======================================================================================================================= +def setup_type(str=None): + if str is not None: + PydevdVmType.vm_type = str + return + + if sys.platform.startswith("java"): + PydevdVmType.vm_type = PydevdVmType.JYTHON + else: + PydevdVmType.vm_type = PydevdVmType.PYTHON + diff --git a/ptvsd/pydevd/_pydevd_bundle/pydevd_xml.py b/ptvsd/pydevd/_pydevd_bundle/pydevd_xml.py new file mode 100644 index 00000000..2c7fdac8 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_bundle/pydevd_xml.py @@ -0,0 +1,362 @@ +from _pydev_bundle import pydev_log +import traceback +from _pydevd_bundle import pydevd_extension_utils +from _pydevd_bundle import pydevd_resolver +import sys +from _pydevd_bundle.pydevd_constants import dict_iter_items, dict_keys, IS_PY3K, \ + MAXIMUM_VARIABLE_REPRESENTATION_SIZE, RETURN_VALUES_DICT +from 
_pydev_bundle.pydev_imports import quote +from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider, StrPresentationProvider + +try: + import types + + frame_type = types.FrameType +except: + frame_type = None + +try: + from xml.sax.saxutils import escape + + + def make_valid_xml_value(s): + return escape(s, {'"': '"'}) +except: + # Simple replacement if it's not there. + def make_valid_xml_value(s): + return s.replace('<', '<').replace('>', '>').replace('"', '"') + + +class ExceptionOnEvaluate: + def __init__(self, result): + self.result = result + + +_IS_JYTHON = sys.platform.startswith("java") + + +def _create_default_type_map(): + if not _IS_JYTHON: + default_type_map = [ + # None means that it should not be treated as a compound variable + + # isintance does not accept a tuple on some versions of python, so, we must declare it expanded + (type(None), None,), + (int, None), + (float, None), + (complex, None), + (str, None), + (tuple, pydevd_resolver.tupleResolver), + (list, pydevd_resolver.tupleResolver), + (dict, pydevd_resolver.dictResolver), + ] + try: + default_type_map.append((long, None)) # @UndefinedVariable + except: + pass # not available on all python versions + + try: + default_type_map.append((unicode, None)) # @UndefinedVariable + except: + pass # not available on all python versions + + try: + default_type_map.append((set, pydevd_resolver.setResolver)) + except: + pass # not available on all python versions + + try: + default_type_map.append((frozenset, pydevd_resolver.setResolver)) + except: + pass # not available on all python versions + + try: + from django.utils.datastructures import MultiValueDict + default_type_map.insert(0, (MultiValueDict, pydevd_resolver.multiValueDictResolver)) + # we should put it before dict + except: + pass # django may not be installed + + try: + from django.forms import BaseForm + default_type_map.insert(0, (BaseForm, pydevd_resolver.djangoFormResolver)) + # we should put it before instance resolver + except: + pass # django may not be installed + + try: + from collections import deque + default_type_map.append((deque, pydevd_resolver.dequeResolver)) + except: + pass + + if frame_type is not None: + default_type_map.append((frame_type, pydevd_resolver.frameResolver)) + + else: + from org.python import core # @UnresolvedImport + default_type_map = [ + (core.PyNone, None), + (core.PyInteger, None), + (core.PyLong, None), + (core.PyFloat, None), + (core.PyComplex, None), + (core.PyString, None), + (core.PyTuple, pydevd_resolver.tupleResolver), + (core.PyList, pydevd_resolver.tupleResolver), + (core.PyDictionary, pydevd_resolver.dictResolver), + (core.PyStringMap, pydevd_resolver.dictResolver), + ] + if hasattr(core, 'PyJavaInstance'): + # Jython 2.5b3 removed it. + default_type_map.append((core.PyJavaInstance, pydevd_resolver.instanceResolver)) + + return default_type_map + + +class TypeResolveHandler(object): + NO_PROVIDER = [] # Sentinel value (any mutable object to be used as a constant would be valid). + + def __init__(self): + # Note: don't initialize with the types we already know about so that the extensions can override + # the default resolvers that are already available if they want. 
+ self._type_to_resolver_cache = {} + self._type_to_str_provider_cache = {} + self._initialized = False + + def _initialize(self): + self._default_type_map = _create_default_type_map() + self._resolve_providers = pydevd_extension_utils.extensions_of_type(TypeResolveProvider) + self._str_providers = pydevd_extension_utils.extensions_of_type(StrPresentationProvider) + self._initialized = True + + def get_type(self, o): + try: + try: + # Faster than type(o) as we don't need the function call. + type_object = o.__class__ + except: + # Not all objects have __class__ (i.e.: there are bad bindings around). + type_object = type(o) + + type_name = type_object.__name__ + except: + # This happens for org.python.core.InitModule + return 'Unable to get Type', 'Unable to get Type', None + + return self._get_type(o, type_object, type_name) + + def _get_type(self, o, type_object, type_name): + resolver = self._type_to_resolver_cache.get(type_object) + if resolver is not None: + return type_object, type_name, resolver + + if not self._initialized: + self._initialize() + + try: + for resolver in self._resolve_providers: + if resolver.can_provide(type_object, type_name): + # Cache it + self._type_to_resolver_cache[type_object] = resolver + return type_object, type_name, resolver + + for t in self._default_type_map: + if isinstance(o, t[0]): + # Cache it + resolver = t[1] + self._type_to_resolver_cache[type_object] = resolver + return (type_object, type_name, resolver) + except: + traceback.print_exc() + + # No match return default (and cache it). + resolver = pydevd_resolver.defaultResolver + self._type_to_resolver_cache[type_object] = resolver + return type_object, type_name, resolver + + if _IS_JYTHON: + _base_get_type = _get_type + + def _get_type(self, o, type_object, type_name): + if type_name == 'org.python.core.PyJavaInstance': + return type_object, type_name, pydevd_resolver.instanceResolver + + if type_name == 'org.python.core.PyArray': + return type_object, type_name, pydevd_resolver.jyArrayResolver + + return self._base_get_type(o, type_name, type_name) + + def str_from_providers(self, o, type_object, type_name): + provider = self._type_to_str_provider_cache.get(type_object) + + if provider is self.NO_PROVIDER: + return None + + if provider is not None: + return provider.get_str(o) + + if not self._initialized: + self._initialize() + + for provider in self._str_providers: + if provider.can_provide(type_object, type_name): + self._type_to_str_provider_cache[type_object] = provider + return provider.get_str(o) + + self._type_to_str_provider_cache[type_object] = self.NO_PROVIDER + return None + + +_TYPE_RESOLVE_HANDLER = TypeResolveHandler() + +""" +def get_type(o): + Receives object and returns a triple (typeObject, typeString, resolver). + + resolver != None means that variable is a container, and should be displayed as a hierarchy. + + Use the resolver to get its attributes. + + All container objects should have a resolver. 
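+
+    Illustrative example (assuming no extension provider overrides it):
+    get_type([1, 2]) -> (list, 'list', pydevd_resolver.tupleResolver)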
+""" +get_type = _TYPE_RESOLVE_HANDLER.get_type + +_str_from_providers = _TYPE_RESOLVE_HANDLER.str_from_providers + + +def return_values_from_dict_to_xml(return_dict): + res = "" + for name, val in dict_iter_items(return_dict): + res += var_to_xml(val, name, additional_in_xml=' isRetVal="True"') + return res + + +def frame_vars_to_xml(frame_f_locals, hidden_ns=None): + """ dumps frame variables to XML + + """ + xml = "" + + keys = dict_keys(frame_f_locals) + if hasattr(keys, 'sort'): + keys.sort() # Python 3.0 does not have it + else: + keys = sorted(keys) # Jython 2.1 does not have it + + return_values_xml = '' + + for k in keys: + try: + v = frame_f_locals[k] + if k == RETURN_VALUES_DICT: + for name, val in dict_iter_items(v): + return_values_xml += var_to_xml(val, name, additional_in_xml=' isRetVal="True"') + + else: + if hidden_ns is not None and k in hidden_ns: + xml += var_to_xml(v, str(k), additional_in_xml=' isIPythonHidden="True"') + else: + xml += var_to_xml(v, str(k)) + except Exception: + traceback.print_exc() + pydev_log.error("Unexpected error, recovered safely.\n") + + # Show return values as the first entry. + return return_values_xml + xml + + +def var_to_xml(val, name, doTrim=True, additional_in_xml=''): + """ single variable or dictionary to xml representation """ + + try: + # This should be faster than isinstance (but we have to protect against not having a '__class__' attribute). + is_exception_on_eval = val.__class__ == ExceptionOnEvaluate + except: + is_exception_on_eval = False + + if is_exception_on_eval: + v = val.result + else: + v = val + + _type, typeName, resolver = get_type(v) + type_qualifier = getattr(_type, "__module__", "") + try: + str_from_provider = _str_from_providers(v, _type, typeName) + if str_from_provider is not None: + value = str_from_provider + elif hasattr(v, '__class__'): + if v.__class__ == frame_type: + value = pydevd_resolver.frameResolver.get_frame_name(v) + + elif v.__class__ in (list, tuple): + if len(v) > 300: + value = '%s: %s' % (str(v.__class__), '' % (len(v),)) + else: + value = '%s: %s' % (str(v.__class__), v) + else: + try: + cName = str(v.__class__) + if cName.find('.') != -1: + cName = cName.split('.')[-1] + + elif cName.find("'") != -1: # does not have '.' (could be something like ) + cName = cName[cName.index("'") + 1:] + + if cName.endswith("'>"): + cName = cName[:-2] + except: + cName = str(v.__class__) + + value = '%s: %s' % (cName, v) + else: + value = str(v) + except: + try: + value = repr(v) + except: + value = 'Unable to get repr for %s' % v.__class__ + + try: + name = quote(name, '/>_= ') # TODO: Fix PY-5834 without using quote + except: + pass + + xml = ' MAXIMUM_VARIABLE_REPRESENTATION_SIZE and doTrim: + value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE] + value += '...' 
+ + # fix to work with unicode values + try: + if not IS_PY3K: + if value.__class__ == unicode: # @UndefinedVariable + value = value.encode('utf-8') + else: + if value.__class__ == bytes: + value = value.encode('utf-8') + except TypeError: # in java, unicode is a function + pass + + xml_value = ' value="%s"' % (make_valid_xml_value(quote(value, '/>_= '))) + else: + xml_value = '' + + if is_exception_on_eval: + xml_container = ' isErrorOnEval="True"' + else: + if resolver is not None: + xml_container = ' isContainer="True"' + else: + xml_container = '' + + return ''.join((xml, xml_qualifier, xml_value, xml_container, additional_in_xml, ' />\n')) diff --git a/ptvsd/pydevd/_pydevd_frame_eval/__init__.py b/ptvsd/pydevd/_pydevd_frame_eval/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py new file mode 100644 index 00000000..3b8f1fc6 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_cython_wrapper.py @@ -0,0 +1,31 @@ +try: + from _pydevd_frame_eval.pydevd_frame_evaluator import frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, dummy_trace_dispatch +except ImportError: + try: + import struct + import sys + try: + is_64bits = sys.maxsize > 2**32 + except: + # In Jython this call fails, but this is Ok, we don't support Jython for speedups anyways. + raise ImportError + plat = '32' + if is_64bits: + plat = '64' + + # We also accept things as: + # + # _pydevd_frame_eval.pydevd_frame_evaluator_win32_27_32 + # _pydevd_frame_eval.pydevd_frame_evaluator_win32_34_64 + # + # to have multiple pre-compiled pyds distributed along the IDE + # (generated by build_tools/build_binaries_windows.py). + + mod_name = 'pydevd_frame_evaluator_%s_%s%s_%s' % (sys.platform, sys.version_info[0], sys.version_info[1], plat) + check_name = '_pydevd_frame_eval.%s' % (mod_name,) + mod = __import__(check_name) + mod = getattr(mod, mod_name) + frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, dummy_trace_dispatch = \ + mod.frame_eval_func, mod.stop_frame_eval, mod.enable_cache_frames_without_breaks, mod.dummy_trace_dispatch + except ImportError: + raise \ No newline at end of file diff --git a/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py new file mode 100644 index 00000000..47a7188d --- /dev/null +++ b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py @@ -0,0 +1,28 @@ +import os +import sys + +IS_PY36 = False +if sys.version_info[0] == 3 and sys.version_info[1] == 6: + IS_PY36 = True + +frame_eval_func = None +stop_frame_eval = None +enable_cache_frames_without_breaks = None +dummy_trace_dispatch = None + +USE_FRAME_EVAL = os.environ.get('PYDEVD_USE_FRAME_EVAL', None) + +if USE_FRAME_EVAL == 'NO': + frame_eval_func, stop_frame_eval = None, None + +else: + if IS_PY36: + try: + from _pydevd_frame_eval.pydevd_frame_eval_cython_wrapper import frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, \ + dummy_trace_dispatch + except ImportError: + from _pydev_bundle.pydev_monkey import log_error_once + + dirname = os.path.dirname(os.path.dirname(__file__)) + log_error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build." 
% ( + sys.executable, os.path.join(dirname, 'setup_cython.py'))) diff --git a/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c new file mode 100644 index 00000000..43992607 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c @@ -0,0 +1,6771 @@ +/* Generated by Cython 0.25.2 */ + +/* BEGIN: Cython Metadata +{ + "distutils": { + "depends": [] + }, + "module_name": "_pydevd_frame_eval.pydevd_frame_evaluator" +} +END: Cython Metadata */ + +#define PY_SSIZE_T_CLEAN +#include "Python.h" +#ifndef Py_PYTHON_H + #error Python headers needed to compile C extensions, please install development version of Python. +#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03020000) + #error Cython requires Python 2.6+ or Python 3.2+. +#else +#define CYTHON_ABI "0_25_2" +#include +#ifndef offsetof + #define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) +#endif +#if !defined(WIN32) && !defined(MS_WINDOWS) + #ifndef __stdcall + #define __stdcall + #endif + #ifndef __cdecl + #define __cdecl + #endif + #ifndef __fastcall + #define __fastcall + #endif +#endif +#ifndef DL_IMPORT + #define DL_IMPORT(t) t +#endif +#ifndef DL_EXPORT + #define DL_EXPORT(t) t +#endif +#ifndef HAVE_LONG_LONG + #if PY_VERSION_HEX >= 0x03030000 || (PY_MAJOR_VERSION == 2 && PY_VERSION_HEX >= 0x02070000) + #define HAVE_LONG_LONG + #endif +#endif +#ifndef PY_LONG_LONG + #define PY_LONG_LONG LONG_LONG +#endif +#ifndef Py_HUGE_VAL + #define Py_HUGE_VAL HUGE_VAL +#endif +#ifdef PYPY_VERSION + #define CYTHON_COMPILING_IN_PYPY 1 + #define CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #undef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 0 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #undef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #undef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 1 + #undef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 0 + #undef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 0 + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 +#elif defined(PYSTON_VERSION) + #define CYTHON_COMPILING_IN_PYPY 0 + #define CYTHON_COMPILING_IN_PYSTON 1 + #define CYTHON_COMPILING_IN_CPYTHON 0 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #undef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 0 + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #undef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 0 + #undef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 0 +#else + #define CYTHON_COMPILING_IN_PYPY 0 + #define 
CYTHON_COMPILING_IN_PYSTON 0 + #define CYTHON_COMPILING_IN_CPYTHON 1 + #ifndef CYTHON_USE_TYPE_SLOTS + #define CYTHON_USE_TYPE_SLOTS 1 + #endif + #if PY_MAJOR_VERSION < 3 + #undef CYTHON_USE_ASYNC_SLOTS + #define CYTHON_USE_ASYNC_SLOTS 0 + #elif !defined(CYTHON_USE_ASYNC_SLOTS) + #define CYTHON_USE_ASYNC_SLOTS 1 + #endif + #if PY_VERSION_HEX < 0x02070000 + #undef CYTHON_USE_PYLONG_INTERNALS + #define CYTHON_USE_PYLONG_INTERNALS 0 + #elif !defined(CYTHON_USE_PYLONG_INTERNALS) + #define CYTHON_USE_PYLONG_INTERNALS 1 + #endif + #ifndef CYTHON_USE_PYLIST_INTERNALS + #define CYTHON_USE_PYLIST_INTERNALS 1 + #endif + #ifndef CYTHON_USE_UNICODE_INTERNALS + #define CYTHON_USE_UNICODE_INTERNALS 1 + #endif + #if PY_VERSION_HEX < 0x030300F0 + #undef CYTHON_USE_UNICODE_WRITER + #define CYTHON_USE_UNICODE_WRITER 0 + #elif !defined(CYTHON_USE_UNICODE_WRITER) + #define CYTHON_USE_UNICODE_WRITER 1 + #endif + #ifndef CYTHON_AVOID_BORROWED_REFS + #define CYTHON_AVOID_BORROWED_REFS 0 + #endif + #ifndef CYTHON_ASSUME_SAFE_MACROS + #define CYTHON_ASSUME_SAFE_MACROS 1 + #endif + #ifndef CYTHON_UNPACK_METHODS + #define CYTHON_UNPACK_METHODS 1 + #endif + #ifndef CYTHON_FAST_THREAD_STATE + #define CYTHON_FAST_THREAD_STATE 1 + #endif + #ifndef CYTHON_FAST_PYCALL + #define CYTHON_FAST_PYCALL 1 + #endif +#endif +#if !defined(CYTHON_FAST_PYCCALL) +#define CYTHON_FAST_PYCCALL (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1) +#endif +#if CYTHON_USE_PYLONG_INTERNALS + #include "longintrepr.h" + #undef SHIFT + #undef BASE + #undef MASK +#endif +#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag) + #define Py_OptimizeFlag 0 +#endif +#define __PYX_BUILD_PY_SSIZE_T "n" +#define CYTHON_FORMAT_SSIZE_T "z" +#if PY_MAJOR_VERSION < 3 + #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyClass_Type +#else + #define __Pyx_BUILTIN_MODULE_NAME "builtins" + #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\ + PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos) + #define __Pyx_DefaultClassType PyType_Type +#endif +#ifndef Py_TPFLAGS_CHECKTYPES + #define Py_TPFLAGS_CHECKTYPES 0 +#endif +#ifndef Py_TPFLAGS_HAVE_INDEX + #define Py_TPFLAGS_HAVE_INDEX 0 +#endif +#ifndef Py_TPFLAGS_HAVE_NEWBUFFER + #define Py_TPFLAGS_HAVE_NEWBUFFER 0 +#endif +#ifndef Py_TPFLAGS_HAVE_FINALIZE + #define Py_TPFLAGS_HAVE_FINALIZE 0 +#endif +#ifndef METH_FASTCALL + #define METH_FASTCALL 0x80 + typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject **args, + Py_ssize_t nargs, PyObject *kwnames); +#else + #define __Pyx_PyCFunctionFast _PyCFunctionFast +#endif +#if CYTHON_FAST_PYCCALL +#define __Pyx_PyFastCFunction_Check(func)\ + ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST))))) +#else +#define __Pyx_PyFastCFunction_Check(func) 0 +#endif +#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND) + #define CYTHON_PEP393_ENABLED 1 + #define __Pyx_PyUnicode_READY(op) (likely(PyUnicode_IS_READY(op)) ?\ + 0 : _PyUnicode_Ready((PyObject *)(op))) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_LENGTH(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) PyUnicode_MAX_CHAR_VALUE(u) + #define __Pyx_PyUnicode_KIND(u) 
PyUnicode_KIND(u) + #define __Pyx_PyUnicode_DATA(u) PyUnicode_DATA(u) + #define __Pyx_PyUnicode_READ(k, d, i) PyUnicode_READ(k, d, i) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) PyUnicode_WRITE(k, d, i, ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != (likely(PyUnicode_IS_READY(u)) ? PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u))) +#else + #define CYTHON_PEP393_ENABLED 0 + #define PyUnicode_1BYTE_KIND 1 + #define PyUnicode_2BYTE_KIND 2 + #define PyUnicode_4BYTE_KIND 4 + #define __Pyx_PyUnicode_READY(op) (0) + #define __Pyx_PyUnicode_GET_LENGTH(u) PyUnicode_GET_SIZE(u) + #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i])) + #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u) ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111) + #define __Pyx_PyUnicode_KIND(u) (sizeof(Py_UNICODE)) + #define __Pyx_PyUnicode_DATA(u) ((void*)PyUnicode_AS_UNICODE(u)) + #define __Pyx_PyUnicode_READ(k, d, i) ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i])) + #define __Pyx_PyUnicode_WRITE(k, d, i, ch) (((void)(k)), ((Py_UNICODE*)d)[i] = ch) + #define __Pyx_PyUnicode_IS_TRUE(u) (0 != PyUnicode_GET_SIZE(u)) +#endif +#if CYTHON_COMPILING_IN_PYPY + #define __Pyx_PyUnicode_Concat(a, b) PyNumber_Add(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) PyNumber_Add(a, b) +#else + #define __Pyx_PyUnicode_Concat(a, b) PyUnicode_Concat(a, b) + #define __Pyx_PyUnicode_ConcatSafe(a, b) ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\ + PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b)) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains) + #define PyUnicode_Contains(u, s) PySequence_Contains(u, s) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check) + #define PyByteArray_Check(obj) PyObject_TypeCheck(obj, &PyByteArray_Type) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format) + #define PyObject_Format(obj, fmt) PyObject_CallMethod(obj, "__format__", "O", fmt) +#endif +#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc) + #define PyObject_Malloc(s) PyMem_Malloc(s) + #define PyObject_Free(p) PyMem_Free(p) + #define PyObject_Realloc(p) PyMem_Realloc(p) +#endif +#if CYTHON_COMPILING_IN_PYSTON + #define __Pyx_PyCode_HasFreeVars(co) PyCode_HasFreeVars(co) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno) +#else + #define __Pyx_PyCode_HasFreeVars(co) (PyCode_GetNumFree(co) > 0) + #define __Pyx_PyFrame_SetLineNumber(frame, lineno) (frame)->f_lineno = (lineno) +#endif +#define __Pyx_PyString_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b)) +#define __Pyx_PyUnicode_FormatSafe(a, b) ((unlikely((a) == Py_None)) ? 
PyNumber_Remainder(a, b) : PyUnicode_Format(a, b)) +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyString_Format(a, b) PyUnicode_Format(a, b) +#else + #define __Pyx_PyString_Format(a, b) PyString_Format(a, b) +#endif +#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII) + #define PyObject_ASCII(o) PyObject_Repr(o) +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBaseString_Type PyUnicode_Type + #define PyStringObject PyUnicodeObject + #define PyString_Type PyUnicode_Type + #define PyString_Check PyUnicode_Check + #define PyString_CheckExact PyUnicode_CheckExact +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj) + #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj) +#else + #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj)) + #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj)) +#endif +#ifndef PySet_CheckExact + #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) +#endif +#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) +#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception) +#if PY_MAJOR_VERSION >= 3 + #define PyIntObject PyLongObject + #define PyInt_Type PyLong_Type + #define PyInt_Check(op) PyLong_Check(op) + #define PyInt_CheckExact(op) PyLong_CheckExact(op) + #define PyInt_FromString PyLong_FromString + #define PyInt_FromUnicode PyLong_FromUnicode + #define PyInt_FromLong PyLong_FromLong + #define PyInt_FromSize_t PyLong_FromSize_t + #define PyInt_FromSsize_t PyLong_FromSsize_t + #define PyInt_AsLong PyLong_AsLong + #define PyInt_AS_LONG PyLong_AS_LONG + #define PyInt_AsSsize_t PyLong_AsSsize_t + #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask + #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask + #define PyNumber_Int PyNumber_Long +#endif +#if PY_MAJOR_VERSION >= 3 + #define PyBoolObject PyLongObject +#endif +#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY + #ifndef PyUnicode_InternFromString + #define PyUnicode_InternFromString(s) PyUnicode_FromString(s) + #endif +#endif +#if PY_VERSION_HEX < 0x030200A4 + typedef long Py_hash_t; + #define __Pyx_PyInt_FromHash_t PyInt_FromLong + #define __Pyx_PyInt_AsHash_t PyInt_AsLong +#else + #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t + #define __Pyx_PyInt_AsHash_t PyInt_AsSsize_t +#endif +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) +#else + #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass) +#endif +#if CYTHON_USE_ASYNC_SLOTS + #if PY_VERSION_HEX >= 0x030500B1 + #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods + #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async) + #else + typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; + } __Pyx_PyAsyncMethodsStruct; + #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved)) + #endif +#else + #define __Pyx_PyType_AsAsync(obj) NULL +#endif +#ifndef CYTHON_RESTRICT + #if defined(__GNUC__) + #define CYTHON_RESTRICT __restrict__ + #elif defined(_MSC_VER) && _MSC_VER >= 1400 + #define CYTHON_RESTRICT __restrict + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_RESTRICT restrict + #else + #define CYTHON_RESTRICT + #endif +#endif +#ifndef CYTHON_UNUSED +# if defined(__GNUC__) +# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)) +# define CYTHON_UNUSED __attribute__ ((__unused__)) +# else +# define CYTHON_UNUSED +# endif +#endif +#ifndef CYTHON_MAYBE_UNUSED_VAR +# if defined(__cplusplus) + template void CYTHON_MAYBE_UNUSED_VAR( const T& ) { } +# else +# define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x) +# endif +#endif +#ifndef CYTHON_NCP_UNUSED +# if CYTHON_COMPILING_IN_CPYTHON +# define CYTHON_NCP_UNUSED +# else +# define CYTHON_NCP_UNUSED CYTHON_UNUSED +# endif +#endif +#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None) + +#ifndef CYTHON_INLINE + #if defined(__clang__) + #define CYTHON_INLINE __inline__ __attribute__ ((__unused__)) + #elif defined(__GNUC__) + #define CYTHON_INLINE __inline__ + #elif defined(_MSC_VER) + #define CYTHON_INLINE __inline + #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define CYTHON_INLINE inline + #else + #define CYTHON_INLINE + #endif +#endif + +#if defined(WIN32) || defined(MS_WINDOWS) + #define _USE_MATH_DEFINES +#endif +#include +#ifdef NAN +#define __PYX_NAN() ((float) NAN) +#else +static CYTHON_INLINE float __PYX_NAN() { + float value; + memset(&value, 0xFF, sizeof(value)); + return value; +} +#endif +#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL) +#define __Pyx_truncl trunc +#else +#define __Pyx_truncl truncl +#endif + + +#define __PYX_ERR(f_index, lineno, Ln_error) \ +{ \ + __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \ +} + +#if PY_MAJOR_VERSION >= 3 + #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) +#else + #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) + #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) +#endif + +#ifndef __PYX_EXTERN_C + #ifdef __cplusplus + #define __PYX_EXTERN_C extern "C" + #else + #define __PYX_EXTERN_C extern + #endif +#endif + +#define __PYX_HAVE___pydevd_frame_eval__pydevd_frame_evaluator +#define __PYX_HAVE_API___pydevd_frame_eval__pydevd_frame_evaluator +#include "frameobject.h" +#include "code.h" +#include "pystate.h" +#include "ceval.h" +#ifdef _OPENMP +#include +#endif /* _OPENMP */ + +#ifdef PYREX_WITHOUT_ASSERTIONS +#define CYTHON_WITHOUT_ASSERTIONS +#endif + +typedef struct {PyObject **p; const 
char *s; const Py_ssize_t n; const char* encoding; + const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; + +#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0 +#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0 +#define __PYX_DEFAULT_STRING_ENCODING "" +#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString +#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#define __Pyx_uchar_cast(c) ((unsigned char)c) +#define __Pyx_long_cast(x) ((long)x) +#define __Pyx_fits_Py_ssize_t(v, type, is_signed) (\ + (sizeof(type) < sizeof(Py_ssize_t)) ||\ + (sizeof(type) > sizeof(Py_ssize_t) &&\ + likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX) &&\ + (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\ + v == (type)PY_SSIZE_T_MIN))) ||\ + (sizeof(type) == sizeof(Py_ssize_t) &&\ + (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\ + v == (type)PY_SSIZE_T_MAX))) ) +#if defined (__cplusplus) && __cplusplus >= 201103L + #include + #define __Pyx_sst_abs(value) std::abs(value) +#elif SIZEOF_INT >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) abs(value) +#elif SIZEOF_LONG >= SIZEOF_SIZE_T + #define __Pyx_sst_abs(value) labs(value) +#elif defined (_MSC_VER) && defined (_M_X64) + #define __Pyx_sst_abs(value) _abs64(value) +#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L + #define __Pyx_sst_abs(value) llabs(value) +#elif defined (__GNUC__) + #define __Pyx_sst_abs(value) __builtin_llabs(value) +#else + #define __Pyx_sst_abs(value) ((value<0) ? -value : value) +#endif +static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject*); +static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length); +#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s)) +#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l) +#define __Pyx_PyBytes_FromString PyBytes_FromString +#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*); +#if PY_MAJOR_VERSION < 3 + #define __Pyx_PyStr_FromString __Pyx_PyBytes_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize +#else + #define __Pyx_PyStr_FromString __Pyx_PyUnicode_FromString + #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize +#endif +#define __Pyx_PyObject_AsSString(s) ((signed char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_AsUString(s) ((unsigned char*) __Pyx_PyObject_AsString(s)) +#define __Pyx_PyObject_FromCString(s) __Pyx_PyObject_FromString((const char*)s) +#define __Pyx_PyBytes_FromCString(s) __Pyx_PyBytes_FromString((const char*)s) +#define __Pyx_PyByteArray_FromCString(s) __Pyx_PyByteArray_FromString((const char*)s) +#define __Pyx_PyStr_FromCString(s) __Pyx_PyStr_FromString((const char*)s) +#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s) +#if PY_MAJOR_VERSION < 3 +static CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) +{ + const Py_UNICODE *u_end = u; + while (*u_end++) ; + return (size_t)(u_end - u - 1); +} +#else +#define __Pyx_Py_UNICODE_strlen Py_UNICODE_strlen +#endif +#define __Pyx_PyUnicode_FromUnicode(u) PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u)) +#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode +#define __Pyx_PyUnicode_AsUnicode PyUnicode_AsUnicode +#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj) +#define __Pyx_Owned_Py_None(b) 
__Pyx_NewRef(Py_None) +#define __Pyx_PyBool_FromLong(b) ((b) ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False)) +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x); +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); +#if CYTHON_ASSUME_SAFE_MACROS +#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) +#else +#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x) +#endif +#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x)) +#if PY_MAJOR_VERSION >= 3 +#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x)) +#else +#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x)) +#endif +#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Float(x)) +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII +static int __Pyx_sys_getdefaultencoding_not_ascii; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + PyObject* ascii_chars_u = NULL; + PyObject* ascii_chars_b = NULL; + const char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + if (strcmp(default_encoding_c, "ascii") == 0) { + __Pyx_sys_getdefaultencoding_not_ascii = 0; + } else { + char ascii_chars[128]; + int c; + for (c = 0; c < 128; c++) { + ascii_chars[c] = c; + } + __Pyx_sys_getdefaultencoding_not_ascii = 1; + ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL); + if (!ascii_chars_u) goto bad; + ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL); + if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) { + PyErr_Format( + PyExc_ValueError, + "This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.", + default_encoding_c); + goto bad; + } + Py_DECREF(ascii_chars_u); + Py_DECREF(ascii_chars_b); + } + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + Py_XDECREF(ascii_chars_u); + Py_XDECREF(ascii_chars_b); + return -1; +} +#endif +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3 +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_DecodeUTF8(c_str, size, NULL) +#else +#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL) +#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT +static char* __PYX_DEFAULT_STRING_ENCODING; +static int __Pyx_init_sys_getdefaultencoding_params(void) { + PyObject* sys; + PyObject* default_encoding = NULL; + char* default_encoding_c; + sys = PyImport_ImportModule("sys"); + if (!sys) goto bad; + default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL); + Py_DECREF(sys); + if (!default_encoding) goto bad; + default_encoding_c = PyBytes_AsString(default_encoding); + if (!default_encoding_c) goto bad; + __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c)); + if (!__PYX_DEFAULT_STRING_ENCODING) goto bad; + 
strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c); + Py_DECREF(default_encoding); + return 0; +bad: + Py_XDECREF(default_encoding); + return -1; +} +#endif +#endif + + +/* Test for GCC > 2.95 */ +#if defined(__GNUC__) && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))) + #define likely(x) __builtin_expect(!!(x), 1) + #define unlikely(x) __builtin_expect(!!(x), 0) +#else /* !__GNUC__ or GCC < 2.95 */ + #define likely(x) (x) + #define unlikely(x) (x) +#endif /* __GNUC__ */ + +static PyObject *__pyx_m; +static PyObject *__pyx_d; +static PyObject *__pyx_b; +static PyObject *__pyx_empty_tuple; +static PyObject *__pyx_empty_bytes; +static PyObject *__pyx_empty_unicode; +static int __pyx_lineno; +static int __pyx_clineno = 0; +static const char * __pyx_cfilenm= __FILE__; +static const char *__pyx_filename; + + +static const char *__pyx_f[] = { + "_pydevd_frame_eval/pydevd_frame_evaluator.pyx", +}; + +/*--- Type declarations ---*/ + +/* --- Runtime support code (head) --- */ +/* Refnanny.proto */ +#ifndef CYTHON_REFNANNY + #define CYTHON_REFNANNY 0 +#endif +#if CYTHON_REFNANNY + typedef struct { + void (*INCREF)(void*, PyObject*, int); + void (*DECREF)(void*, PyObject*, int); + void (*GOTREF)(void*, PyObject*, int); + void (*GIVEREF)(void*, PyObject*, int); + void* (*SetupContext)(const char*, int, const char*); + void (*FinishContext)(void**); + } __Pyx_RefNannyAPIStruct; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; + static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); + #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL; +#ifdef WITH_THREAD + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + if (acquire_gil) {\ + PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + PyGILState_Release(__pyx_gilstate_save);\ + } else {\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\ + } +#else + #define __Pyx_RefNannySetupContext(name, acquire_gil)\ + __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) +#endif + #define __Pyx_RefNannyFinishContext()\ + __Pyx_RefNanny->FinishContext(&__pyx_refnanny) + #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) + #define __Pyx_XINCREF(r) do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0) + #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0) + #define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0) + #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0) +#else + #define __Pyx_RefNannyDeclarations + #define __Pyx_RefNannySetupContext(name, acquire_gil) + #define __Pyx_RefNannyFinishContext() + #define __Pyx_INCREF(r) Py_INCREF(r) + #define __Pyx_DECREF(r) Py_DECREF(r) + #define __Pyx_GOTREF(r) + #define __Pyx_GIVEREF(r) + #define __Pyx_XINCREF(r) Py_XINCREF(r) + #define __Pyx_XDECREF(r) Py_XDECREF(r) + #define __Pyx_XGOTREF(r) + #define __Pyx_XGIVEREF(r) +#endif +#define __Pyx_XDECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_XDECREF(tmp);\ + } while (0) +#define __Pyx_DECREF_SET(r, v) do {\ + PyObject *tmp = (PyObject *) r;\ + r = v; __Pyx_DECREF(tmp);\ + } while 
(0) +#define __Pyx_CLEAR(r) do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0) +#define __Pyx_XCLEAR(r) do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0) + +/* PyObjectGetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_getattro)) + return tp->tp_getattro(obj, attr_name); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_getattr)) + return tp->tp_getattr(obj, PyString_AS_STRING(attr_name)); +#endif + return PyObject_GetAttr(obj, attr_name); +} +#else +#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n) +#endif + +/* GetBuiltinName.proto */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name); + +/* GetModuleGlobalName.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name); + +/* PyObjectSetAttrStr.proto */ +#if CYTHON_USE_TYPE_SLOTS +#define __Pyx_PyObject_DelAttrStr(o,n) __Pyx_PyObject_SetAttrStr(o,n,NULL) +static CYTHON_INLINE int __Pyx_PyObject_SetAttrStr(PyObject* obj, PyObject* attr_name, PyObject* value) { + PyTypeObject* tp = Py_TYPE(obj); + if (likely(tp->tp_setattro)) + return tp->tp_setattro(obj, attr_name, value); +#if PY_MAJOR_VERSION < 3 + if (likely(tp->tp_setattr)) + return tp->tp_setattr(obj, PyString_AS_STRING(attr_name), value); +#endif + return PyObject_SetAttr(obj, attr_name, value); +} +#else +#define __Pyx_PyObject_DelAttrStr(o,n) PyObject_DelAttr(o,n) +#define __Pyx_PyObject_SetAttrStr(o,n,v) PyObject_SetAttr(o,n,v) +#endif + +/* RaiseArgTupleInvalid.proto */ +static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, + Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); + +/* RaiseDoubleKeywords.proto */ +static void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name); + +/* ParseKeywords.proto */ +static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\ + PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\ + const char* function_name); + +/* ArgTypeTest.proto */ +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact); + +/* PyCFunctionFastCall.proto */ +#if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs); +#else +#define __Pyx_PyCFunction_FastCall(func, args, nargs) (assert(0), NULL) +#endif + +/* PyFunctionFastCall.proto */ +#if CYTHON_FAST_PYCALL +#define __Pyx_PyFunction_FastCall(func, args, nargs)\ + __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL) +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs); +#else +#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs) +#endif +#endif + +/* PyObjectCall.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw); +#else +#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw) +#endif + +/* PyObjectCallMethO.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg); +#endif + +/* PyObjectCallOneArg.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg); + +/* 
PyObjectCallNoArg.proto */ +#if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func); +#else +#define __Pyx_PyObject_CallNoArg(func) __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL) +#endif + +/* PyThreadStateGet.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_PyThreadState_declare PyThreadState *__pyx_tstate; +#define __Pyx_PyThreadState_assign __pyx_tstate = PyThreadState_GET(); +#else +#define __Pyx_PyThreadState_declare +#define __Pyx_PyThreadState_assign +#endif + +/* SaveResetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ExceptionSave(type, value, tb) __Pyx__ExceptionSave(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#define __Pyx_ExceptionReset(type, value, tb) __Pyx__ExceptionReset(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +#else +#define __Pyx_ExceptionSave(type, value, tb) PyErr_GetExcInfo(type, value, tb) +#define __Pyx_ExceptionReset(type, value, tb) PyErr_SetExcInfo(type, value, tb) +#endif + +/* PyErrFetchRestore.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_ErrRestoreWithState(type, value, tb) __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb) +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb); +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +#define __Pyx_ErrRestoreWithState(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetchWithState(type, value, tb) PyErr_Fetch(type, value, tb) +#define __Pyx_ErrRestore(type, value, tb) PyErr_Restore(type, value, tb) +#define __Pyx_ErrFetch(type, value, tb) PyErr_Fetch(type, value, tb) +#endif + +/* bytes_tailmatch.proto */ +static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, + Py_ssize_t start, Py_ssize_t end, int direction); +static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, + Py_ssize_t start, Py_ssize_t end, int direction); + +/* unicode_tailmatch.proto */ +static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr, + Py_ssize_t start, Py_ssize_t end, int direction); + +/* str_tailmatch.proto */ +static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, + Py_ssize_t end, int direction); + +/* GetException.proto */ +#if CYTHON_FAST_THREAD_STATE +#define __Pyx_GetException(type, value, tb) __Pyx__GetException(__pyx_tstate, type, value, tb) +static int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb); +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb); +#endif + +/* None.proto */ +static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname); + +/* RaiseException.proto */ +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); + +/* GetAttr.proto */ +static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *, PyObject *); + +/* GetAttr3.proto */ +static 
CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); + +/* GetItemInt.proto */ +#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\ + (is_list ? (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL) :\ + __Pyx_GetItemInt_Generic(o, to_py_func(i)))) +#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "list index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\ + (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\ + __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\ + (PyErr_SetString(PyExc_IndexError, "tuple index out of range"), (PyObject*)NULL)) +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + int wraparound, int boundscheck); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j); +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, + int is_list, int wraparound, int boundscheck); + +/* RaiseTooManyValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); + +/* RaiseNeedMoreValuesToUnpack.proto */ +static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); + +/* IterFinish.proto */ +static CYTHON_INLINE int __Pyx_IterFinish(void); + +/* UnpackItemEndCheck.proto */ +static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); + +/* PySequenceContains.proto */ +static CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) { + int result = PySequence_Contains(seq, item); + return unlikely(result < 0) ? 
result : (result == (eq == Py_EQ)); +} + +/* ListAppend.proto */ +#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS +static CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) { + PyListObject* L = (PyListObject*) list; + Py_ssize_t len = Py_SIZE(list); + if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) { + Py_INCREF(x); + PyList_SET_ITEM(list, len, x); + Py_SIZE(list) = len+1; + return 0; + } + return PyList_Append(list, x); +} +#else +#define __Pyx_PyList_Append(L,x) PyList_Append(L,x) +#endif + +/* WriteUnraisableException.proto */ +static void __Pyx_WriteUnraisable(const char *name, int clineno, + int lineno, const char *filename, + int full_traceback, int nogil); + +/* Import.proto */ +static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level); + +/* ImportFrom.proto */ +static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name); + +/* CalculateMetaclass.proto */ +static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases); + +/* Py3ClassCreate.proto */ +static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, PyObject *qualname, + PyObject *mkw, PyObject *modname, PyObject *doc); +static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, PyObject *dict, + PyObject *mkw, int calculate_metaclass, int allow_py2_metaclass); + +/* CodeObjectCache.proto */ +typedef struct { + PyCodeObject* code_object; + int code_line; +} __Pyx_CodeObjectCacheEntry; +struct __Pyx_CodeObjectCache { + int count; + int max_count; + __Pyx_CodeObjectCacheEntry* entries; +}; +static struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL}; +static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line); +static PyCodeObject *__pyx_find_code_object(int code_line); +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object); + +/* AddTraceback.proto */ +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value); + +/* CIntToPy.proto */ +static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value); + +/* CIntFromPy.proto */ +static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *); + +/* CIntFromPy.proto */ +static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *); + +/* CheckBinaryVersion.proto */ +static int __Pyx_check_binary_version(void); + +/* InitStrings.proto */ +static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); + + +/* Module declarations from 'cpython.mem' */ + +/* Module declarations from '_pydevd_frame_eval.pydevd_frame_evaluator' */ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_dummy_trace_dispatch(PyObject *, PyObject *, PyObject *, int __pyx_skip_dispatch); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval(PyFrameObject *, int); /*proto*/ +#define __Pyx_MODULE_NAME "_pydevd_frame_eval.pydevd_frame_evaluator" +int __pyx_module_is_main__pydevd_frame_eval__pydevd_frame_evaluator = 0; + +/* Implementation of '_pydevd_frame_eval.pydevd_frame_evaluator' */ +static PyObject *__pyx_builtin_AttributeError; +static const char __pyx_k_[] = "/"; +static const char __pyx_k__2[] = "\\"; +static const char __pyx_k_add[] = "add"; +static const char __pyx_k_arg[] = "arg"; +static const char __pyx_k_dis[] = "dis"; +static const char __pyx_k_doc[] = "__doc__"; +static 
const char __pyx_k_get[] = "get"; +static const char __pyx_k_code[] = "__code__"; +static const char __pyx_k_main[] = "__main__"; +static const char __pyx_k_test[] = "__test__"; +static const char __pyx_k_event[] = "event"; +static const char __pyx_k_frame[] = "frame"; +static const char __pyx_k_index[] = "index"; +static const char __pyx_k_local[] = "local"; +static const char __pyx_k_state[] = "state"; +static const char __pyx_k_f_code[] = "f_code"; +static const char __pyx_k_import[] = "__import__"; +static const char __pyx_k_module[] = "__module__"; +static const char __pyx_k_plugin[] = "plugin"; +static const char __pyx_k_prepare[] = "__prepare__"; +static const char __pyx_k_SetTrace[] = "SetTrace"; +static const char __pyx_k_qualname[] = "__qualname__"; +static const char __pyx_k_f_globals[] = "f_globals"; +static const char __pyx_k_metaclass[] = "__metaclass__"; +static const char __pyx_k_new_value[] = "new_value"; +static const char __pyx_k_threading[] = "threading"; +static const char __pyx_k_DONT_TRACE[] = "DONT_TRACE"; +static const char __pyx_k_is_tracing[] = "is_tracing"; +static const char __pyx_k_breakpoints[] = "breakpoints"; +static const char __pyx_k_co_filename[] = "co_filename"; +static const char __pyx_k_insert_code[] = "insert_code"; +static const char __pyx_k_can_not_skip[] = "can_not_skip"; +static const char __pyx_k_code_objects[] = "code_objects"; +static const char __pyx_k_threading_py[] = "threading.py"; +static const char __pyx_k_currentThread[] = "currentThread"; +static const char __pyx_k_get_file_type[] = "get_file_type"; +static const char __pyx_k_weakrefset_py[] = "_weakrefset.py"; +static const char __pyx_k_AttributeError[] = "AttributeError"; +static const char __pyx_k_findlinestarts[] = "findlinestarts"; +static const char __pyx_k_set_trace_func[] = "set_trace_func"; +static const char __pyx_k_trace_dispatch[] = "trace_dispatch"; +static const char __pyx_k_use_code_extra[] = "use_code_extra"; +static const char __pyx_k_AVOID_RECURSION[] = "AVOID_RECURSION"; +static const char __pyx_k_additional_info[] = "additional_info"; +static const char __pyx_k_frame_eval_func[] = "frame_eval_func"; +static const char __pyx_k_stop_frame_eval[] = "stop_frame_eval"; +static const char __pyx_k_is_use_code_extra[] = "is_use_code_extra"; +static const char __pyx_k_pydevd_file_utils[] = "pydevd_file_utils"; +static const char __pyx_k_NO_BREAKS_IN_FRAME[] = "NO_BREAKS_IN_FRAME"; +static const char __pyx_k_UseCodeExtraHolder[] = "UseCodeExtraHolder"; +static const char __pyx_k_pydev_do_not_trace[] = "pydev_do_not_trace"; +static const char __pyx_k_get_global_debugger[] = "get_global_debugger"; +static const char __pyx_k_update_globals_dict[] = "update_globals_dict"; +static const char __pyx_k_dummy_trace_dispatch[] = "dummy_trace_dispatch"; +static const char __pyx_k_dummy_tracing_holder[] = "dummy_tracing_holder"; +static const char __pyx_k_has_plugin_line_breaks[] = "has_plugin_line_breaks"; +static const char __pyx_k_PyDBAdditionalThreadInfo[] = "PyDBAdditionalThreadInfo"; +static const char __pyx_k_pydev_trace_code_wrapper[] = "pydev_trace_code_wrapper"; +static const char __pyx_k_pydevd_bundle_pydevd_comm[] = "_pydevd_bundle.pydevd_comm"; +static const char __pyx_k_NORM_PATHS_AND_BASE_CONTAINER[] = "NORM_PATHS_AND_BASE_CONTAINER"; +static const char __pyx_k_home_user_work_PyDev_Debugger[] = "/home/user/work/PyDev.Debugger/_pydevd_frame_eval/pydevd_frame_evaluator.pyx"; +static const char __pyx_k_pydevd_frame_eval_pydevd_frame[] = 
"_pydevd_frame_eval.pydevd_frame_tracing"; +static const char __pyx_k_get_abs_path_real_path_and_base[] = "get_abs_path_real_path_and_base_from_frame"; +static const char __pyx_k_pydev_imps__pydev_saved_modules[] = "_pydev_imps._pydev_saved_modules"; +static const char __pyx_k_pydevd_bundle_pydevd_additional[] = "_pydevd_bundle.pydevd_additional_thread_info"; +static const char __pyx_k_pydevd_bundle_pydevd_dont_trace[] = "_pydevd_bundle.pydevd_dont_trace_files"; +static const char __pyx_k_pydevd_frame_eval_pydevd_modify[] = "_pydevd_frame_eval.pydevd_modify_bytecode"; +static const char __pyx_k_set_trace_for_frame_and_parents[] = "set_trace_for_frame_and_parents"; +static const char __pyx_k_enable_cache_frames_without_brea[] = "enable_cache_frames_without_breaks"; +static const char __pyx_k_pydevd_additional_thread_info_re[] = "pydevd_additional_thread_info_regular.py"; +static const char __pyx_k_pydevd_frame_eval_pydevd_frame_2[] = "_pydevd_frame_eval.pydevd_frame_evaluator"; +static PyObject *__pyx_kp_s_; +static PyObject *__pyx_n_s_AVOID_RECURSION; +static PyObject *__pyx_n_s_AttributeError; +static PyObject *__pyx_n_s_DONT_TRACE; +static PyObject *__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER; +static PyObject *__pyx_n_s_NO_BREAKS_IN_FRAME; +static PyObject *__pyx_n_s_PyDBAdditionalThreadInfo; +static PyObject *__pyx_n_s_SetTrace; +static PyObject *__pyx_n_s_UseCodeExtraHolder; +static PyObject *__pyx_kp_s__2; +static PyObject *__pyx_n_s_add; +static PyObject *__pyx_n_s_additional_info; +static PyObject *__pyx_n_s_arg; +static PyObject *__pyx_n_s_breakpoints; +static PyObject *__pyx_n_s_can_not_skip; +static PyObject *__pyx_n_s_co_filename; +static PyObject *__pyx_n_s_code; +static PyObject *__pyx_n_s_code_objects; +static PyObject *__pyx_n_s_currentThread; +static PyObject *__pyx_n_s_dis; +static PyObject *__pyx_n_s_doc; +static PyObject *__pyx_n_s_dummy_trace_dispatch; +static PyObject *__pyx_n_s_dummy_tracing_holder; +static PyObject *__pyx_n_s_enable_cache_frames_without_brea; +static PyObject *__pyx_n_s_event; +static PyObject *__pyx_n_s_f_code; +static PyObject *__pyx_n_s_f_globals; +static PyObject *__pyx_n_s_findlinestarts; +static PyObject *__pyx_n_s_frame; +static PyObject *__pyx_n_s_frame_eval_func; +static PyObject *__pyx_n_s_get; +static PyObject *__pyx_n_s_get_abs_path_real_path_and_base; +static PyObject *__pyx_n_s_get_file_type; +static PyObject *__pyx_n_s_get_global_debugger; +static PyObject *__pyx_n_s_has_plugin_line_breaks; +static PyObject *__pyx_kp_s_home_user_work_PyDev_Debugger; +static PyObject *__pyx_n_s_import; +static PyObject *__pyx_n_s_index; +static PyObject *__pyx_n_s_insert_code; +static PyObject *__pyx_n_s_is_tracing; +static PyObject *__pyx_n_s_is_use_code_extra; +static PyObject *__pyx_n_s_local; +static PyObject *__pyx_n_s_main; +static PyObject *__pyx_n_s_metaclass; +static PyObject *__pyx_n_s_module; +static PyObject *__pyx_n_s_new_value; +static PyObject *__pyx_n_s_plugin; +static PyObject *__pyx_n_s_prepare; +static PyObject *__pyx_n_s_pydev_do_not_trace; +static PyObject *__pyx_n_s_pydev_imps__pydev_saved_modules; +static PyObject *__pyx_n_s_pydev_trace_code_wrapper; +static PyObject *__pyx_kp_s_pydevd_additional_thread_info_re; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_additional; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_comm; +static PyObject *__pyx_n_s_pydevd_bundle_pydevd_dont_trace; +static PyObject *__pyx_n_s_pydevd_file_utils; +static PyObject *__pyx_n_s_pydevd_frame_eval_pydevd_frame; +static PyObject 
*__pyx_n_s_pydevd_frame_eval_pydevd_frame_2; +static PyObject *__pyx_n_s_pydevd_frame_eval_pydevd_modify; +static PyObject *__pyx_n_s_qualname; +static PyObject *__pyx_n_s_set_trace_for_frame_and_parents; +static PyObject *__pyx_n_s_set_trace_func; +static PyObject *__pyx_n_s_state; +static PyObject *__pyx_n_s_stop_frame_eval; +static PyObject *__pyx_n_s_test; +static PyObject *__pyx_n_s_threading; +static PyObject *__pyx_kp_s_threading_py; +static PyObject *__pyx_n_s_trace_dispatch; +static PyObject *__pyx_n_s_update_globals_dict; +static PyObject *__pyx_n_s_use_code_extra; +static PyObject *__pyx_kp_s_weakrefset_py; +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_is_use_code_extra(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_2enable_cache_frames_without_breaks(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_new_value); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_4dummy_trace_dispatch(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_6frame_eval_func(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_8stop_frame_eval(CYTHON_UNUSED PyObject *__pyx_self); /* proto */ +static PyObject *__pyx_int_1; +static PyObject *__pyx_int_neg_1; +static PyObject *__pyx_tuple__3; +static PyObject *__pyx_tuple__5; +static PyObject *__pyx_tuple__7; +static PyObject *__pyx_tuple__9; +static PyObject *__pyx_codeobj__4; +static PyObject *__pyx_codeobj__6; +static PyObject *__pyx_codeobj__8; +static PyObject *__pyx_codeobj__10; + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":28 + * + * + * def is_use_code_extra(): # <<<<<<<<<<<<<< + * return UseCodeExtraHolder.use_code_extra + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_1is_use_code_extra(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_1is_use_code_extra = {"is_use_code_extra", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_1is_use_code_extra, METH_NOARGS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_1is_use_code_extra(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("is_use_code_extra (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_is_use_code_extra(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_is_use_code_extra(CYTHON_UNUSED PyObject *__pyx_self) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + __Pyx_RefNannySetupContext("is_use_code_extra", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":29 + * + * def is_use_code_extra(): + * return UseCodeExtraHolder.use_code_extra # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_UseCodeExtraHolder); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 29, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_use_code_extra); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 29, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_r = __pyx_t_2; + __pyx_t_2 = 0; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":28 + * + * + * def is_use_code_extra(): # <<<<<<<<<<<<<< + * return UseCodeExtraHolder.use_code_extra + * + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.is_use_code_extra", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":33 + * + * # enable using `co_extra` field in order to cache frames without breakpoints + * def enable_cache_frames_without_breaks(new_value): # <<<<<<<<<<<<<< + * UseCodeExtraHolder.use_code_extra = new_value + * + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_3enable_cache_frames_without_breaks(PyObject *__pyx_self, PyObject *__pyx_v_new_value); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_3enable_cache_frames_without_breaks = {"enable_cache_frames_without_breaks", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_3enable_cache_frames_without_breaks, METH_O, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_3enable_cache_frames_without_breaks(PyObject *__pyx_self, PyObject *__pyx_v_new_value) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("enable_cache_frames_without_breaks (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_2enable_cache_frames_without_breaks(__pyx_self, ((PyObject *)__pyx_v_new_value)); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_2enable_cache_frames_without_breaks(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_new_value) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("enable_cache_frames_without_breaks", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":34 + * # enable using `co_extra` field in order to cache frames without breakpoints + * def enable_cache_frames_without_breaks(new_value): + * UseCodeExtraHolder.use_code_extra = new_value # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_UseCodeExtraHolder); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 34, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (__Pyx_PyObject_SetAttrStr(__pyx_t_1, __pyx_n_s_use_code_extra, __pyx_v_new_value) < 0) __PYX_ERR(0, 34, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":33 + * + * # enable using `co_extra` field in order to cache frames without breakpoints + * def enable_cache_frames_without_breaks(new_value): # <<<<<<<<<<<<<< + * UseCodeExtraHolder.use_code_extra = new_value + * + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.enable_cache_frames_without_breaks", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return 
__pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":37 + * + * + * cpdef dummy_trace_dispatch(frame, str event, arg): # <<<<<<<<<<<<<< + * return None + * + */ + +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_5dummy_trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_dummy_trace_dispatch(CYTHON_UNUSED PyObject *__pyx_v_frame, CYTHON_UNUSED PyObject *__pyx_v_event, CYTHON_UNUSED PyObject *__pyx_v_arg, CYTHON_UNUSED int __pyx_skip_dispatch) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("dummy_trace_dispatch", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":38 + * + * cpdef dummy_trace_dispatch(frame, str event, arg): + * return None # <<<<<<<<<<<<<< + * + * + */ + __Pyx_XDECREF(__pyx_r); + __Pyx_INCREF(Py_None); + __pyx_r = Py_None; + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":37 + * + * + * cpdef dummy_trace_dispatch(frame, str event, arg): # <<<<<<<<<<<<<< + * return None + * + */ + + /* function exit code */ + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_5dummy_trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_5dummy_trace_dispatch(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyObject *__pyx_v_frame = 0; + PyObject *__pyx_v_event = 0; + PyObject *__pyx_v_arg = 0; + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("dummy_trace_dispatch (wrapper)", 0); + { + static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_frame,&__pyx_n_s_event,&__pyx_n_s_arg,0}; + PyObject* values[3] = {0,0,0}; + if (unlikely(__pyx_kwds)) { + Py_ssize_t kw_args; + const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args); + switch (pos_args) { + case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + case 0: break; + default: goto __pyx_L5_argtuple_error; + } + kw_args = PyDict_Size(__pyx_kwds); + switch (pos_args) { + case 0: + if (likely((values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_frame)) != 0)) kw_args--; + else goto __pyx_L5_argtuple_error; + case 1: + if (likely((values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_event)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("dummy_trace_dispatch", 1, 3, 3, 1); __PYX_ERR(0, 37, __pyx_L3_error) + } + case 2: + if (likely((values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s_arg)) != 0)) kw_args--; + else { + __Pyx_RaiseArgtupleInvalid("dummy_trace_dispatch", 1, 3, 3, 2); __PYX_ERR(0, 37, __pyx_L3_error) + } + } + if (unlikely(kw_args > 0)) { + if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, "dummy_trace_dispatch") < 0)) __PYX_ERR(0, 37, __pyx_L3_error) + } + } else if (PyTuple_GET_SIZE(__pyx_args) != 3) { + goto __pyx_L5_argtuple_error; + } else { + values[0] = PyTuple_GET_ITEM(__pyx_args, 0); + values[1] = PyTuple_GET_ITEM(__pyx_args, 1); + values[2] = PyTuple_GET_ITEM(__pyx_args, 2); + } + __pyx_v_frame = values[0]; + __pyx_v_event = ((PyObject*)values[1]); + __pyx_v_arg = values[2]; + } + goto __pyx_L4_argument_unpacking_done; + __pyx_L5_argtuple_error:; + 
__Pyx_RaiseArgtupleInvalid("dummy_trace_dispatch", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 37, __pyx_L3_error) + __pyx_L3_error:; + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.dummy_trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __Pyx_RefNannyFinishContext(); + return NULL; + __pyx_L4_argument_unpacking_done:; + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_event), (&PyString_Type), 1, "event", 1))) __PYX_ERR(0, 37, __pyx_L1_error) + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_4dummy_trace_dispatch(__pyx_self, __pyx_v_frame, __pyx_v_event, __pyx_v_arg); + + /* function exit code */ + goto __pyx_L0; + __pyx_L1_error:; + __pyx_r = NULL; + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_4dummy_trace_dispatch(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_frame, PyObject *__pyx_v_event, PyObject *__pyx_v_arg) { + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + __Pyx_RefNannySetupContext("dummy_trace_dispatch", 0); + __Pyx_XDECREF(__pyx_r); + __pyx_t_1 = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_dummy_trace_dispatch(__pyx_v_frame, __pyx_v_event, __pyx_v_arg, 0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 37, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_r = __pyx_t_1; + __pyx_t_1 = 0; + goto __pyx_L0; + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.dummy_trace_dispatch", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + __pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":41 + * + * + * cdef PyObject* get_bytecode_while_frame_eval(PyFrameObject *frame_obj, int exc): # <<<<<<<<<<<<<< + * frame = frame_obj + * cdef str filepath = frame.f_code.co_filename + */ + +static PyObject *__pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval(PyFrameObject *__pyx_v_frame_obj, int __pyx_v_exc) { + PyObject *__pyx_v_frame = NULL; + PyObject *__pyx_v_filepath = 0; + int __pyx_v_skip_file; + void *__pyx_v_extra; + int *__pyx_v_extra_value; + int __pyx_v_thread_index; + PyObject *__pyx_v_file = NULL; + PyObject *__pyx_v_path_separator = NULL; + PyObject *__pyx_v_t = NULL; + PyObject *__pyx_v_additional_info = NULL; + PyObject *__pyx_v_abs_path_real_path_and_base = NULL; + PyObject *__pyx_v_file_type = NULL; + int __pyx_v_was_break; + PyObject *__pyx_v_main_debugger = NULL; + PyObject *__pyx_v_breakpoints = NULL; + PyObject *__pyx_v_code_object = NULL; + PyObject *__pyx_v_breakpoints_to_update = NULL; + CYTHON_UNUSED PyObject *__pyx_v_offset = NULL; + PyObject *__pyx_v_line = NULL; + PyObject *__pyx_v_breakpoint = NULL; + PyObject *__pyx_v_success = NULL; + PyObject *__pyx_v_new_code = NULL; + PyObject *__pyx_v_bp = NULL; + PyObject *__pyx_v_can_not_skip = NULL; + PyObject *__pyx_r; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + int __pyx_t_3; + int __pyx_t_4; + int __pyx_t_5; + PyObject *__pyx_t_6 = NULL; + PyObject *__pyx_t_7 = NULL; + PyObject *__pyx_t_8 = NULL; + PyObject *__pyx_t_9 = NULL; + int __pyx_t_10; + Py_ssize_t __pyx_t_11; + PyObject *(*__pyx_t_12)(PyObject *); + Py_ssize_t __pyx_t_13; + PyObject *__pyx_t_14 = NULL; + PyObject *__pyx_t_15 = NULL; + PyObject *__pyx_t_16 = NULL; + PyObject *__pyx_t_17 = 
NULL; + PyObject *(*__pyx_t_18)(PyObject *); + int __pyx_t_19; + __Pyx_RefNannySetupContext("get_bytecode_while_frame_eval", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":42 + * + * cdef PyObject* get_bytecode_while_frame_eval(PyFrameObject *frame_obj, int exc): + * frame = frame_obj # <<<<<<<<<<<<<< + * cdef str filepath = frame.f_code.co_filename + * cdef bint skip_file = exc + */ + __pyx_t_1 = ((PyObject *)__pyx_v_frame_obj); + __Pyx_INCREF(__pyx_t_1); + __pyx_v_frame = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":43 + * cdef PyObject* get_bytecode_while_frame_eval(PyFrameObject *frame_obj, int exc): + * frame = frame_obj + * cdef str filepath = frame.f_code.co_filename # <<<<<<<<<<<<<< + * cdef bint skip_file = exc + * cdef void* extra = NULL + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 43, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_co_filename); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 43, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!(likely(PyString_CheckExact(__pyx_t_2))||((__pyx_t_2) == Py_None)||(PyErr_Format(PyExc_TypeError, "Expected %.16s, got %.200s", "str", Py_TYPE(__pyx_t_2)->tp_name), 0))) __PYX_ERR(0, 43, __pyx_L1_error) + __pyx_v_filepath = ((PyObject*)__pyx_t_2); + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":44 + * frame = frame_obj + * cdef str filepath = frame.f_code.co_filename + * cdef bint skip_file = exc # <<<<<<<<<<<<<< + * cdef void* extra = NULL + * cdef int* extra_value = NULL + */ + __pyx_v_skip_file = __pyx_v_exc; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":45 + * cdef str filepath = frame.f_code.co_filename + * cdef bint skip_file = exc + * cdef void* extra = NULL # <<<<<<<<<<<<<< + * cdef int* extra_value = NULL + * cdef int thread_index = -1 + */ + __pyx_v_extra = NULL; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":46 + * cdef bint skip_file = exc + * cdef void* extra = NULL + * cdef int* extra_value = NULL # <<<<<<<<<<<<<< + * cdef int thread_index = -1 + * + */ + __pyx_v_extra_value = NULL; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":47 + * cdef void* extra = NULL + * cdef int* extra_value = NULL + * cdef int thread_index = -1 # <<<<<<<<<<<<<< + * + * if is_use_code_extra is None or AVOID_RECURSION is None: + */ + __pyx_v_thread_index = -1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":49 + * cdef int thread_index = -1 + * + * if is_use_code_extra is None or AVOID_RECURSION is None: # <<<<<<<<<<<<<< + * # Sometimes during process shutdown these global variables become None + * return _PyEval_EvalFrameDefault(frame_obj, exc) + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_is_use_code_extra); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = (__pyx_t_2 == Py_None); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_5 = (__pyx_t_4 != 0); + if (!__pyx_t_5) { + } else { + __pyx_t_3 = __pyx_t_5; + goto __pyx_L4_bool_binop_done; + } + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_AVOID_RECURSION); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 49, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_5 = (__pyx_t_2 == Py_None); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_4 = (__pyx_t_5 != 0); + __pyx_t_3 = __pyx_t_4; + __pyx_L4_bool_binop_done:; + if (__pyx_t_3) { + + /* 
"_pydevd_frame_eval/pydevd_frame_evaluator.pyx":51 + * if is_use_code_extra is None or AVOID_RECURSION is None: + * # Sometimes during process shutdown these global variables become None + * return _PyEval_EvalFrameDefault(frame_obj, exc) # <<<<<<<<<<<<<< + * + * if is_use_code_extra(): + */ + __pyx_r = _PyEval_EvalFrameDefault(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":49 + * cdef int thread_index = -1 + * + * if is_use_code_extra is None or AVOID_RECURSION is None: # <<<<<<<<<<<<<< + * # Sometimes during process shutdown these global variables become None + * return _PyEval_EvalFrameDefault(frame_obj, exc) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":53 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * if is_use_code_extra(): # <<<<<<<<<<<<<< + * extra = PyMem_Malloc(sizeof(int)) + * try: + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_is_use_code_extra); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (__pyx_t_6) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else { + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 53, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 53, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":54 + * + * if is_use_code_extra(): + * extra = PyMem_Malloc(sizeof(int)) # <<<<<<<<<<<<<< + * try: + * thread_index = UseCodeExtraHolder.local.index + */ + __pyx_v_extra = PyMem_Malloc((sizeof(int))); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":55 + * if is_use_code_extra(): + * extra = PyMem_Malloc(sizeof(int)) + * try: # <<<<<<<<<<<<<< + * thread_index = UseCodeExtraHolder.local.index + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_9); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":56 + * extra = PyMem_Malloc(sizeof(int)) + * try: + * thread_index = UseCodeExtraHolder.local.index # <<<<<<<<<<<<<< + * except: + * pass + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_UseCodeExtraHolder); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 56, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_local); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 56, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_index); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 56, __pyx_L7_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_10 = __Pyx_PyInt_As_int(__pyx_t_2); if (unlikely((__pyx_t_10 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 56, __pyx_L7_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + 
__pyx_v_thread_index = __pyx_t_10; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":55 + * if is_use_code_extra(): + * extra = PyMem_Malloc(sizeof(int)) + * try: # <<<<<<<<<<<<<< + * thread_index = UseCodeExtraHolder.local.index + * except: + */ + } + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L14_try_end; + __pyx_L7_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":57 + * try: + * thread_index = UseCodeExtraHolder.local.index + * except: # <<<<<<<<<<<<<< + * pass + * if thread_index != -1: + */ + /*except:*/ { + __Pyx_ErrRestore(0,0,0); + goto __pyx_L8_exception_handled; + } + __pyx_L8_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); + __pyx_L14_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":59 + * except: + * pass + * if thread_index != -1: # <<<<<<<<<<<<<< + * _PyCode_GetExtra( frame.f_code, thread_index, &extra) + * if extra is not NULL: + */ + __pyx_t_3 = ((__pyx_v_thread_index != -1L) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":60 + * pass + * if thread_index != -1: + * _PyCode_GetExtra( frame.f_code, thread_index, &extra) # <<<<<<<<<<<<<< + * if extra is not NULL: + * extra_value = extra + */ + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 60, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + _PyCode_GetExtra(((PyObject *)__pyx_t_2), __pyx_v_thread_index, (&__pyx_v_extra)); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":61 + * if thread_index != -1: + * _PyCode_GetExtra( frame.f_code, thread_index, &extra) + * if extra is not NULL: # <<<<<<<<<<<<<< + * extra_value = extra + * if extra_value[0] == NO_BREAKS_IN_FRAME: + */ + __pyx_t_3 = ((__pyx_v_extra != NULL) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":62 + * _PyCode_GetExtra( frame.f_code, thread_index, &extra) + * if extra is not NULL: + * extra_value = extra # <<<<<<<<<<<<<< + * if extra_value[0] == NO_BREAKS_IN_FRAME: + * return _PyEval_EvalFrameDefault(frame_obj, exc) + */ + __pyx_v_extra_value = ((int *)__pyx_v_extra); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":63 + * if extra is not NULL: + * extra_value = extra + * if extra_value[0] == NO_BREAKS_IN_FRAME: # <<<<<<<<<<<<<< + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + */ + __pyx_t_2 = __Pyx_PyInt_From_int((__pyx_v_extra_value[0])); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_NO_BREAKS_IN_FRAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_6 = PyObject_RichCompare(__pyx_t_2, __pyx_t_1, Py_EQ); __Pyx_XGOTREF(__pyx_t_6); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 63, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":64 + * extra_value 
= extra + * if extra_value[0] == NO_BREAKS_IN_FRAME: + * return _PyEval_EvalFrameDefault(frame_obj, exc) # <<<<<<<<<<<<<< + * + * for file in AVOID_RECURSION: + */ + __pyx_r = _PyEval_EvalFrameDefault(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":63 + * if extra is not NULL: + * extra_value = extra + * if extra_value[0] == NO_BREAKS_IN_FRAME: # <<<<<<<<<<<<<< + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":61 + * if thread_index != -1: + * _PyCode_GetExtra( frame.f_code, thread_index, &extra) + * if extra is not NULL: # <<<<<<<<<<<<<< + * extra_value = extra + * if extra_value[0] == NO_BREAKS_IN_FRAME: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":59 + * except: + * pass + * if thread_index != -1: # <<<<<<<<<<<<<< + * _PyCode_GetExtra( frame.f_code, thread_index, &extra) + * if extra is not NULL: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":53 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * if is_use_code_extra(): # <<<<<<<<<<<<<< + * extra = PyMem_Malloc(sizeof(int)) + * try: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":66 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * for file in AVOID_RECURSION: # <<<<<<<<<<<<<< + * # we can't call any other function without this check, because we can get stack overflow + * for path_separator in ('/', '\\'): + */ + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_AVOID_RECURSION); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (likely(PyList_CheckExact(__pyx_t_6)) || PyTuple_CheckExact(__pyx_t_6)) { + __pyx_t_1 = __pyx_t_6; __Pyx_INCREF(__pyx_t_1); __pyx_t_11 = 0; + __pyx_t_12 = NULL; + } else { + __pyx_t_11 = -1; __pyx_t_1 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_12 = Py_TYPE(__pyx_t_1)->tp_iternext; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 66, __pyx_L1_error) + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + for (;;) { + if (likely(!__pyx_t_12)) { + if (likely(PyList_CheckExact(__pyx_t_1))) { + if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyList_GET_ITEM(__pyx_t_1, __pyx_t_11); __Pyx_INCREF(__pyx_t_6); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 66, __pyx_L1_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } else { + if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_1)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_11); __Pyx_INCREF(__pyx_t_6); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 66, __pyx_L1_error) + #else + __pyx_t_6 = PySequence_ITEM(__pyx_t_1, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 66, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + #endif + } + } else { + __pyx_t_6 = __pyx_t_12(__pyx_t_1); + if (unlikely(!__pyx_t_6)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 66, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_6); + } + __Pyx_XDECREF_SET(__pyx_v_file, __pyx_t_6); + __pyx_t_6 = 0; + + /* 
"_pydevd_frame_eval/pydevd_frame_evaluator.pyx":68 + * for file in AVOID_RECURSION: + * # we can't call any other function without this check, because we can get stack overflow + * for path_separator in ('/', '\\'): # <<<<<<<<<<<<<< + * if filepath.endswith(path_separator + file): + * skip_file = True + */ + __pyx_t_6 = __pyx_tuple__3; __Pyx_INCREF(__pyx_t_6); __pyx_t_13 = 0; + for (;;) { + if (__pyx_t_13 >= 2) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_6, __pyx_t_13); __Pyx_INCREF(__pyx_t_2); __pyx_t_13++; if (unlikely(0 < 0)) __PYX_ERR(0, 68, __pyx_L1_error) + #else + __pyx_t_2 = PySequence_ITEM(__pyx_t_6, __pyx_t_13); __pyx_t_13++; if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 68, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + #endif + __Pyx_XDECREF_SET(__pyx_v_path_separator, __pyx_t_2); + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":69 + * # we can't call any other function without this check, because we can get stack overflow + * for path_separator in ('/', '\\'): + * if filepath.endswith(path_separator + file): # <<<<<<<<<<<<<< + * skip_file = True + * break + */ + if (unlikely(__pyx_v_filepath == Py_None)) { + PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "endswith"); + __PYX_ERR(0, 69, __pyx_L1_error) + } + __pyx_t_2 = PyNumber_Add(__pyx_v_path_separator, __pyx_v_file); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 69, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyStr_Tailmatch(__pyx_v_filepath, __pyx_t_2, 0, PY_SSIZE_T_MAX, 1); if (unlikely(__pyx_t_3 == -1)) __PYX_ERR(0, 69, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if ((__pyx_t_3 != 0)) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":70 + * for path_separator in ('/', '\\'): + * if filepath.endswith(path_separator + file): + * skip_file = True # <<<<<<<<<<<<<< + * break + * + */ + __pyx_v_skip_file = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":71 + * if filepath.endswith(path_separator + file): + * skip_file = True + * break # <<<<<<<<<<<<<< + * + * if not skip_file: + */ + goto __pyx_L21_break; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":69 + * # we can't call any other function without this check, because we can get stack overflow + * for path_separator in ('/', '\\'): + * if filepath.endswith(path_separator + file): # <<<<<<<<<<<<<< + * skip_file = True + * break + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":68 + * for file in AVOID_RECURSION: + * # we can't call any other function without this check, because we can get stack overflow + * for path_separator in ('/', '\\'): # <<<<<<<<<<<<<< + * if filepath.endswith(path_separator + file): + * skip_file = True + */ + } + __pyx_L21_break:; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":66 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * for file in AVOID_RECURSION: # <<<<<<<<<<<<<< + * # we can't call any other function without this check, because we can get stack overflow + * for path_separator in ('/', '\\'): + */ + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":73 + * break + * + * if not skip_file: # <<<<<<<<<<<<<< + * try: + * t = threading.currentThread() + */ + __pyx_t_3 = ((!(__pyx_v_skip_file != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":74 + * + * if not skip_file: + * try: # <<<<<<<<<<<<<< + * t = threading.currentThread() 
+ * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_9, &__pyx_t_8, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_7); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":75 + * if not skip_file: + * try: + * t = threading.currentThread() # <<<<<<<<<<<<<< + * except: + * skip_file = True + */ + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_threading); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 75, __pyx_L24_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_currentThread); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 75, __pyx_L24_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) { + __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_2); + if (likely(__pyx_t_6)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2); + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_2, function); + } + } + if (__pyx_t_6) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 75, __pyx_L24_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else { + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 75, __pyx_L24_error) + } + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_v_t = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":74 + * + * if not skip_file: + * try: # <<<<<<<<<<<<<< + * t = threading.currentThread() + * except: + */ + } + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L31_try_end; + __pyx_L24_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":76 + * try: + * t = threading.currentThread() + * except: # <<<<<<<<<<<<<< + * skip_file = True + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_1, &__pyx_t_2, &__pyx_t_6) < 0) __PYX_ERR(0, 76, __pyx_L26_except_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_6); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":77 + * t = threading.currentThread() + * except: + * skip_file = True # <<<<<<<<<<<<<< + * + * if not skip_file: + */ + __pyx_v_skip_file = 1; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + goto __pyx_L25_exception_handled; + } + __pyx_L26_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":74 + * + * if not skip_file: + * try: # <<<<<<<<<<<<<< + * t = threading.currentThread() + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_8, __pyx_t_7); + goto __pyx_L1_error; + __pyx_L25_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_8, __pyx_t_7); + __pyx_L31_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":73 + * 
break + * + * if not skip_file: # <<<<<<<<<<<<<< + * try: + * t = threading.currentThread() + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":79 + * skip_file = True + * + * if not skip_file: # <<<<<<<<<<<<<< + * try: + * additional_info = t.additional_info + */ + __pyx_t_3 = ((!(__pyx_v_skip_file != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":80 + * + * if not skip_file: + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_9); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":81 + * if not skip_file: + * try: + * additional_info = t.additional_info # <<<<<<<<<<<<<< + * if additional_info is None: + * raise AttributeError() + */ + if (unlikely(!__pyx_v_t)) { __Pyx_RaiseUnboundLocalError("t"); __PYX_ERR(0, 81, __pyx_L35_error) } + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_t, __pyx_n_s_additional_info); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 81, __pyx_L35_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_v_additional_info = __pyx_t_6; + __pyx_t_6 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":82 + * try: + * additional_info = t.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + __pyx_t_3 = (__pyx_v_additional_info == Py_None); + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":83 + * additional_info = t.additional_info + * if additional_info is None: + * raise AttributeError() # <<<<<<<<<<<<<< + * except: + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() + */ + __pyx_t_6 = __Pyx_PyObject_CallNoArg(__pyx_builtin_AttributeError); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 83, __pyx_L35_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_Raise(__pyx_t_6, 0, 0, 0); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __PYX_ERR(0, 83, __pyx_L35_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":82 + * try: + * additional_info = t.additional_info + * if additional_info is None: # <<<<<<<<<<<<<< + * raise AttributeError() + * except: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":80 + * + * if not skip_file: + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + } + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L42_try_end; + __pyx_L35_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":84 + * if additional_info is None: + * raise AttributeError() + * except: # <<<<<<<<<<<<<< + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() + * # request `co_extra` inside every new thread + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_6, &__pyx_t_2, &__pyx_t_1) < 0) __PYX_ERR(0, 84, __pyx_L37_except_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":85 + * raise AttributeError() + * 
except: + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() # <<<<<<<<<<<<<< + * # request `co_extra` inside every new thread + * thread_index = _PyEval_RequestCodeExtraIndex(PyMem_Free) + */ + __pyx_t_15 = __Pyx_GetModuleGlobalName(__pyx_n_s_PyDBAdditionalThreadInfo); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 85, __pyx_L37_except_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_16 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_16 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_16)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_16); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + } + } + if (__pyx_t_16) { + __pyx_t_14 = __Pyx_PyObject_CallOneArg(__pyx_t_15, __pyx_t_16); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 85, __pyx_L37_except_error) + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + } else { + __pyx_t_14 = __Pyx_PyObject_CallNoArg(__pyx_t_15); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 85, __pyx_L37_except_error) + } + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_INCREF(__pyx_t_14); + __Pyx_XDECREF_SET(__pyx_v_additional_info, __pyx_t_14); + if (unlikely(!__pyx_v_t)) { __Pyx_RaiseUnboundLocalError("t"); __PYX_ERR(0, 85, __pyx_L37_except_error) } + if (__Pyx_PyObject_SetAttrStr(__pyx_v_t, __pyx_n_s_additional_info, __pyx_t_14) < 0) __PYX_ERR(0, 85, __pyx_L37_except_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":87 + * additional_info = t.additional_info = PyDBAdditionalThreadInfo() + * # request `co_extra` inside every new thread + * thread_index = _PyEval_RequestCodeExtraIndex(PyMem_Free) # <<<<<<<<<<<<<< + * UseCodeExtraHolder.local.index = thread_index + * + */ + __pyx_v_thread_index = _PyEval_RequestCodeExtraIndex(PyMem_Free); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":88 + * # request `co_extra` inside every new thread + * thread_index = _PyEval_RequestCodeExtraIndex(PyMem_Free) + * UseCodeExtraHolder.local.index = thread_index # <<<<<<<<<<<<<< + * + * if additional_info.is_tracing or getattr(t, 'pydev_do_not_trace', None): + */ + __pyx_t_14 = __Pyx_PyInt_From_int(__pyx_v_thread_index); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 88, __pyx_L37_except_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_15 = __Pyx_GetModuleGlobalName(__pyx_n_s_UseCodeExtraHolder); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 88, __pyx_L37_except_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_16 = __Pyx_PyObject_GetAttrStr(__pyx_t_15, __pyx_n_s_local); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 88, __pyx_L37_except_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + if (__Pyx_PyObject_SetAttrStr(__pyx_t_16, __pyx_n_s_index, __pyx_t_14) < 0) __PYX_ERR(0, 88, __pyx_L37_except_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L36_exception_handled; + } + __pyx_L37_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":80 + * + * if not skip_file: + * try: # <<<<<<<<<<<<<< + * additional_info = t.additional_info + * if additional_info is None: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); + goto __pyx_L1_error; + __pyx_L36_exception_handled:; + 
__Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); + __pyx_L42_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":90 + * UseCodeExtraHolder.local.index = thread_index + * + * if additional_info.is_tracing or getattr(t, 'pydev_do_not_trace', None): # <<<<<<<<<<<<<< + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_additional_info, __pyx_n_s_is_tracing); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + if (!__pyx_t_3) { + } else { + __pyx_t_4 = __pyx_t_3; + goto __pyx_L47_bool_binop_done; + } + if (unlikely(!__pyx_v_t)) { __Pyx_RaiseUnboundLocalError("t"); __PYX_ERR(0, 90, __pyx_L1_error) } + __pyx_t_1 = __Pyx_GetAttr3(__pyx_v_t, __pyx_n_s_pydev_do_not_trace, Py_None); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 90, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_4 = __pyx_t_3; + __pyx_L47_bool_binop_done:; + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":91 + * + * if additional_info.is_tracing or getattr(t, 'pydev_do_not_trace', None): + * return _PyEval_EvalFrameDefault(frame_obj, exc) # <<<<<<<<<<<<<< + * + * additional_info.is_tracing = True + */ + __pyx_r = _PyEval_EvalFrameDefault(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":90 + * UseCodeExtraHolder.local.index = thread_index + * + * if additional_info.is_tracing or getattr(t, 'pydev_do_not_trace', None): # <<<<<<<<<<<<<< + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":93 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * additional_info.is_tracing = True # <<<<<<<<<<<<<< + * try: + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_additional_info, __pyx_n_s_is_tracing, Py_True) < 0) __PYX_ERR(0, 93, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":94 + * + * additional_info.is_tracing = True + * try: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_9, &__pyx_t_8, &__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_7); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":95 + * additional_info.is_tracing = True + * try: + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] # <<<<<<<<<<<<<< + * except: + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 95, __pyx_L49_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 95, __pyx_L49_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, 
__pyx_n_s_co_filename); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 95, __pyx_L49_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = PyObject_GetItem(__pyx_t_1, __pyx_t_6); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 95, __pyx_L49_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_abs_path_real_path_and_base = __pyx_t_2; + __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":94 + * + * additional_info.is_tracing = True + * try: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: + */ + } + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + goto __pyx_L56_try_end; + __pyx_L49_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":96 + * try: + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + * + */ + /*except:*/ { + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval", __pyx_clineno, __pyx_lineno, __pyx_filename); + if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_6, &__pyx_t_1) < 0) __PYX_ERR(0, 96, __pyx_L51_except_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":97 + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) # <<<<<<<<<<<<<< + * + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug anything related to pydevd + */ + __pyx_t_14 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 97, __pyx_L51_except_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_15 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_15 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_15)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_15); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + } + } + if (!__pyx_t_15) { + __pyx_t_16 = __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_v_frame); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 97, __pyx_L51_except_error) + __Pyx_GOTREF(__pyx_t_16); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[2] = {__pyx_t_15, __pyx_v_frame}; + __pyx_t_16 = __Pyx_PyFunction_FastCall(__pyx_t_14, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 97, __pyx_L51_except_error) + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_GOTREF(__pyx_t_16); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[2] = {__pyx_t_15, __pyx_v_frame}; + __pyx_t_16 = __Pyx_PyCFunction_FastCall(__pyx_t_14, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 97, __pyx_L51_except_error) + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + 
__Pyx_GOTREF(__pyx_t_16); + } else + #endif + { + __pyx_t_17 = PyTuple_New(1+1); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 97, __pyx_L51_except_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_GIVEREF(__pyx_t_15); PyTuple_SET_ITEM(__pyx_t_17, 0, __pyx_t_15); __pyx_t_15 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_17, 0+1, __pyx_v_frame); + __pyx_t_16 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_17, NULL); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 97, __pyx_L51_except_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + } + } + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF_SET(__pyx_v_abs_path_real_path_and_base, __pyx_t_16); + __pyx_t_16 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + goto __pyx_L50_exception_handled; + } + __pyx_L51_except_error:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":94 + * + * additional_info.is_tracing = True + * try: # <<<<<<<<<<<<<< + * abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + * except: + */ + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_8, __pyx_t_7); + goto __pyx_L1_error; + __pyx_L50_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_ExceptionReset(__pyx_t_9, __pyx_t_8, __pyx_t_7); + __pyx_L56_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":99 + * abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + * + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug anything related to pydevd # <<<<<<<<<<<<<< + * if file_type is not None: + * additional_info.is_tracing = False + */ + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_file_type); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_abs_path_real_path_and_base, -1L, long, 1, __Pyx_PyInt_From_long, 0, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_16 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_16 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_16)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_16); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (!__pyx_t_16) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_16, __pyx_t_2}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_6)) { + PyObject *__pyx_temp[2] = {__pyx_t_16, __pyx_t_2}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_6, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + __Pyx_GOTREF(__pyx_t_1); + 
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else + #endif + { + __pyx_t_14 = PyTuple_New(1+1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_GIVEREF(__pyx_t_16); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_16); __pyx_t_16 = NULL; + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_14, 0+1, __pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_14, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 99, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + } + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_file_type = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":100 + * + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug anything related to pydevd + * if file_type is not None: # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * return _PyEval_EvalFrameDefault(frame_obj, exc) + */ + __pyx_t_4 = (__pyx_v_file_type != Py_None); + __pyx_t_3 = (__pyx_t_4 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":101 + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug anything related to pydevd + * if file_type is not None: + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_additional_info, __pyx_n_s_is_tracing, Py_False) < 0) __PYX_ERR(0, 101, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":102 + * if file_type is not None: + * additional_info.is_tracing = False + * return _PyEval_EvalFrameDefault(frame_obj, exc) # <<<<<<<<<<<<<< + * + * was_break = False + */ + __pyx_r = _PyEval_EvalFrameDefault(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":100 + * + * file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug anything related to pydevd + * if file_type is not None: # <<<<<<<<<<<<<< + * additional_info.is_tracing = False + * return _PyEval_EvalFrameDefault(frame_obj, exc) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":104 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * was_break = False # <<<<<<<<<<<<<< + * main_debugger = get_global_debugger() + * breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1]) + */ + __pyx_v_was_break = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":105 + * + * was_break = False + * main_debugger = get_global_debugger() # <<<<<<<<<<<<<< + * breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1]) + * code_object = frame.f_code + */ + __pyx_t_6 = __Pyx_GetModuleGlobalName(__pyx_n_s_get_global_debugger); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 105, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_6); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_6, function); + } + } + if (__pyx_t_14) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_14); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 105, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else { + __pyx_t_1 = __Pyx_PyObject_CallNoArg(__pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 105, __pyx_L1_error) + } + 
__Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_v_main_debugger = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":106 + * was_break = False + * main_debugger = get_global_debugger() + * breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1]) # <<<<<<<<<<<<<< + * code_object = frame.f_code + * if breakpoints: + */ + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_breakpoints); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_get); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __pyx_t_6 = __Pyx_GetItemInt(__pyx_v_abs_path_real_path_and_base, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + } + } + if (!__pyx_t_2) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_14, __pyx_t_6); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[2] = {__pyx_t_2, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_14, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[2] = {__pyx_t_2, __pyx_t_6}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_14, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_16 = PyTuple_New(1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_16, 0, __pyx_t_2); __pyx_t_2 = NULL; + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_16, 0+1, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_16, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 106, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + } + } + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_v_breakpoints = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":107 + * main_debugger = get_global_debugger() + * breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1]) + * code_object = frame.f_code # <<<<<<<<<<<<<< + * if breakpoints: + * breakpoints_to_update = [] + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 107, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_code_object = __pyx_t_1; + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":108 + * breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1]) + * code_object = 
frame.f_code + * if breakpoints: # <<<<<<<<<<<<<< + * breakpoints_to_update = [] + * for offset, line in dis.findlinestarts(code_object): + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_breakpoints); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 108, __pyx_L1_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":109 + * code_object = frame.f_code + * if breakpoints: + * breakpoints_to_update = [] # <<<<<<<<<<<<<< + * for offset, line in dis.findlinestarts(code_object): + * if line in breakpoints: + */ + __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 109, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_v_breakpoints_to_update = ((PyObject*)__pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":110 + * if breakpoints: + * breakpoints_to_update = [] + * for offset, line in dis.findlinestarts(code_object): # <<<<<<<<<<<<<< + * if line in breakpoints: + * breakpoint = breakpoints[line] + */ + __pyx_t_14 = __Pyx_GetModuleGlobalName(__pyx_n_s_dis); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_16 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_findlinestarts); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_16))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_16); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_16); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_16, function); + } + } + if (!__pyx_t_14) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_16, __pyx_v_code_object); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_16)) { + PyObject *__pyx_temp[2] = {__pyx_t_14, __pyx_v_code_object}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_16, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_16)) { + PyObject *__pyx_temp[2] = {__pyx_t_14, __pyx_v_code_object}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_16, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_14); __pyx_t_14 = NULL; + __Pyx_INCREF(__pyx_v_code_object); + __Pyx_GIVEREF(__pyx_v_code_object); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_code_object); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_16, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + if (likely(PyList_CheckExact(__pyx_t_1)) || PyTuple_CheckExact(__pyx_t_1)) { + __pyx_t_16 = __pyx_t_1; __Pyx_INCREF(__pyx_t_16); __pyx_t_11 = 0; + __pyx_t_12 = NULL; + } else { + __pyx_t_11 = -1; __pyx_t_16 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __pyx_t_12 = Py_TYPE(__pyx_t_16)->tp_iternext; if 
(unlikely(!__pyx_t_12)) __PYX_ERR(0, 110, __pyx_L1_error) + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + for (;;) { + if (likely(!__pyx_t_12)) { + if (likely(PyList_CheckExact(__pyx_t_16))) { + if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_16)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_16, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 110, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_16, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } else { + if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_16)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_16, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 110, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_16, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + } + } else { + __pyx_t_1 = __pyx_t_12(__pyx_t_16); + if (unlikely(!__pyx_t_1)) { + PyObject* exc_type = PyErr_Occurred(); + if (exc_type) { + if (likely(exc_type == PyExc_StopIteration || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear(); + else __PYX_ERR(0, 110, __pyx_L1_error) + } + break; + } + __Pyx_GOTREF(__pyx_t_1); + } + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 110, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_6 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_14 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_6 = PyList_GET_ITEM(sequence, 0); + __pyx_t_14 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_6); + __Pyx_INCREF(__pyx_t_14); + #else + __pyx_t_6 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_14 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 110, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_18 = Py_TYPE(__pyx_t_2)->tp_iternext; + index = 0; __pyx_t_6 = __pyx_t_18(__pyx_t_2); if (unlikely(!__pyx_t_6)) goto __pyx_L63_unpacking_failed; + __Pyx_GOTREF(__pyx_t_6); + index = 1; __pyx_t_14 = __pyx_t_18(__pyx_t_2); if (unlikely(!__pyx_t_14)) goto __pyx_L63_unpacking_failed; + __Pyx_GOTREF(__pyx_t_14); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_18(__pyx_t_2), 2) < 0) __PYX_ERR(0, 110, __pyx_L1_error) + __pyx_t_18 = NULL; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + goto __pyx_L64_unpacking_done; + __pyx_L63_unpacking_failed:; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_18 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); + __PYX_ERR(0, 110, __pyx_L1_error) + __pyx_L64_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v_offset, 
__pyx_t_6); + __pyx_t_6 = 0; + __Pyx_XDECREF_SET(__pyx_v_line, __pyx_t_14); + __pyx_t_14 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":111 + * breakpoints_to_update = [] + * for offset, line in dis.findlinestarts(code_object): + * if line in breakpoints: # <<<<<<<<<<<<<< + * breakpoint = breakpoints[line] + * if code_object not in breakpoint.code_objects: + */ + __pyx_t_3 = (__Pyx_PySequence_ContainsTF(__pyx_v_line, __pyx_v_breakpoints, Py_EQ)); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 111, __pyx_L1_error) + __pyx_t_4 = (__pyx_t_3 != 0); + if (__pyx_t_4) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":112 + * for offset, line in dis.findlinestarts(code_object): + * if line in breakpoints: + * breakpoint = breakpoints[line] # <<<<<<<<<<<<<< + * if code_object not in breakpoint.code_objects: + * # This check is needed for generator functions, because after each yield the new frame is created + */ + __pyx_t_1 = PyObject_GetItem(__pyx_v_breakpoints, __pyx_v_line); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 112, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_XDECREF_SET(__pyx_v_breakpoint, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":113 + * if line in breakpoints: + * breakpoint = breakpoints[line] + * if code_object not in breakpoint.code_objects: # <<<<<<<<<<<<<< + * # This check is needed for generator functions, because after each yield the new frame is created + * # but the former code object is used + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_breakpoint, __pyx_n_s_code_objects); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_4 = (__Pyx_PySequence_ContainsTF(__pyx_v_code_object, __pyx_t_1, Py_NE)); if (unlikely(__pyx_t_4 < 0)) __PYX_ERR(0, 113, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_3 = (__pyx_t_4 != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":116 + * # This check is needed for generator functions, because after each yield the new frame is created + * # but the former code object is used + * success, new_code = insert_code(frame.f_code, pydev_trace_code_wrapper.__code__, line) # <<<<<<<<<<<<<< + * if success: + * breakpoints_to_update.append(breakpoint) + */ + __pyx_t_14 = __Pyx_GetModuleGlobalName(__pyx_n_s_insert_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_pydev_trace_code_wrapper); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_17 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_code); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_14))) { + __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_14); + if (likely(__pyx_t_2)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_14); + __Pyx_INCREF(__pyx_t_2); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_14, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[4] = {__pyx_t_2, __pyx_t_6, __pyx_t_17, __pyx_v_line}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_14, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); 
if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_14)) { + PyObject *__pyx_temp[4] = {__pyx_t_2, __pyx_t_6, __pyx_t_17, __pyx_v_line}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_14, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + } else + #endif + { + __pyx_t_15 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + if (__pyx_t_2) { + __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_15, 0, __pyx_t_2); __pyx_t_2 = NULL; + } + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_15, 0+__pyx_t_10, __pyx_t_6); + __Pyx_GIVEREF(__pyx_t_17); + PyTuple_SET_ITEM(__pyx_t_15, 1+__pyx_t_10, __pyx_t_17); + __Pyx_INCREF(__pyx_v_line); + __Pyx_GIVEREF(__pyx_v_line); + PyTuple_SET_ITEM(__pyx_t_15, 2+__pyx_t_10, __pyx_v_line); + __pyx_t_6 = 0; + __pyx_t_17 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_14, __pyx_t_15, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + } + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) { + PyObject* sequence = __pyx_t_1; + #if !CYTHON_COMPILING_IN_PYPY + Py_ssize_t size = Py_SIZE(sequence); + #else + Py_ssize_t size = PySequence_Size(sequence); + #endif + if (unlikely(size != 2)) { + if (size > 2) __Pyx_RaiseTooManyValuesError(2); + else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size); + __PYX_ERR(0, 116, __pyx_L1_error) + } + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (likely(PyTuple_CheckExact(sequence))) { + __pyx_t_14 = PyTuple_GET_ITEM(sequence, 0); + __pyx_t_15 = PyTuple_GET_ITEM(sequence, 1); + } else { + __pyx_t_14 = PyList_GET_ITEM(sequence, 0); + __pyx_t_15 = PyList_GET_ITEM(sequence, 1); + } + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(__pyx_t_15); + #else + __pyx_t_14 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_15 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + #endif + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + } else { + Py_ssize_t index = -1; + __pyx_t_17 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 116, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_18 = Py_TYPE(__pyx_t_17)->tp_iternext; + index = 0; __pyx_t_14 = __pyx_t_18(__pyx_t_17); if (unlikely(!__pyx_t_14)) goto __pyx_L67_unpacking_failed; + __Pyx_GOTREF(__pyx_t_14); + index = 1; __pyx_t_15 = __pyx_t_18(__pyx_t_17); if (unlikely(!__pyx_t_15)) goto __pyx_L67_unpacking_failed; + __Pyx_GOTREF(__pyx_t_15); + if (__Pyx_IternextUnpackEndCheck(__pyx_t_18(__pyx_t_17), 2) < 0) __PYX_ERR(0, 116, __pyx_L1_error) + __pyx_t_18 = NULL; + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + goto __pyx_L68_unpacking_done; + __pyx_L67_unpacking_failed:; + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + __pyx_t_18 = NULL; + if (__Pyx_IterFinish() == 0) __Pyx_RaiseNeedMoreValuesError(index); 
+ __PYX_ERR(0, 116, __pyx_L1_error) + __pyx_L68_unpacking_done:; + } + __Pyx_XDECREF_SET(__pyx_v_success, __pyx_t_14); + __pyx_t_14 = 0; + __Pyx_XDECREF_SET(__pyx_v_new_code, __pyx_t_15); + __pyx_t_15 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":117 + * # but the former code object is used + * success, new_code = insert_code(frame.f_code, pydev_trace_code_wrapper.__code__, line) + * if success: # <<<<<<<<<<<<<< + * breakpoints_to_update.append(breakpoint) + * Py_INCREF(new_code) + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_success); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 117, __pyx_L1_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":118 + * success, new_code = insert_code(frame.f_code, pydev_trace_code_wrapper.__code__, line) + * if success: + * breakpoints_to_update.append(breakpoint) # <<<<<<<<<<<<<< + * Py_INCREF(new_code) + * frame_obj.f_code = new_code + */ + __pyx_t_19 = __Pyx_PyList_Append(__pyx_v_breakpoints_to_update, __pyx_v_breakpoint); if (unlikely(__pyx_t_19 == -1)) __PYX_ERR(0, 118, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":119 + * if success: + * breakpoints_to_update.append(breakpoint) + * Py_INCREF(new_code) # <<<<<<<<<<<<<< + * frame_obj.f_code = new_code + * was_break = True + */ + Py_INCREF(__pyx_v_new_code); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":120 + * breakpoints_to_update.append(breakpoint) + * Py_INCREF(new_code) + * frame_obj.f_code = new_code # <<<<<<<<<<<<<< + * was_break = True + * else: + */ + __pyx_v_frame_obj->f_code = ((PyCodeObject *)__pyx_v_new_code); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":121 + * Py_INCREF(new_code) + * frame_obj.f_code = new_code + * was_break = True # <<<<<<<<<<<<<< + * else: + * main_debugger.set_trace_for_frame_and_parents(frame) + */ + __pyx_v_was_break = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":117 + * # but the former code object is used + * success, new_code = insert_code(frame.f_code, pydev_trace_code_wrapper.__code__, line) + * if success: # <<<<<<<<<<<<<< + * breakpoints_to_update.append(breakpoint) + * Py_INCREF(new_code) + */ + goto __pyx_L69; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":123 + * was_break = True + * else: + * main_debugger.set_trace_for_frame_and_parents(frame) # <<<<<<<<<<<<<< + * was_break = False + * break + */ + /*else*/ { + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_set_trace_for_frame_and_parents); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + } + } + if (!__pyx_t_14) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_15, __pyx_v_frame); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = {__pyx_t_14, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = 
{__pyx_t_14, __pyx_v_frame}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else + #endif + { + __pyx_t_17 = PyTuple_New(1+1); if (unlikely(!__pyx_t_17)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_17); + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_17, 0, __pyx_t_14); __pyx_t_14 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_17, 0+1, __pyx_v_frame); + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_17, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 123, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_17); __pyx_t_17 = 0; + } + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":124 + * else: + * main_debugger.set_trace_for_frame_and_parents(frame) + * was_break = False # <<<<<<<<<<<<<< + * break + * if was_break: + */ + __pyx_v_was_break = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":125 + * main_debugger.set_trace_for_frame_and_parents(frame) + * was_break = False + * break # <<<<<<<<<<<<<< + * if was_break: + * update_globals_dict(frame.f_globals) + */ + goto __pyx_L62_break; + } + __pyx_L69:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":113 + * if line in breakpoints: + * breakpoint = breakpoints[line] + * if code_object not in breakpoint.code_objects: # <<<<<<<<<<<<<< + * # This check is needed for generator functions, because after each yield the new frame is created + * # but the former code object is used + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":111 + * breakpoints_to_update = [] + * for offset, line in dis.findlinestarts(code_object): + * if line in breakpoints: # <<<<<<<<<<<<<< + * breakpoint = breakpoints[line] + * if code_object not in breakpoint.code_objects: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":110 + * if breakpoints: + * breakpoints_to_update = [] + * for offset, line in dis.findlinestarts(code_object): # <<<<<<<<<<<<<< + * if line in breakpoints: + * breakpoint = breakpoints[line] + */ + } + __pyx_L62_break:; + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":126 + * was_break = False + * break + * if was_break: # <<<<<<<<<<<<<< + * update_globals_dict(frame.f_globals) + * for bp in breakpoints_to_update: + */ + __pyx_t_3 = (__pyx_v_was_break != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":127 + * break + * if was_break: + * update_globals_dict(frame.f_globals) # <<<<<<<<<<<<<< + * for bp in breakpoints_to_update: + * bp.code_objects.add(frame.f_code) + */ + __pyx_t_1 = __Pyx_GetModuleGlobalName(__pyx_n_s_update_globals_dict); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_globals); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_17 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) { + __pyx_t_17 = PyMethod_GET_SELF(__pyx_t_1); + if (likely(__pyx_t_17)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1); + __Pyx_INCREF(__pyx_t_17); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_1, function); + } + } + if (!__pyx_t_17) { + __pyx_t_16 = __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_15); if 
(unlikely(!__pyx_t_16)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_GOTREF(__pyx_t_16); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_17, __pyx_t_15}; + __pyx_t_16 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) { + PyObject *__pyx_temp[2] = {__pyx_t_17, __pyx_t_15}; + __pyx_t_16 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + } else + #endif + { + __pyx_t_14 = PyTuple_New(1+1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_GIVEREF(__pyx_t_17); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_17); __pyx_t_17 = NULL; + __Pyx_GIVEREF(__pyx_t_15); + PyTuple_SET_ITEM(__pyx_t_14, 0+1, __pyx_t_15); + __pyx_t_15 = 0; + __pyx_t_16 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_14, NULL); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 127, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + } + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":128 + * if was_break: + * update_globals_dict(frame.f_globals) + * for bp in breakpoints_to_update: # <<<<<<<<<<<<<< + * bp.code_objects.add(frame.f_code) + * else: + */ + __pyx_t_16 = __pyx_v_breakpoints_to_update; __Pyx_INCREF(__pyx_t_16); __pyx_t_11 = 0; + for (;;) { + if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_16)) break; + #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + __pyx_t_1 = PyList_GET_ITEM(__pyx_t_16, __pyx_t_11); __Pyx_INCREF(__pyx_t_1); __pyx_t_11++; if (unlikely(0 < 0)) __PYX_ERR(0, 128, __pyx_L1_error) + #else + __pyx_t_1 = PySequence_ITEM(__pyx_t_16, __pyx_t_11); __pyx_t_11++; if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 128, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + #endif + __Pyx_XDECREF_SET(__pyx_v_bp, __pyx_t_1); + __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":129 + * update_globals_dict(frame.f_globals) + * for bp in breakpoints_to_update: + * bp.code_objects.add(frame.f_code) # <<<<<<<<<<<<<< + * else: + * if main_debugger.has_plugin_line_breaks: + */ + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_bp, __pyx_n_s_code_objects); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_t_14, __pyx_n_s_add); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __pyx_t_14 = __Pyx_PyObject_GetAttrStr(__pyx_v_frame, __pyx_n_s_f_code); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __pyx_t_17 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_17 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_17)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_17); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + } + } + if (!__pyx_t_17) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_15, 
__pyx_t_14); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = {__pyx_t_17, __pyx_t_14}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = {__pyx_t_17, __pyx_t_14}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_17); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_17); __pyx_t_17 = NULL; + __Pyx_GIVEREF(__pyx_t_14); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_t_14); + __pyx_t_14 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 129, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":128 + * if was_break: + * update_globals_dict(frame.f_globals) + * for bp in breakpoints_to_update: # <<<<<<<<<<<<<< + * bp.code_objects.add(frame.f_code) + * else: + */ + } + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":126 + * was_break = False + * break + * if was_break: # <<<<<<<<<<<<<< + * update_globals_dict(frame.f_globals) + * for bp in breakpoints_to_update: + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":108 + * breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1]) + * code_object = frame.f_code + * if breakpoints: # <<<<<<<<<<<<<< + * breakpoints_to_update = [] + * for offset, line in dis.findlinestarts(code_object): + */ + goto __pyx_L60; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":131 + * bp.code_objects.add(frame.f_code) + * else: + * if main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame) + * if can_not_skip: + */ + /*else*/ { + __pyx_t_16 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_has_plugin_line_breaks); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_16); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 131, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":132 + * else: + * if main_debugger.has_plugin_line_breaks: + * can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame) # <<<<<<<<<<<<<< + * if can_not_skip: + * was_break = True + */ + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_plugin); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_can_not_skip); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 132, 
__pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = NULL; + __pyx_t_10 = 0; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + __pyx_t_10 = 1; + } + } + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_main_debugger, Py_None, __pyx_v_frame}; + __pyx_t_16 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_16); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[4] = {__pyx_t_1, __pyx_v_main_debugger, Py_None, __pyx_v_frame}; + __pyx_t_16 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-__pyx_t_10, 3+__pyx_t_10); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_16); + } else + #endif + { + __pyx_t_6 = PyTuple_New(3+__pyx_t_10); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + if (__pyx_t_1) { + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); __pyx_t_1 = NULL; + } + __Pyx_INCREF(__pyx_v_main_debugger); + __Pyx_GIVEREF(__pyx_v_main_debugger); + PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_10, __pyx_v_main_debugger); + __Pyx_INCREF(Py_None); + __Pyx_GIVEREF(Py_None); + PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_10, Py_None); + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_10, __pyx_v_frame); + __pyx_t_16 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_6, NULL); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 132, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __pyx_v_can_not_skip = __pyx_t_16; + __pyx_t_16 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":133 + * if main_debugger.has_plugin_line_breaks: + * can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame) + * if can_not_skip: # <<<<<<<<<<<<<< + * was_break = True + * main_debugger.SetTrace(main_debugger.trace_dispatch) + */ + __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_v_can_not_skip); if (unlikely(__pyx_t_3 < 0)) __PYX_ERR(0, 133, __pyx_L1_error) + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":134 + * can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame) + * if can_not_skip: + * was_break = True # <<<<<<<<<<<<<< + * main_debugger.SetTrace(main_debugger.trace_dispatch) + * main_debugger.set_trace_for_frame_and_parents(frame) + */ + __pyx_v_was_break = 1; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":135 + * if can_not_skip: + * was_break = True + * main_debugger.SetTrace(main_debugger.trace_dispatch) # <<<<<<<<<<<<<< + * main_debugger.set_trace_for_frame_and_parents(frame) + * + */ + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_SetTrace); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_trace_dispatch); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + 
__pyx_t_1 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_1 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_1)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_1); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + } + } + if (!__pyx_t_1) { + __pyx_t_16 = __Pyx_PyObject_CallOneArg(__pyx_t_15, __pyx_t_6); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_GOTREF(__pyx_t_16); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_t_6}; + __pyx_t_16 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = {__pyx_t_1, __pyx_t_6}; + __pyx_t_16 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } else + #endif + { + __pyx_t_14 = PyTuple_New(1+1); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_14); + __Pyx_GIVEREF(__pyx_t_1); PyTuple_SET_ITEM(__pyx_t_14, 0, __pyx_t_1); __pyx_t_1 = NULL; + __Pyx_GIVEREF(__pyx_t_6); + PyTuple_SET_ITEM(__pyx_t_14, 0+1, __pyx_t_6); + __pyx_t_6 = 0; + __pyx_t_16 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_14, NULL); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 135, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0; + } + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":136 + * was_break = True + * main_debugger.SetTrace(main_debugger.trace_dispatch) + * main_debugger.set_trace_for_frame_and_parents(frame) # <<<<<<<<<<<<<< + * + * if not was_break: + */ + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_v_main_debugger, __pyx_n_s_set_trace_for_frame_and_parents); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_15); + __pyx_t_14 = NULL; + if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_15))) { + __pyx_t_14 = PyMethod_GET_SELF(__pyx_t_15); + if (likely(__pyx_t_14)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_15); + __Pyx_INCREF(__pyx_t_14); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_15, function); + } + } + if (!__pyx_t_14) { + __pyx_t_16 = __Pyx_PyObject_CallOneArg(__pyx_t_15, __pyx_v_frame); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = {__pyx_t_14, __pyx_v_frame}; + __pyx_t_16 = __Pyx_PyFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_GOTREF(__pyx_t_16); + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_15)) { + PyObject *__pyx_temp[2] = {__pyx_t_14, __pyx_v_frame}; + __pyx_t_16 = __Pyx_PyCFunction_FastCall(__pyx_t_15, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + 
__Pyx_GOTREF(__pyx_t_16); + } else + #endif + { + __pyx_t_6 = PyTuple_New(1+1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_6); + __Pyx_GIVEREF(__pyx_t_14); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_14); __pyx_t_14 = NULL; + __Pyx_INCREF(__pyx_v_frame); + __Pyx_GIVEREF(__pyx_v_frame); + PyTuple_SET_ITEM(__pyx_t_6, 0+1, __pyx_v_frame); + __pyx_t_16 = __Pyx_PyObject_Call(__pyx_t_15, __pyx_t_6, NULL); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 136, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; + } + } + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":133 + * if main_debugger.has_plugin_line_breaks: + * can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame) + * if can_not_skip: # <<<<<<<<<<<<<< + * was_break = True + * main_debugger.SetTrace(main_debugger.trace_dispatch) + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":131 + * bp.code_objects.add(frame.f_code) + * else: + * if main_debugger.has_plugin_line_breaks: # <<<<<<<<<<<<<< + * can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame) + * if can_not_skip: + */ + } + } + __pyx_L60:; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":138 + * main_debugger.set_trace_for_frame_and_parents(frame) + * + * if not was_break: # <<<<<<<<<<<<<< + * extra_value = PyMem_Malloc(sizeof(int)) + * extra_value[0] = NO_BREAKS_IN_FRAME + */ + __pyx_t_3 = ((!(__pyx_v_was_break != 0)) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":139 + * + * if not was_break: + * extra_value = PyMem_Malloc(sizeof(int)) # <<<<<<<<<<<<<< + * extra_value[0] = NO_BREAKS_IN_FRAME + * try: + */ + __pyx_v_extra_value = ((int *)PyMem_Malloc((sizeof(int)))); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":140 + * if not was_break: + * extra_value = PyMem_Malloc(sizeof(int)) + * extra_value[0] = NO_BREAKS_IN_FRAME # <<<<<<<<<<<<<< + * try: + * thread_index = UseCodeExtraHolder.local.index + */ + __pyx_t_16 = __Pyx_GetModuleGlobalName(__pyx_n_s_NO_BREAKS_IN_FRAME); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_16); + __pyx_t_10 = __Pyx_PyInt_As_int(__pyx_t_16); if (unlikely((__pyx_t_10 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 140, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + (__pyx_v_extra_value[0]) = __pyx_t_10; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":141 + * extra_value = PyMem_Malloc(sizeof(int)) + * extra_value[0] = NO_BREAKS_IN_FRAME + * try: # <<<<<<<<<<<<<< + * thread_index = UseCodeExtraHolder.local.index + * except: + */ + { + __Pyx_PyThreadState_declare + __Pyx_PyThreadState_assign + __Pyx_ExceptionSave(&__pyx_t_7, &__pyx_t_8, &__pyx_t_9); + __Pyx_XGOTREF(__pyx_t_7); + __Pyx_XGOTREF(__pyx_t_8); + __Pyx_XGOTREF(__pyx_t_9); + /*try:*/ { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":142 + * extra_value[0] = NO_BREAKS_IN_FRAME + * try: + * thread_index = UseCodeExtraHolder.local.index # <<<<<<<<<<<<<< + * except: + * pass + */ + __pyx_t_16 = __Pyx_GetModuleGlobalName(__pyx_n_s_UseCodeExtraHolder); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 142, __pyx_L76_error) + __Pyx_GOTREF(__pyx_t_16); + __pyx_t_15 = __Pyx_PyObject_GetAttrStr(__pyx_t_16, __pyx_n_s_local); if (unlikely(!__pyx_t_15)) __PYX_ERR(0, 142, __pyx_L76_error) + __Pyx_GOTREF(__pyx_t_15); + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + __pyx_t_16 = 
__Pyx_PyObject_GetAttrStr(__pyx_t_15, __pyx_n_s_index); if (unlikely(!__pyx_t_16)) __PYX_ERR(0, 142, __pyx_L76_error) + __Pyx_GOTREF(__pyx_t_16); + __Pyx_DECREF(__pyx_t_15); __pyx_t_15 = 0; + __pyx_t_10 = __Pyx_PyInt_As_int(__pyx_t_16); if (unlikely((__pyx_t_10 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 142, __pyx_L76_error) + __Pyx_DECREF(__pyx_t_16); __pyx_t_16 = 0; + __pyx_v_thread_index = __pyx_t_10; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":141 + * extra_value = PyMem_Malloc(sizeof(int)) + * extra_value[0] = NO_BREAKS_IN_FRAME + * try: # <<<<<<<<<<<<<< + * thread_index = UseCodeExtraHolder.local.index + * except: + */ + } + __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0; + __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0; + __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0; + goto __pyx_L83_try_end; + __pyx_L76_error:; + __Pyx_PyThreadState_assign + __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_XDECREF(__pyx_t_17); __pyx_t_17 = 0; + __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_XDECREF(__pyx_t_14); __pyx_t_14 = 0; + __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0; + __Pyx_XDECREF(__pyx_t_15); __pyx_t_15 = 0; + __Pyx_XDECREF(__pyx_t_16); __pyx_t_16 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":143 + * try: + * thread_index = UseCodeExtraHolder.local.index + * except: # <<<<<<<<<<<<<< + * pass + * if thread_index != -1: + */ + /*except:*/ { + __Pyx_ErrRestore(0,0,0); + goto __pyx_L77_exception_handled; + } + __pyx_L77_exception_handled:; + __Pyx_PyThreadState_assign + __Pyx_XGIVEREF(__pyx_t_7); + __Pyx_XGIVEREF(__pyx_t_8); + __Pyx_XGIVEREF(__pyx_t_9); + __Pyx_ExceptionReset(__pyx_t_7, __pyx_t_8, __pyx_t_9); + __pyx_L83_try_end:; + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":145 + * except: + * pass + * if thread_index != -1: # <<<<<<<<<<<<<< + * _PyCode_SetExtra( code_object, thread_index, extra_value) + * + */ + __pyx_t_3 = ((__pyx_v_thread_index != -1L) != 0); + if (__pyx_t_3) { + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":146 + * pass + * if thread_index != -1: + * _PyCode_SetExtra( code_object, thread_index, extra_value) # <<<<<<<<<<<<<< + * + * additional_info.is_tracing = False + */ + _PyCode_SetExtra(((PyObject *)__pyx_v_code_object), __pyx_v_thread_index, __pyx_v_extra_value); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":145 + * except: + * pass + * if thread_index != -1: # <<<<<<<<<<<<<< + * _PyCode_SetExtra( code_object, thread_index, extra_value) + * + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":138 + * main_debugger.set_trace_for_frame_and_parents(frame) + * + * if not was_break: # <<<<<<<<<<<<<< + * extra_value = PyMem_Malloc(sizeof(int)) + * extra_value[0] = NO_BREAKS_IN_FRAME + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":148 + * _PyCode_SetExtra( code_object, thread_index, extra_value) + * + * additional_info.is_tracing = False # <<<<<<<<<<<<<< + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + */ + if (__Pyx_PyObject_SetAttrStr(__pyx_v_additional_info, __pyx_n_s_is_tracing, Py_False) < 0) __PYX_ERR(0, 148, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":79 + * skip_file = True + * + * if not skip_file: # <<<<<<<<<<<<<< + * try: + * additional_info = t.additional_info + */ + } + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":149 + * + * additional_info.is_tracing = False + * return _PyEval_EvalFrameDefault(frame_obj, exc) # <<<<<<<<<<<<<< + * + * def frame_eval_func(): + */ + __pyx_r = _PyEval_EvalFrameDefault(__pyx_v_frame_obj, __pyx_v_exc); + goto __pyx_L0; + + /* 
"_pydevd_frame_eval/pydevd_frame_evaluator.pyx":41 + * + * + * cdef PyObject* get_bytecode_while_frame_eval(PyFrameObject *frame_obj, int exc): # <<<<<<<<<<<<<< + * frame = frame_obj + * cdef str filepath = frame.f_code.co_filename + */ + + /* function exit code */ + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_6); + __Pyx_XDECREF(__pyx_t_14); + __Pyx_XDECREF(__pyx_t_15); + __Pyx_XDECREF(__pyx_t_16); + __Pyx_XDECREF(__pyx_t_17); + __Pyx_WriteUnraisable("_pydevd_frame_eval.pydevd_frame_evaluator.get_bytecode_while_frame_eval", __pyx_clineno, __pyx_lineno, __pyx_filename, 0, 0); + __pyx_r = 0; + __pyx_L0:; + __Pyx_XDECREF(__pyx_v_frame); + __Pyx_XDECREF(__pyx_v_filepath); + __Pyx_XDECREF(__pyx_v_file); + __Pyx_XDECREF(__pyx_v_path_separator); + __Pyx_XDECREF(__pyx_v_t); + __Pyx_XDECREF(__pyx_v_additional_info); + __Pyx_XDECREF(__pyx_v_abs_path_real_path_and_base); + __Pyx_XDECREF(__pyx_v_file_type); + __Pyx_XDECREF(__pyx_v_main_debugger); + __Pyx_XDECREF(__pyx_v_breakpoints); + __Pyx_XDECREF(__pyx_v_code_object); + __Pyx_XDECREF(__pyx_v_breakpoints_to_update); + __Pyx_XDECREF(__pyx_v_offset); + __Pyx_XDECREF(__pyx_v_line); + __Pyx_XDECREF(__pyx_v_breakpoint); + __Pyx_XDECREF(__pyx_v_success); + __Pyx_XDECREF(__pyx_v_new_code); + __Pyx_XDECREF(__pyx_v_bp); + __Pyx_XDECREF(__pyx_v_can_not_skip); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":151 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = get_bytecode_while_frame_eval + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_7frame_eval_func(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_7frame_eval_func = {"frame_eval_func", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_7frame_eval_func, METH_NOARGS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_7frame_eval_func(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("frame_eval_func (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_6frame_eval_func(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_6frame_eval_func(CYTHON_UNUSED PyObject *__pyx_self) { + PyThreadState *__pyx_v_state; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + PyObject *__pyx_t_5 = NULL; + __Pyx_RefNannySetupContext("frame_eval_func", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":152 + * + * def frame_eval_func(): + * cdef PyThreadState *state = PyThreadState_Get() # <<<<<<<<<<<<<< + * state.interp.eval_frame = get_bytecode_while_frame_eval + * global dummy_tracing_holder + */ + __pyx_v_state = PyThreadState_Get(); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":153 + * def frame_eval_func(): + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = get_bytecode_while_frame_eval # <<<<<<<<<<<<<< + * global dummy_tracing_holder + * 
dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + */ + __pyx_v_state->interp->eval_frame = __pyx_f_18_pydevd_frame_eval_22pydevd_frame_evaluator_get_bytecode_while_frame_eval; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":155 + * state.interp.eval_frame = get_bytecode_while_frame_eval + * global dummy_tracing_holder + * dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) # <<<<<<<<<<<<<< + * + * def stop_frame_eval(): + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_dummy_tracing_holder); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_set_trace_func); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_dummy_trace_dispatch); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_4 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) { + __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3); + if (likely(__pyx_t_4)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3); + __Pyx_INCREF(__pyx_t_4); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_3, function); + } + } + if (!__pyx_t_4) { + __pyx_t_1 = __Pyx_PyObject_CallOneArg(__pyx_t_3, __pyx_t_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_GOTREF(__pyx_t_1); + } else { + #if CYTHON_FAST_PYCALL + if (PyFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_2}; + __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else + #endif + #if CYTHON_FAST_PYCCALL + if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) { + PyObject *__pyx_temp[2] = {__pyx_t_4, __pyx_t_2}; + __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-1, 1+1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + } else + #endif + { + __pyx_t_5 = PyTuple_New(1+1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_5); + __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4); __pyx_t_4 = NULL; + __Pyx_GIVEREF(__pyx_t_2); + PyTuple_SET_ITEM(__pyx_t_5, 0+1, __pyx_t_2); + __pyx_t_2 = 0; + __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 155, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; + } + } + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":151 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = get_bytecode_while_frame_eval + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + __Pyx_XDECREF(__pyx_t_5); + __Pyx_AddTraceback("_pydevd_frame_eval.pydevd_frame_evaluator.frame_eval_func", __pyx_clineno, __pyx_lineno, __pyx_filename); + __pyx_r = NULL; + 
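/* --- Illustrative aside (not part of the vendored diff) ---------------------
 * frame_eval_func() here installs get_bytecode_while_frame_eval as the
 * interpreter's PEP 523 frame evaluator, and stop_frame_eval() (just below)
 * restores the default one. The sketch shows that same mechanism in plain C,
 * assuming CPython 3.6 headers; install_frame_hook, remove_frame_hook,
 * my_eval_frame and no_breaks_marker are hypothetical names, and the
 * marker-pointer comparison is a simplification (the vendored evaluator
 * instead stores a heap-allocated int set to NO_BREAKS_IN_FRAME via
 * _PyCode_SetExtra, as seen earlier in this file).
 */
#include <Python.h>
#include <frameobject.h>

static int no_breaks_marker = 1;          /* stands in for NO_BREAKS_IN_FRAME */
static Py_ssize_t code_extra_index = -1;  /* co_extra slot reserved at install time */

static PyObject *
my_eval_frame(PyFrameObject *frame, int exc)
{
    void *extra = NULL;
    /* Fast path: code objects previously marked as breakpoint-free go straight
       to the default evaluator with no further debugger work. */
    if (code_extra_index >= 0 &&
        _PyCode_GetExtra((PyObject *)frame->f_code, code_extra_index, &extra) == 0 &&
        extra == (void *)&no_breaks_marker) {
        return _PyEval_EvalFrameDefault(frame, exc);
    }
    /* A real hook would look up breakpoints here and possibly patch
       frame->f_code, as get_bytecode_while_frame_eval does. */
    return _PyEval_EvalFrameDefault(frame, exc);
}

static void
install_frame_hook(void)
{
    PyThreadState *state = PyThreadState_Get();
    /* Reserve a co_extra slot (PEP 523); no free function is registered. */
    code_extra_index = _PyEval_RequestCodeExtraIndex(NULL);
    state->interp->eval_frame = my_eval_frame;
}

static void
remove_frame_hook(void)
{
    PyThreadState *state = PyThreadState_Get();
    state->interp->eval_frame = _PyEval_EvalFrameDefault;
}
/* ------------------------------------------------------------------------- */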
__pyx_L0:; + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":157 + * dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + +/* Python wrapper */ +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_9stop_frame_eval(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ +static PyMethodDef __pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_9stop_frame_eval = {"stop_frame_eval", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_9stop_frame_eval, METH_NOARGS, 0}; +static PyObject *__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_9stop_frame_eval(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { + PyObject *__pyx_r = 0; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("stop_frame_eval (wrapper)", 0); + __pyx_r = __pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_8stop_frame_eval(__pyx_self); + + /* function exit code */ + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyObject *__pyx_pf_18_pydevd_frame_eval_22pydevd_frame_evaluator_8stop_frame_eval(CYTHON_UNUSED PyObject *__pyx_self) { + PyThreadState *__pyx_v_state; + PyObject *__pyx_r = NULL; + __Pyx_RefNannyDeclarations + __Pyx_RefNannySetupContext("stop_frame_eval", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":158 + * + * def stop_frame_eval(): + * cdef PyThreadState *state = PyThreadState_Get() # <<<<<<<<<<<<<< + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + __pyx_v_state = PyThreadState_Get(); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":159 + * def stop_frame_eval(): + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault # <<<<<<<<<<<<<< + */ + __pyx_v_state->interp->eval_frame = _PyEval_EvalFrameDefault; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":157 + * dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + + /* function exit code */ + __pyx_r = Py_None; __Pyx_INCREF(Py_None); + __Pyx_XGIVEREF(__pyx_r); + __Pyx_RefNannyFinishContext(); + return __pyx_r; +} + +static PyMethodDef __pyx_methods[] = { + {"dummy_trace_dispatch", (PyCFunction)__pyx_pw_18_pydevd_frame_eval_22pydevd_frame_evaluator_5dummy_trace_dispatch, METH_VARARGS|METH_KEYWORDS, 0}, + {0, 0, 0, 0} +}; + +#if PY_MAJOR_VERSION >= 3 +static struct PyModuleDef __pyx_moduledef = { + #if PY_VERSION_HEX < 0x03020000 + { PyObject_HEAD_INIT(NULL) NULL, 0, NULL }, + #else + PyModuleDef_HEAD_INIT, + #endif + "pydevd_frame_evaluator", + 0, /* m_doc */ + -1, /* m_size */ + __pyx_methods /* m_methods */, + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL /* m_free */ +}; +#endif + +static __Pyx_StringTabEntry __pyx_string_tab[] = { + {&__pyx_kp_s_, __pyx_k_, sizeof(__pyx_k_), 0, 0, 1, 0}, + {&__pyx_n_s_AVOID_RECURSION, __pyx_k_AVOID_RECURSION, sizeof(__pyx_k_AVOID_RECURSION), 0, 0, 1, 1}, + {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1}, + {&__pyx_n_s_DONT_TRACE, __pyx_k_DONT_TRACE, sizeof(__pyx_k_DONT_TRACE), 0, 0, 1, 1}, + {&__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_k_NORM_PATHS_AND_BASE_CONTAINER, 
sizeof(__pyx_k_NORM_PATHS_AND_BASE_CONTAINER), 0, 0, 1, 1}, + {&__pyx_n_s_NO_BREAKS_IN_FRAME, __pyx_k_NO_BREAKS_IN_FRAME, sizeof(__pyx_k_NO_BREAKS_IN_FRAME), 0, 0, 1, 1}, + {&__pyx_n_s_PyDBAdditionalThreadInfo, __pyx_k_PyDBAdditionalThreadInfo, sizeof(__pyx_k_PyDBAdditionalThreadInfo), 0, 0, 1, 1}, + {&__pyx_n_s_SetTrace, __pyx_k_SetTrace, sizeof(__pyx_k_SetTrace), 0, 0, 1, 1}, + {&__pyx_n_s_UseCodeExtraHolder, __pyx_k_UseCodeExtraHolder, sizeof(__pyx_k_UseCodeExtraHolder), 0, 0, 1, 1}, + {&__pyx_kp_s__2, __pyx_k__2, sizeof(__pyx_k__2), 0, 0, 1, 0}, + {&__pyx_n_s_add, __pyx_k_add, sizeof(__pyx_k_add), 0, 0, 1, 1}, + {&__pyx_n_s_additional_info, __pyx_k_additional_info, sizeof(__pyx_k_additional_info), 0, 0, 1, 1}, + {&__pyx_n_s_arg, __pyx_k_arg, sizeof(__pyx_k_arg), 0, 0, 1, 1}, + {&__pyx_n_s_breakpoints, __pyx_k_breakpoints, sizeof(__pyx_k_breakpoints), 0, 0, 1, 1}, + {&__pyx_n_s_can_not_skip, __pyx_k_can_not_skip, sizeof(__pyx_k_can_not_skip), 0, 0, 1, 1}, + {&__pyx_n_s_co_filename, __pyx_k_co_filename, sizeof(__pyx_k_co_filename), 0, 0, 1, 1}, + {&__pyx_n_s_code, __pyx_k_code, sizeof(__pyx_k_code), 0, 0, 1, 1}, + {&__pyx_n_s_code_objects, __pyx_k_code_objects, sizeof(__pyx_k_code_objects), 0, 0, 1, 1}, + {&__pyx_n_s_currentThread, __pyx_k_currentThread, sizeof(__pyx_k_currentThread), 0, 0, 1, 1}, + {&__pyx_n_s_dis, __pyx_k_dis, sizeof(__pyx_k_dis), 0, 0, 1, 1}, + {&__pyx_n_s_doc, __pyx_k_doc, sizeof(__pyx_k_doc), 0, 0, 1, 1}, + {&__pyx_n_s_dummy_trace_dispatch, __pyx_k_dummy_trace_dispatch, sizeof(__pyx_k_dummy_trace_dispatch), 0, 0, 1, 1}, + {&__pyx_n_s_dummy_tracing_holder, __pyx_k_dummy_tracing_holder, sizeof(__pyx_k_dummy_tracing_holder), 0, 0, 1, 1}, + {&__pyx_n_s_enable_cache_frames_without_brea, __pyx_k_enable_cache_frames_without_brea, sizeof(__pyx_k_enable_cache_frames_without_brea), 0, 0, 1, 1}, + {&__pyx_n_s_event, __pyx_k_event, sizeof(__pyx_k_event), 0, 0, 1, 1}, + {&__pyx_n_s_f_code, __pyx_k_f_code, sizeof(__pyx_k_f_code), 0, 0, 1, 1}, + {&__pyx_n_s_f_globals, __pyx_k_f_globals, sizeof(__pyx_k_f_globals), 0, 0, 1, 1}, + {&__pyx_n_s_findlinestarts, __pyx_k_findlinestarts, sizeof(__pyx_k_findlinestarts), 0, 0, 1, 1}, + {&__pyx_n_s_frame, __pyx_k_frame, sizeof(__pyx_k_frame), 0, 0, 1, 1}, + {&__pyx_n_s_frame_eval_func, __pyx_k_frame_eval_func, sizeof(__pyx_k_frame_eval_func), 0, 0, 1, 1}, + {&__pyx_n_s_get, __pyx_k_get, sizeof(__pyx_k_get), 0, 0, 1, 1}, + {&__pyx_n_s_get_abs_path_real_path_and_base, __pyx_k_get_abs_path_real_path_and_base, sizeof(__pyx_k_get_abs_path_real_path_and_base), 0, 0, 1, 1}, + {&__pyx_n_s_get_file_type, __pyx_k_get_file_type, sizeof(__pyx_k_get_file_type), 0, 0, 1, 1}, + {&__pyx_n_s_get_global_debugger, __pyx_k_get_global_debugger, sizeof(__pyx_k_get_global_debugger), 0, 0, 1, 1}, + {&__pyx_n_s_has_plugin_line_breaks, __pyx_k_has_plugin_line_breaks, sizeof(__pyx_k_has_plugin_line_breaks), 0, 0, 1, 1}, + {&__pyx_kp_s_home_user_work_PyDev_Debugger, __pyx_k_home_user_work_PyDev_Debugger, sizeof(__pyx_k_home_user_work_PyDev_Debugger), 0, 0, 1, 0}, + {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1}, + {&__pyx_n_s_index, __pyx_k_index, sizeof(__pyx_k_index), 0, 0, 1, 1}, + {&__pyx_n_s_insert_code, __pyx_k_insert_code, sizeof(__pyx_k_insert_code), 0, 0, 1, 1}, + {&__pyx_n_s_is_tracing, __pyx_k_is_tracing, sizeof(__pyx_k_is_tracing), 0, 0, 1, 1}, + {&__pyx_n_s_is_use_code_extra, __pyx_k_is_use_code_extra, sizeof(__pyx_k_is_use_code_extra), 0, 0, 1, 1}, + {&__pyx_n_s_local, __pyx_k_local, sizeof(__pyx_k_local), 0, 0, 1, 1}, + 
{&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1}, + {&__pyx_n_s_metaclass, __pyx_k_metaclass, sizeof(__pyx_k_metaclass), 0, 0, 1, 1}, + {&__pyx_n_s_module, __pyx_k_module, sizeof(__pyx_k_module), 0, 0, 1, 1}, + {&__pyx_n_s_new_value, __pyx_k_new_value, sizeof(__pyx_k_new_value), 0, 0, 1, 1}, + {&__pyx_n_s_plugin, __pyx_k_plugin, sizeof(__pyx_k_plugin), 0, 0, 1, 1}, + {&__pyx_n_s_prepare, __pyx_k_prepare, sizeof(__pyx_k_prepare), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_do_not_trace, __pyx_k_pydev_do_not_trace, sizeof(__pyx_k_pydev_do_not_trace), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_imps__pydev_saved_modules, __pyx_k_pydev_imps__pydev_saved_modules, sizeof(__pyx_k_pydev_imps__pydev_saved_modules), 0, 0, 1, 1}, + {&__pyx_n_s_pydev_trace_code_wrapper, __pyx_k_pydev_trace_code_wrapper, sizeof(__pyx_k_pydev_trace_code_wrapper), 0, 0, 1, 1}, + {&__pyx_kp_s_pydevd_additional_thread_info_re, __pyx_k_pydevd_additional_thread_info_re, sizeof(__pyx_k_pydevd_additional_thread_info_re), 0, 0, 1, 0}, + {&__pyx_n_s_pydevd_bundle_pydevd_additional, __pyx_k_pydevd_bundle_pydevd_additional, sizeof(__pyx_k_pydevd_bundle_pydevd_additional), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_comm, __pyx_k_pydevd_bundle_pydevd_comm, sizeof(__pyx_k_pydevd_bundle_pydevd_comm), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_bundle_pydevd_dont_trace, __pyx_k_pydevd_bundle_pydevd_dont_trace, sizeof(__pyx_k_pydevd_bundle_pydevd_dont_trace), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_file_utils, __pyx_k_pydevd_file_utils, sizeof(__pyx_k_pydevd_file_utils), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_frame_eval_pydevd_frame, __pyx_k_pydevd_frame_eval_pydevd_frame, sizeof(__pyx_k_pydevd_frame_eval_pydevd_frame), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_frame_eval_pydevd_frame_2, __pyx_k_pydevd_frame_eval_pydevd_frame_2, sizeof(__pyx_k_pydevd_frame_eval_pydevd_frame_2), 0, 0, 1, 1}, + {&__pyx_n_s_pydevd_frame_eval_pydevd_modify, __pyx_k_pydevd_frame_eval_pydevd_modify, sizeof(__pyx_k_pydevd_frame_eval_pydevd_modify), 0, 0, 1, 1}, + {&__pyx_n_s_qualname, __pyx_k_qualname, sizeof(__pyx_k_qualname), 0, 0, 1, 1}, + {&__pyx_n_s_set_trace_for_frame_and_parents, __pyx_k_set_trace_for_frame_and_parents, sizeof(__pyx_k_set_trace_for_frame_and_parents), 0, 0, 1, 1}, + {&__pyx_n_s_set_trace_func, __pyx_k_set_trace_func, sizeof(__pyx_k_set_trace_func), 0, 0, 1, 1}, + {&__pyx_n_s_state, __pyx_k_state, sizeof(__pyx_k_state), 0, 0, 1, 1}, + {&__pyx_n_s_stop_frame_eval, __pyx_k_stop_frame_eval, sizeof(__pyx_k_stop_frame_eval), 0, 0, 1, 1}, + {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1}, + {&__pyx_n_s_threading, __pyx_k_threading, sizeof(__pyx_k_threading), 0, 0, 1, 1}, + {&__pyx_kp_s_threading_py, __pyx_k_threading_py, sizeof(__pyx_k_threading_py), 0, 0, 1, 0}, + {&__pyx_n_s_trace_dispatch, __pyx_k_trace_dispatch, sizeof(__pyx_k_trace_dispatch), 0, 0, 1, 1}, + {&__pyx_n_s_update_globals_dict, __pyx_k_update_globals_dict, sizeof(__pyx_k_update_globals_dict), 0, 0, 1, 1}, + {&__pyx_n_s_use_code_extra, __pyx_k_use_code_extra, sizeof(__pyx_k_use_code_extra), 0, 0, 1, 1}, + {&__pyx_kp_s_weakrefset_py, __pyx_k_weakrefset_py, sizeof(__pyx_k_weakrefset_py), 0, 0, 1, 0}, + {0, 0, 0, 0, 0, 0, 0} +}; +static int __Pyx_InitCachedBuiltins(void) { + __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 83, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +static int __Pyx_InitCachedConstants(void) { + __Pyx_RefNannyDeclarations + 
__Pyx_RefNannySetupContext("__Pyx_InitCachedConstants", 0); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":68 + * for file in AVOID_RECURSION: + * # we can't call any other function without this check, because we can get stack overflow + * for path_separator in ('/', '\\'): # <<<<<<<<<<<<<< + * if filepath.endswith(path_separator + file): + * skip_file = True + */ + __pyx_tuple__3 = PyTuple_Pack(2, __pyx_kp_s_, __pyx_kp_s__2); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(0, 68, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__3); + __Pyx_GIVEREF(__pyx_tuple__3); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":28 + * + * + * def is_use_code_extra(): # <<<<<<<<<<<<<< + * return UseCodeExtraHolder.use_code_extra + * + */ + __pyx_codeobj__4 = (PyObject*)__Pyx_PyCode_New(0, 0, 0, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_home_user_work_PyDev_Debugger, __pyx_n_s_is_use_code_extra, 28, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__4)) __PYX_ERR(0, 28, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":33 + * + * # enable using `co_extra` field in order to cache frames without breakpoints + * def enable_cache_frames_without_breaks(new_value): # <<<<<<<<<<<<<< + * UseCodeExtraHolder.use_code_extra = new_value + * + */ + __pyx_tuple__5 = PyTuple_Pack(1, __pyx_n_s_new_value); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 33, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__5); + __Pyx_GIVEREF(__pyx_tuple__5); + __pyx_codeobj__6 = (PyObject*)__Pyx_PyCode_New(1, 0, 1, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__5, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_home_user_work_PyDev_Debugger, __pyx_n_s_enable_cache_frames_without_brea, 33, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__6)) __PYX_ERR(0, 33, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":151 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = get_bytecode_while_frame_eval + */ + __pyx_tuple__7 = PyTuple_Pack(1, __pyx_n_s_state); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__7); + __Pyx_GIVEREF(__pyx_tuple__7); + __pyx_codeobj__8 = (PyObject*)__Pyx_PyCode_New(0, 0, 1, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__7, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_home_user_work_PyDev_Debugger, __pyx_n_s_frame_eval_func, 151, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__8)) __PYX_ERR(0, 151, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":157 + * dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + __pyx_tuple__9 = PyTuple_Pack(1, __pyx_n_s_state); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 157, __pyx_L1_error) + __Pyx_GOTREF(__pyx_tuple__9); + __Pyx_GIVEREF(__pyx_tuple__9); + __pyx_codeobj__10 = (PyObject*)__Pyx_PyCode_New(0, 0, 1, 0, 0, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__9, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_home_user_work_PyDev_Debugger, __pyx_n_s_stop_frame_eval, 157, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__10)) __PYX_ERR(0, 157, __pyx_L1_error) + __Pyx_RefNannyFinishContext(); + return 0; + __pyx_L1_error:; + 
__Pyx_RefNannyFinishContext(); + return -1; +} + +static int __Pyx_InitGlobals(void) { + if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_int_neg_1 = PyInt_FromLong(-1); if (unlikely(!__pyx_int_neg_1)) __PYX_ERR(0, 1, __pyx_L1_error) + return 0; + __pyx_L1_error:; + return -1; +} + +#if PY_MAJOR_VERSION < 3 +PyMODINIT_FUNC initpydevd_frame_evaluator(void); /*proto*/ +PyMODINIT_FUNC initpydevd_frame_evaluator(void) +#else +PyMODINIT_FUNC PyInit_pydevd_frame_evaluator(void); /*proto*/ +PyMODINIT_FUNC PyInit_pydevd_frame_evaluator(void) +#endif +{ + PyObject *__pyx_t_1 = NULL; + PyObject *__pyx_t_2 = NULL; + PyObject *__pyx_t_3 = NULL; + PyObject *__pyx_t_4 = NULL; + __Pyx_RefNannyDeclarations + #if CYTHON_REFNANNY + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); + if (!__Pyx_RefNanny) { + PyErr_Clear(); + __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); + if (!__Pyx_RefNanny) + Py_FatalError("failed to import 'refnanny' module"); + } + #endif + __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_pydevd_frame_evaluator(void)", 0); + if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_empty_unicode = PyUnicode_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error) + #ifdef __Pyx_CyFunction_USED + if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_FusedFunction_USED + if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Coroutine_USED + if (__pyx_Coroutine_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_Generator_USED + if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + #ifdef __Pyx_StopAsyncIteration_USED + if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + /*--- Library function declarations ---*/ + /*--- Threads initialization code ---*/ + #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS + #ifdef WITH_THREAD /* Python build with threading support? */ + PyEval_InitThreads(); + #endif + #endif + /*--- Module creation code ---*/ + #if PY_MAJOR_VERSION < 3 + __pyx_m = Py_InitModule4("pydevd_frame_evaluator", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m); + #else + __pyx_m = PyModule_Create(&__pyx_moduledef); + #endif + if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error) + __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error) + Py_INCREF(__pyx_d); + __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error) + #if CYTHON_COMPILING_IN_PYPY + Py_INCREF(__pyx_b); + #endif + if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error); + /*--- Initialize various global constants etc. 
---*/ + if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + if (__pyx_module_is_main__pydevd_frame_eval__pydevd_frame_evaluator) { + if (PyObject_SetAttrString(__pyx_m, "__name__", __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + } + #if PY_MAJOR_VERSION >= 3 + { + PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error) + if (!PyDict_GetItemString(modules, "_pydevd_frame_eval.pydevd_frame_evaluator")) { + if (unlikely(PyDict_SetItemString(modules, "_pydevd_frame_eval.pydevd_frame_evaluator", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error) + } + } + #endif + /*--- Builtin init code ---*/ + if (__Pyx_InitCachedBuiltins() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Constants init code ---*/ + if (__Pyx_InitCachedConstants() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + /*--- Global init code ---*/ + /*--- Variable export code ---*/ + /*--- Function export code ---*/ + /*--- Type init code ---*/ + /*--- Type import code ---*/ + /*--- Variable import code ---*/ + /*--- Function import code ---*/ + /*--- Execution code ---*/ + #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED) + if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error) + #endif + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":1 + * import dis # <<<<<<<<<<<<<< + * from _pydev_imps._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo + */ + __pyx_t_1 = __Pyx_Import(__pyx_n_s_dis, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_dis, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":2 + * import dis + * from _pydev_imps._pydev_saved_modules import threading # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo + * from _pydevd_bundle.pydevd_comm import get_global_debugger + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_threading); + __Pyx_GIVEREF(__pyx_n_s_threading); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_threading); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydev_imps__pydev_saved_modules, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_threading); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_threading, __pyx_t_1) < 0) __PYX_ERR(0, 2, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":3 + * import dis + * from _pydev_imps._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_comm import get_global_debugger + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + 
__Pyx_INCREF(__pyx_n_s_PyDBAdditionalThreadInfo); + __Pyx_GIVEREF(__pyx_n_s_PyDBAdditionalThreadInfo); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_PyDBAdditionalThreadInfo); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_additional, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_PyDBAdditionalThreadInfo); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_PyDBAdditionalThreadInfo, __pyx_t_2) < 0) __PYX_ERR(0, 3, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":4 + * from _pydev_imps._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo + * from _pydevd_bundle.pydevd_comm import get_global_debugger # <<<<<<<<<<<<<< + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + * from _pydevd_frame_eval.pydevd_frame_tracing import pydev_trace_code_wrapper, update_globals_dict, dummy_tracing_holder + */ + __pyx_t_1 = PyList_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_get_global_debugger); + __Pyx_GIVEREF(__pyx_n_s_get_global_debugger); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_get_global_debugger); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_comm, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_global_debugger); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_global_debugger, __pyx_t_1) < 0) __PYX_ERR(0, 4, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":5 + * from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo + * from _pydevd_bundle.pydevd_comm import get_global_debugger + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE # <<<<<<<<<<<<<< + * from _pydevd_frame_eval.pydevd_frame_tracing import pydev_trace_code_wrapper, update_globals_dict, dummy_tracing_holder + * from _pydevd_frame_eval.pydevd_modify_bytecode import insert_code + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_DONT_TRACE); + __Pyx_GIVEREF(__pyx_n_s_DONT_TRACE); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_DONT_TRACE); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_bundle_pydevd_dont_trace, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_DONT_TRACE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_DONT_TRACE, __pyx_t_2) < 0) __PYX_ERR(0, 5, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":6 + * from _pydevd_bundle.pydevd_comm import get_global_debugger + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + * from 
_pydevd_frame_eval.pydevd_frame_tracing import pydev_trace_code_wrapper, update_globals_dict, dummy_tracing_holder # <<<<<<<<<<<<<< + * from _pydevd_frame_eval.pydevd_modify_bytecode import insert_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + */ + __pyx_t_1 = PyList_New(3); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_pydev_trace_code_wrapper); + __Pyx_GIVEREF(__pyx_n_s_pydev_trace_code_wrapper); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_pydev_trace_code_wrapper); + __Pyx_INCREF(__pyx_n_s_update_globals_dict); + __Pyx_GIVEREF(__pyx_n_s_update_globals_dict); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_update_globals_dict); + __Pyx_INCREF(__pyx_n_s_dummy_tracing_holder); + __Pyx_GIVEREF(__pyx_n_s_dummy_tracing_holder); + PyList_SET_ITEM(__pyx_t_1, 2, __pyx_n_s_dummy_tracing_holder); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_frame_eval_pydevd_frame, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_pydev_trace_code_wrapper); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_pydev_trace_code_wrapper, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_update_globals_dict); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_update_globals_dict, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_dummy_tracing_holder); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_dummy_tracing_holder, __pyx_t_1) < 0) __PYX_ERR(0, 6, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":7 + * from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE + * from _pydevd_frame_eval.pydevd_frame_tracing import pydev_trace_code_wrapper, update_globals_dict, dummy_tracing_holder + * from _pydevd_frame_eval.pydevd_modify_bytecode import insert_code # <<<<<<<<<<<<<< + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + * + */ + __pyx_t_2 = PyList_New(1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_n_s_insert_code); + __Pyx_GIVEREF(__pyx_n_s_insert_code); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_n_s_insert_code); + __pyx_t_1 = __Pyx_Import(__pyx_n_s_pydevd_frame_eval_pydevd_modify, __pyx_t_2, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __pyx_t_2 = __Pyx_ImportFrom(__pyx_t_1, __pyx_n_s_insert_code); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_insert_code, __pyx_t_2) < 0) __PYX_ERR(0, 7, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":8 + * from _pydevd_frame_eval.pydevd_frame_tracing import pydev_trace_code_wrapper, update_globals_dict, dummy_tracing_holder + * 
from _pydevd_frame_eval.pydevd_modify_bytecode import insert_code + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER # <<<<<<<<<<<<<< + * + * AVOID_RECURSION = [ + */ + __pyx_t_1 = PyList_New(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_INCREF(__pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_GIVEREF(__pyx_n_s_get_abs_path_real_path_and_base); + PyList_SET_ITEM(__pyx_t_1, 0, __pyx_n_s_get_abs_path_real_path_and_base); + __Pyx_INCREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __Pyx_GIVEREF(__pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + PyList_SET_ITEM(__pyx_t_1, 1, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); + __pyx_t_2 = __Pyx_Import(__pyx_n_s_pydevd_file_utils, __pyx_t_1, -1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_get_abs_path_real_path_and_base); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_abs_path_real_path_and_base, __pyx_t_1) < 0) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __pyx_t_1 = __Pyx_ImportFrom(__pyx_t_2, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NORM_PATHS_AND_BASE_CONTAINER, __pyx_t_1) < 0) __PYX_ERR(0, 8, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":10 + * from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + * + * AVOID_RECURSION = [ # <<<<<<<<<<<<<< + * 'pydevd_additional_thread_info_regular.py', + * 'threading.py', + */ + __pyx_t_2 = PyList_New(3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __Pyx_INCREF(__pyx_kp_s_pydevd_additional_thread_info_re); + __Pyx_GIVEREF(__pyx_kp_s_pydevd_additional_thread_info_re); + PyList_SET_ITEM(__pyx_t_2, 0, __pyx_kp_s_pydevd_additional_thread_info_re); + __Pyx_INCREF(__pyx_kp_s_threading_py); + __Pyx_GIVEREF(__pyx_kp_s_threading_py); + PyList_SET_ITEM(__pyx_t_2, 1, __pyx_kp_s_threading_py); + __Pyx_INCREF(__pyx_kp_s_weakrefset_py); + __Pyx_GIVEREF(__pyx_kp_s_weakrefset_py); + PyList_SET_ITEM(__pyx_t_2, 2, __pyx_kp_s_weakrefset_py); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_AVOID_RECURSION, __pyx_t_2) < 0) __PYX_ERR(0, 10, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":16 + * ] + * + * get_file_type = DONT_TRACE.get # <<<<<<<<<<<<<< + * NO_BREAKS_IN_FRAME = 1 + * + */ + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_DONT_TRACE); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_get); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + if (PyDict_SetItem(__pyx_d, __pyx_n_s_get_file_type, __pyx_t_1) < 0) __PYX_ERR(0, 16, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":17 + * + * get_file_type = DONT_TRACE.get + * NO_BREAKS_IN_FRAME = 1 # <<<<<<<<<<<<<< + * + * + */ + if (PyDict_SetItem(__pyx_d, __pyx_n_s_NO_BREAKS_IN_FRAME, __pyx_int_1) < 0) __PYX_ERR(0, 17, 
__pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":20 + * + * + * class UseCodeExtraHolder: # <<<<<<<<<<<<<< + * # Use this flag in order to disable co_extra field + * use_code_extra = True + */ + __pyx_t_1 = __Pyx_Py3MetaclassPrepare((PyObject *) NULL, __pyx_empty_tuple, __pyx_n_s_UseCodeExtraHolder, __pyx_n_s_UseCodeExtraHolder, (PyObject *) NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_2, (PyObject *) NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":22 + * class UseCodeExtraHolder: + * # Use this flag in order to disable co_extra field + * use_code_extra = True # <<<<<<<<<<<<<< + * # Keep the index of co_extra in a thread-local storage + * local = threading.local() + */ + if (PyObject_SetItem(__pyx_t_1, __pyx_n_s_use_code_extra, Py_True) < 0) __PYX_ERR(0, 22, __pyx_L1_error) + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":24 + * use_code_extra = True + * # Keep the index of co_extra in a thread-local storage + * local = threading.local() # <<<<<<<<<<<<<< + * local.index = -1 + * + */ + __pyx_t_3 = __Pyx_GetModuleGlobalName(__pyx_n_s_threading); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_3); + __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_local); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_4); + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + __pyx_t_3 = NULL; + if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) { + __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_4); + if (likely(__pyx_t_3)) { + PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4); + __Pyx_INCREF(__pyx_t_3); + __Pyx_INCREF(function); + __Pyx_DECREF_SET(__pyx_t_4, function); + } + } + if (__pyx_t_3) { + __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_3); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; + } else { + __pyx_t_2 = __Pyx_PyObject_CallNoArg(__pyx_t_4); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 24, __pyx_L1_error) + } + __Pyx_GOTREF(__pyx_t_2); + __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; + if (PyObject_SetItem(__pyx_t_1, __pyx_n_s_local, __pyx_t_2) < 0) __PYX_ERR(0, 24, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":25 + * # Keep the index of co_extra in a thread-local storage + * local = threading.local() + * local.index = -1 # <<<<<<<<<<<<<< + * + * + */ + __pyx_t_2 = PyObject_GetItem(__pyx_t_1, __pyx_n_s_local); + if (unlikely(!__pyx_t_2)) { + PyErr_Clear(); + __pyx_t_2 = __Pyx_GetModuleGlobalName(__pyx_n_s_local); + } + if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (__Pyx_PyObject_SetAttrStr(__pyx_t_2, __pyx_n_s_index, __pyx_int_neg_1) < 0) __PYX_ERR(0, 25, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":20 + * + * + * class UseCodeExtraHolder: # <<<<<<<<<<<<<< + * # Use this flag in order to disable co_extra field + * use_code_extra = True + */ + __pyx_t_2 = __Pyx_Py3ClassCreate(((PyObject*)&__Pyx_DefaultClassType), __pyx_n_s_UseCodeExtraHolder, __pyx_empty_tuple, __pyx_t_1, NULL, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_2); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_UseCodeExtraHolder, __pyx_t_2) < 0) __PYX_ERR(0, 20, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + 
/* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":28 + * + * + * def is_use_code_extra(): # <<<<<<<<<<<<<< + * return UseCodeExtraHolder.use_code_extra + * + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_1is_use_code_extra, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_is_use_code_extra, __pyx_t_1) < 0) __PYX_ERR(0, 28, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":33 + * + * # enable using `co_extra` field in order to cache frames without breakpoints + * def enable_cache_frames_without_breaks(new_value): # <<<<<<<<<<<<<< + * UseCodeExtraHolder.use_code_extra = new_value + * + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_3enable_cache_frames_without_breaks, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 33, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_enable_cache_frames_without_brea, __pyx_t_1) < 0) __PYX_ERR(0, 33, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":151 + * return _PyEval_EvalFrameDefault(frame_obj, exc) + * + * def frame_eval_func(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = get_bytecode_while_frame_eval + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_7frame_eval_func, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_frame_eval_func, __pyx_t_1) < 0) __PYX_ERR(0, 151, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":157 + * dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + * + * def stop_frame_eval(): # <<<<<<<<<<<<<< + * cdef PyThreadState *state = PyThreadState_Get() + * state.interp.eval_frame = _PyEval_EvalFrameDefault + */ + __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_18_pydevd_frame_eval_22pydevd_frame_evaluator_9stop_frame_eval, NULL, __pyx_n_s_pydevd_frame_eval_pydevd_frame_2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 157, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_stop_frame_eval, __pyx_t_1) < 0) __PYX_ERR(0, 157, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /* "_pydevd_frame_eval/pydevd_frame_evaluator.pyx":1 + * import dis # <<<<<<<<<<<<<< + * from _pydev_imps._pydev_saved_modules import threading + * from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo + */ + __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_GOTREF(__pyx_t_1); + if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error) + __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; + + /*--- Wrapped vars code ---*/ + + goto __pyx_L0; + __pyx_L1_error:; + __Pyx_XDECREF(__pyx_t_1); + __Pyx_XDECREF(__pyx_t_2); + __Pyx_XDECREF(__pyx_t_3); + __Pyx_XDECREF(__pyx_t_4); + if (__pyx_m) { + if (__pyx_d) { + __Pyx_AddTraceback("init _pydevd_frame_eval.pydevd_frame_evaluator", __pyx_clineno, __pyx_lineno, __pyx_filename); + } + Py_DECREF(__pyx_m); __pyx_m = 0; + } else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_ImportError, "init 
_pydevd_frame_eval.pydevd_frame_evaluator"); + } + __pyx_L0:; + __Pyx_RefNannyFinishContext(); + #if PY_MAJOR_VERSION < 3 + return; + #else + return __pyx_m; + #endif +} + +/* --- Runtime support code --- */ +/* Refnanny */ +#if CYTHON_REFNANNY +static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) { + PyObject *m = NULL, *p = NULL; + void *r = NULL; + m = PyImport_ImportModule((char *)modname); + if (!m) goto end; + p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); + if (!p) goto end; + r = PyLong_AsVoidPtr(p); +end: + Py_XDECREF(p); + Py_XDECREF(m); + return (__Pyx_RefNannyAPIStruct *)r; +} +#endif + +/* GetBuiltinName */ +static PyObject *__Pyx_GetBuiltinName(PyObject *name) { + PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name); + if (unlikely(!result)) { + PyErr_Format(PyExc_NameError, +#if PY_MAJOR_VERSION >= 3 + "name '%U' is not defined", name); +#else + "name '%.200s' is not defined", PyString_AS_STRING(name)); +#endif + } + return result; +} + +/* GetModuleGlobalName */ +static CYTHON_INLINE PyObject *__Pyx_GetModuleGlobalName(PyObject *name) { + PyObject *result; +#if !CYTHON_AVOID_BORROWED_REFS + result = PyDict_GetItem(__pyx_d, name); + if (likely(result)) { + Py_INCREF(result); + } else { +#else + result = PyObject_GetItem(__pyx_d, name); + if (!result) { + PyErr_Clear(); +#endif + result = __Pyx_GetBuiltinName(name); + } + return result; +} + +/* RaiseArgTupleInvalid */ + static void __Pyx_RaiseArgtupleInvalid( + const char* func_name, + int exact, + Py_ssize_t num_min, + Py_ssize_t num_max, + Py_ssize_t num_found) +{ + Py_ssize_t num_expected; + const char *more_or_less; + if (num_found < num_min) { + num_expected = num_min; + more_or_less = "at least"; + } else { + num_expected = num_max; + more_or_less = "at most"; + } + if (exact) { + more_or_less = "exactly"; + } + PyErr_Format(PyExc_TypeError, + "%.200s() takes %.8s %" CYTHON_FORMAT_SSIZE_T "d positional argument%.1s (%" CYTHON_FORMAT_SSIZE_T "d given)", + func_name, more_or_less, num_expected, + (num_expected == 1) ? 
"" : "s", num_found); +} + +/* RaiseDoubleKeywords */ + static void __Pyx_RaiseDoubleKeywordsError( + const char* func_name, + PyObject* kw_name) +{ + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION >= 3 + "%s() got multiple values for keyword argument '%U'", func_name, kw_name); + #else + "%s() got multiple values for keyword argument '%s'", func_name, + PyString_AsString(kw_name)); + #endif +} + +/* ParseKeywords */ + static int __Pyx_ParseOptionalKeywords( + PyObject *kwds, + PyObject **argnames[], + PyObject *kwds2, + PyObject *values[], + Py_ssize_t num_pos_args, + const char* function_name) +{ + PyObject *key = 0, *value = 0; + Py_ssize_t pos = 0; + PyObject*** name; + PyObject*** first_kw_arg = argnames + num_pos_args; + while (PyDict_Next(kwds, &pos, &key, &value)) { + name = first_kw_arg; + while (*name && (**name != key)) name++; + if (*name) { + values[name-argnames] = value; + continue; + } + name = first_kw_arg; + #if PY_MAJOR_VERSION < 3 + if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) { + while (*name) { + if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key)) + && _PyString_Eq(**name, key)) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + if ((**argname == key) || ( + (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key)) + && _PyString_Eq(**argname, key))) { + goto arg_passed_twice; + } + argname++; + } + } + } else + #endif + if (likely(PyUnicode_Check(key))) { + while (*name) { + int cmp = (**name == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 : + #endif + PyUnicode_Compare(**name, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) { + values[name-argnames] = value; + break; + } + name++; + } + if (*name) continue; + else { + PyObject*** argname = argnames; + while (argname != first_kw_arg) { + int cmp = (**argname == key) ? 0 : + #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3 + (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 : + #endif + PyUnicode_Compare(**argname, key); + if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad; + if (cmp == 0) goto arg_passed_twice; + argname++; + } + } + } else + goto invalid_keyword_type; + if (kwds2) { + if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; + } else { + goto invalid_keyword; + } + } + return 0; +arg_passed_twice: + __Pyx_RaiseDoubleKeywordsError(function_name, key); + goto bad; +invalid_keyword_type: + PyErr_Format(PyExc_TypeError, + "%.200s() keywords must be strings", function_name); + goto bad; +invalid_keyword: + PyErr_Format(PyExc_TypeError, + #if PY_MAJOR_VERSION < 3 + "%.200s() got an unexpected keyword argument '%.200s'", + function_name, PyString_AsString(key)); + #else + "%s() got an unexpected keyword argument '%U'", + function_name, key); + #endif +bad: + return -1; +} + +/* ArgTypeTest */ + static void __Pyx_RaiseArgumentTypeInvalid(const char* name, PyObject *obj, PyTypeObject *type) { + PyErr_Format(PyExc_TypeError, + "Argument '%.200s' has incorrect type (expected %.200s, got %.200s)", + name, type->tp_name, Py_TYPE(obj)->tp_name); +} +static CYTHON_INLINE int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, + const char *name, int exact) +{ + if (unlikely(!type)) { + PyErr_SetString(PyExc_SystemError, "Missing type object"); + return 0; + } + if (none_allowed && obj == Py_None) return 1; + else if (exact) { + if (likely(Py_TYPE(obj) == type)) return 1; + #if PY_MAJOR_VERSION == 2 + else if ((type == &PyBaseString_Type) && likely(__Pyx_PyBaseString_CheckExact(obj))) return 1; + #endif + } + else { + if (likely(PyObject_TypeCheck(obj, type))) return 1; + } + __Pyx_RaiseArgumentTypeInvalid(name, obj, type); + return 0; +} + +/* PyCFunctionFastCall */ + #if CYTHON_FAST_PYCCALL +static CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) { + PyCFunctionObject *func = (PyCFunctionObject*)func_obj; + PyCFunction meth = PyCFunction_GET_FUNCTION(func); + PyObject *self = PyCFunction_GET_SELF(func); + assert(PyCFunction_Check(func)); + assert(METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST))); + assert(nargs >= 0); + assert(nargs == 0 || args != NULL); + /* _PyCFunction_FastCallDict() must not be called with an exception set, + because it may clear it (directly or indirectly) and so the + caller loses its exception */ + assert(!PyErr_Occurred()); + return (*((__Pyx_PyCFunctionFast)meth)) (self, args, nargs, NULL); +} +#endif // CYTHON_FAST_PYCCALL + +/* PyFunctionFastCall */ + #if CYTHON_FAST_PYCALL +#include "frameobject.h" +static PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, PyObject **args, Py_ssize_t na, + PyObject *globals) { + PyFrameObject *f; + PyThreadState *tstate = PyThreadState_GET(); + PyObject **fastlocals; + Py_ssize_t i; + PyObject *result; + assert(globals != NULL); + /* XXX Perhaps we should create a specialized + PyFrame_New() that doesn't take locals, but does + take builtins without sanity checking them. 
+ */ + assert(tstate != NULL); + f = PyFrame_New(tstate, co, globals, NULL); + if (f == NULL) { + return NULL; + } + fastlocals = f->f_localsplus; + for (i = 0; i < na; i++) { + Py_INCREF(*args); + fastlocals[i] = *args++; + } + result = PyEval_EvalFrameEx(f,0); + ++tstate->recursion_depth; + Py_DECREF(f); + --tstate->recursion_depth; + return result; +} +#if 1 || PY_VERSION_HEX < 0x030600B1 +static PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, int nargs, PyObject *kwargs) { + PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func); + PyObject *globals = PyFunction_GET_GLOBALS(func); + PyObject *argdefs = PyFunction_GET_DEFAULTS(func); + PyObject *closure; +#if PY_MAJOR_VERSION >= 3 + PyObject *kwdefs; +#endif + PyObject *kwtuple, **k; + PyObject **d; + Py_ssize_t nd; + Py_ssize_t nk; + PyObject *result; + assert(kwargs == NULL || PyDict_Check(kwargs)); + nk = kwargs ? PyDict_Size(kwargs) : 0; + if (Py_EnterRecursiveCall((char*)" while calling a Python object")) { + return NULL; + } + if ( +#if PY_MAJOR_VERSION >= 3 + co->co_kwonlyargcount == 0 && +#endif + likely(kwargs == NULL || nk == 0) && + co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) { + if (argdefs == NULL && co->co_argcount == nargs) { + result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals); + goto done; + } + else if (nargs == 0 && argdefs != NULL + && co->co_argcount == Py_SIZE(argdefs)) { + /* function called with no arguments, but all parameters have + a default value: use default values as arguments .*/ + args = &PyTuple_GET_ITEM(argdefs, 0); + result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals); + goto done; + } + } + if (kwargs != NULL) { + Py_ssize_t pos, i; + kwtuple = PyTuple_New(2 * nk); + if (kwtuple == NULL) { + result = NULL; + goto done; + } + k = &PyTuple_GET_ITEM(kwtuple, 0); + pos = i = 0; + while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) { + Py_INCREF(k[i]); + Py_INCREF(k[i+1]); + i += 2; + } + nk = i / 2; + } + else { + kwtuple = NULL; + k = NULL; + } + closure = PyFunction_GET_CLOSURE(func); +#if PY_MAJOR_VERSION >= 3 + kwdefs = PyFunction_GET_KW_DEFAULTS(func); +#endif + if (argdefs != NULL) { + d = &PyTuple_GET_ITEM(argdefs, 0); + nd = Py_SIZE(argdefs); + } + else { + d = NULL; + nd = 0; + } +#if PY_MAJOR_VERSION >= 3 + result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, kwdefs, closure); +#else + result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL, + args, nargs, + k, (int)nk, + d, (int)nd, closure); +#endif + Py_XDECREF(kwtuple); +done: + Py_LeaveRecursiveCall(); + return result; +} +#endif // CPython < 3.6 +#endif // CYTHON_FAST_PYCALL + +/* PyObjectCall */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) { + PyObject *result; + ternaryfunc call = func->ob_type->tp_call; + if (unlikely(!call)) + return PyObject_Call(func, arg, kw); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = (*call)(func, arg, kw); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallMethO */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) { + PyObject *self, *result; + PyCFunction cfunc; + cfunc = 
PyCFunction_GET_FUNCTION(func); + self = PyCFunction_GET_SELF(func); + if (unlikely(Py_EnterRecursiveCall((char*)" while calling a Python object"))) + return NULL; + result = cfunc(self, arg); + Py_LeaveRecursiveCall(); + if (unlikely(!result) && unlikely(!PyErr_Occurred())) { + PyErr_SetString( + PyExc_SystemError, + "NULL result without error in PyObject_Call"); + } + return result; +} +#endif + +/* PyObjectCallOneArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_New(1); + if (unlikely(!args)) return NULL; + Py_INCREF(arg); + PyTuple_SET_ITEM(args, 0, arg); + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, &arg, 1); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || PyObject_TypeCheck(func, __pyx_CyFunctionType))) { +#else + if (likely(PyCFunction_Check(func))) { +#endif + if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) { + return __Pyx_PyObject_CallMethO(func, arg); +#if CYTHON_FAST_PYCCALL + } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) { + return __Pyx_PyCFunction_FastCall(func, &arg, 1); +#endif + } + } + return __Pyx__PyObject_CallOneArg(func, arg); +} +#else +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) { + PyObject *result; + PyObject *args = PyTuple_Pack(1, arg); + if (unlikely(!args)) return NULL; + result = __Pyx_PyObject_Call(func, args, NULL); + Py_DECREF(args); + return result; +} +#endif + +/* PyObjectCallNoArg */ + #if CYTHON_COMPILING_IN_CPYTHON +static CYTHON_INLINE PyObject* __Pyx_PyObject_CallNoArg(PyObject *func) { +#if CYTHON_FAST_PYCALL + if (PyFunction_Check(func)) { + return __Pyx_PyFunction_FastCall(func, NULL, 0); + } +#endif +#ifdef __Pyx_CyFunction_USED + if (likely(PyCFunction_Check(func) || PyObject_TypeCheck(func, __pyx_CyFunctionType))) { +#else + if (likely(PyCFunction_Check(func))) { +#endif + if (likely(PyCFunction_GET_FLAGS(func) & METH_NOARGS)) { + return __Pyx_PyObject_CallMethO(func, NULL); + } + } + return __Pyx_PyObject_Call(func, __pyx_empty_tuple, NULL); +} +#endif + +/* SaveResetException */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->exc_type; + *value = tstate->exc_value; + *tb = tstate->exc_traceback; + Py_XINCREF(*type); + Py_XINCREF(*value); + Py_XINCREF(*tb); +} +static CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = type; + tstate->exc_value = value; + tstate->exc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +#endif + +/* PyErrFetchRestore */ + #if CYTHON_FAST_THREAD_STATE +static CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) { + PyObject *tmp_type, *tmp_value, *tmp_tb; + tmp_type = tstate->curexc_type; + tmp_value = tstate->curexc_value; + tmp_tb = tstate->curexc_traceback; + tstate->curexc_type = type; + tstate->curexc_value = value; + 
tstate->curexc_traceback = tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +} +static CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) { + *type = tstate->curexc_type; + *value = tstate->curexc_value; + *tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +} +#endif + +/* bytes_tailmatch */ + static int __Pyx_PyBytes_SingleTailmatch(PyObject* self, PyObject* arg, + Py_ssize_t start, Py_ssize_t end, int direction) { + const char* self_ptr = PyBytes_AS_STRING(self); + Py_ssize_t self_len = PyBytes_GET_SIZE(self); + const char* sub_ptr; + Py_ssize_t sub_len; + int retval; + Py_buffer view; + view.obj = NULL; + if ( PyBytes_Check(arg) ) { + sub_ptr = PyBytes_AS_STRING(arg); + sub_len = PyBytes_GET_SIZE(arg); + } +#if PY_MAJOR_VERSION < 3 + else if ( PyUnicode_Check(arg) ) { + return (int) PyUnicode_Tailmatch(self, arg, start, end, direction); + } +#endif + else { + if (unlikely(PyObject_GetBuffer(self, &view, PyBUF_SIMPLE) == -1)) + return -1; + sub_ptr = (const char*) view.buf; + sub_len = view.len; + } + if (end > self_len) + end = self_len; + else if (end < 0) + end += self_len; + if (end < 0) + end = 0; + if (start < 0) + start += self_len; + if (start < 0) + start = 0; + if (direction > 0) { + if (end-sub_len > start) + start = end - sub_len; + } + if (start + sub_len <= end) + retval = !memcmp(self_ptr+start, sub_ptr, (size_t)sub_len); + else + retval = 0; + if (view.obj) + PyBuffer_Release(&view); + return retval; +} +static int __Pyx_PyBytes_Tailmatch(PyObject* self, PyObject* substr, + Py_ssize_t start, Py_ssize_t end, int direction) { + if (unlikely(PyTuple_Check(substr))) { + Py_ssize_t i, count = PyTuple_GET_SIZE(substr); + for (i = 0; i < count; i++) { + int result; +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + result = __Pyx_PyBytes_SingleTailmatch(self, PyTuple_GET_ITEM(substr, i), + start, end, direction); +#else + PyObject* sub = PySequence_ITEM(substr, i); + if (unlikely(!sub)) return -1; + result = __Pyx_PyBytes_SingleTailmatch(self, sub, start, end, direction); + Py_DECREF(sub); +#endif + if (result) { + return result; + } + } + return 0; + } + return __Pyx_PyBytes_SingleTailmatch(self, substr, start, end, direction); +} + +/* unicode_tailmatch */ + static int __Pyx_PyUnicode_Tailmatch(PyObject* s, PyObject* substr, + Py_ssize_t start, Py_ssize_t end, int direction) { + if (unlikely(PyTuple_Check(substr))) { + Py_ssize_t i, count = PyTuple_GET_SIZE(substr); + for (i = 0; i < count; i++) { + Py_ssize_t result; +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + result = PyUnicode_Tailmatch(s, PyTuple_GET_ITEM(substr, i), + start, end, direction); +#else + PyObject* sub = PySequence_ITEM(substr, i); + if (unlikely(!sub)) return -1; + result = PyUnicode_Tailmatch(s, sub, start, end, direction); + Py_DECREF(sub); +#endif + if (result) { + return (int) result; + } + } + return 0; + } + return (int) PyUnicode_Tailmatch(s, substr, start, end, direction); +} + +/* str_tailmatch */ + static CYTHON_INLINE int __Pyx_PyStr_Tailmatch(PyObject* self, PyObject* arg, Py_ssize_t start, + Py_ssize_t end, int direction) +{ + if (PY_MAJOR_VERSION < 3) + return __Pyx_PyBytes_Tailmatch(self, arg, start, end, direction); + else + return __Pyx_PyUnicode_Tailmatch(self, arg, start, end, direction); +} + +/* GetException */ + #if CYTHON_FAST_THREAD_STATE +static int __Pyx__GetException(PyThreadState *tstate, 
PyObject **type, PyObject **value, PyObject **tb) { +#else +static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) { +#endif + PyObject *local_type, *local_value, *local_tb; +#if CYTHON_FAST_THREAD_STATE + PyObject *tmp_type, *tmp_value, *tmp_tb; + local_type = tstate->curexc_type; + local_value = tstate->curexc_value; + local_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; +#else + PyErr_Fetch(&local_type, &local_value, &local_tb); +#endif + PyErr_NormalizeException(&local_type, &local_value, &local_tb); +#if CYTHON_FAST_THREAD_STATE + if (unlikely(tstate->curexc_type)) +#else + if (unlikely(PyErr_Occurred())) +#endif + goto bad; + #if PY_MAJOR_VERSION >= 3 + if (local_tb) { + if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) + goto bad; + } + #endif + Py_XINCREF(local_tb); + Py_XINCREF(local_type); + Py_XINCREF(local_value); + *type = local_type; + *value = local_value; + *tb = local_tb; +#if CYTHON_FAST_THREAD_STATE + tmp_type = tstate->exc_type; + tmp_value = tstate->exc_value; + tmp_tb = tstate->exc_traceback; + tstate->exc_type = local_type; + tstate->exc_value = local_value; + tstate->exc_traceback = local_tb; + Py_XDECREF(tmp_type); + Py_XDECREF(tmp_value); + Py_XDECREF(tmp_tb); +#else + PyErr_SetExcInfo(local_type, local_value, local_tb); +#endif + return 0; +bad: + *type = 0; + *value = 0; + *tb = 0; + Py_XDECREF(local_type); + Py_XDECREF(local_value); + Py_XDECREF(local_tb); + return -1; +} + +/* None */ + static CYTHON_INLINE void __Pyx_RaiseUnboundLocalError(const char *varname) { + PyErr_Format(PyExc_UnboundLocalError, "local variable '%s' referenced before assignment", varname); +} + +/* RaiseException */ + #if PY_MAJOR_VERSION < 3 +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, + CYTHON_UNUSED PyObject *cause) { + __Pyx_PyThreadState_declare + Py_XINCREF(type); + if (!value || value == Py_None) + value = NULL; + else + Py_INCREF(value); + if (!tb || tb == Py_None) + tb = NULL; + else { + Py_INCREF(tb); + if (!PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto raise_error; + } + } + if (PyType_Check(type)) { +#if CYTHON_COMPILING_IN_PYPY + if (!value) { + Py_INCREF(Py_None); + value = Py_None; + } +#endif + PyErr_NormalizeException(&type, &value, &tb); + } else { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto raise_error; + } + value = type; + type = (PyObject*) Py_TYPE(type); + Py_INCREF(type); + if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto raise_error; + } + } + __Pyx_PyThreadState_assign + __Pyx_ErrRestore(type, value, tb); + return; +raise_error: + Py_XDECREF(value); + Py_XDECREF(type); + Py_XDECREF(tb); + return; +} +#else +static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) { + PyObject* owned_instance = NULL; + if (tb == Py_None) { + tb = 0; + } else if (tb && !PyTraceBack_Check(tb)) { + PyErr_SetString(PyExc_TypeError, + "raise: arg 3 must be a traceback or None"); + goto bad; + } + if (value == Py_None) + value = 0; + if (PyExceptionInstance_Check(type)) { + if (value) { + PyErr_SetString(PyExc_TypeError, + "instance exception may not have a separate value"); + goto bad; + } + value = type; + type = (PyObject*) Py_TYPE(value); + 
} else if (PyExceptionClass_Check(type)) { + PyObject *instance_class = NULL; + if (value && PyExceptionInstance_Check(value)) { + instance_class = (PyObject*) Py_TYPE(value); + if (instance_class != type) { + int is_subclass = PyObject_IsSubclass(instance_class, type); + if (!is_subclass) { + instance_class = NULL; + } else if (unlikely(is_subclass == -1)) { + goto bad; + } else { + type = instance_class; + } + } + } + if (!instance_class) { + PyObject *args; + if (!value) + args = PyTuple_New(0); + else if (PyTuple_Check(value)) { + Py_INCREF(value); + args = value; + } else + args = PyTuple_Pack(1, value); + if (!args) + goto bad; + owned_instance = PyObject_Call(type, args, NULL); + Py_DECREF(args); + if (!owned_instance) + goto bad; + value = owned_instance; + if (!PyExceptionInstance_Check(value)) { + PyErr_Format(PyExc_TypeError, + "calling %R should have returned an instance of " + "BaseException, not %R", + type, Py_TYPE(value)); + goto bad; + } + } + } else { + PyErr_SetString(PyExc_TypeError, + "raise: exception class must be a subclass of BaseException"); + goto bad; + } +#if PY_VERSION_HEX >= 0x03030000 + if (cause) { +#else + if (cause && cause != Py_None) { +#endif + PyObject *fixed_cause; + if (cause == Py_None) { + fixed_cause = NULL; + } else if (PyExceptionClass_Check(cause)) { + fixed_cause = PyObject_CallObject(cause, NULL); + if (fixed_cause == NULL) + goto bad; + } else if (PyExceptionInstance_Check(cause)) { + fixed_cause = cause; + Py_INCREF(fixed_cause); + } else { + PyErr_SetString(PyExc_TypeError, + "exception causes must derive from " + "BaseException"); + goto bad; + } + PyException_SetCause(value, fixed_cause); + } + PyErr_SetObject(type, value); + if (tb) { +#if CYTHON_COMPILING_IN_PYPY + PyObject *tmp_type, *tmp_value, *tmp_tb; + PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb); + Py_INCREF(tb); + PyErr_Restore(tmp_type, tmp_value, tb); + Py_XDECREF(tmp_tb); +#else + PyThreadState *tstate = PyThreadState_GET(); + PyObject* tmp_tb = tstate->curexc_traceback; + if (tb != tmp_tb) { + Py_INCREF(tb); + tstate->curexc_traceback = tb; + Py_XDECREF(tmp_tb); + } +#endif + } +bad: + Py_XDECREF(owned_instance); + return; +} +#endif + +/* GetAttr */ + static CYTHON_INLINE PyObject *__Pyx_GetAttr(PyObject *o, PyObject *n) { +#if CYTHON_COMPILING_IN_CPYTHON +#if PY_MAJOR_VERSION >= 3 + if (likely(PyUnicode_Check(n))) +#else + if (likely(PyString_Check(n))) +#endif + return __Pyx_PyObject_GetAttrStr(o, n); +#endif + return PyObject_GetAttr(o, n); +} + +/* GetAttr3 */ + static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { + PyObject *r = __Pyx_GetAttr(o, n); + if (unlikely(!r)) { + if (!PyErr_ExceptionMatches(PyExc_AttributeError)) + goto bad; + PyErr_Clear(); + r = d; + Py_INCREF(d); + } + return r; +bad: + return NULL; +} + +/* GetItemInt */ + static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { + PyObject *r; + if (!j) return NULL; + r = PyObject_GetItem(o, j); + Py_DECREF(j); + return r; +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (wraparound & unlikely(i < 0)) i += PyList_GET_SIZE(o); + if ((!boundscheck) || likely((0 <= i) & (i < PyList_GET_SIZE(o)))) { + PyObject *r = PyList_GET_ITEM(o, i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); 
+#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS + if (wraparound & unlikely(i < 0)) i += PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely((0 <= i) & (i < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, i); + Py_INCREF(r); + return r; + } + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +#else + return PySequence_GetItem(o, i); +#endif +} +static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list, + CYTHON_NCP_UNUSED int wraparound, + CYTHON_NCP_UNUSED int boundscheck) { +#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS + if (is_list || PyList_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o); + if ((!boundscheck) || (likely((n >= 0) & (n < PyList_GET_SIZE(o))))) { + PyObject *r = PyList_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } + else if (PyTuple_CheckExact(o)) { + Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o); + if ((!boundscheck) || likely((n >= 0) & (n < PyTuple_GET_SIZE(o)))) { + PyObject *r = PyTuple_GET_ITEM(o, n); + Py_INCREF(r); + return r; + } + } else { + PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence; + if (likely(m && m->sq_item)) { + if (wraparound && unlikely(i < 0) && likely(m->sq_length)) { + Py_ssize_t l = m->sq_length(o); + if (likely(l >= 0)) { + i += l; + } else { + if (!PyErr_ExceptionMatches(PyExc_OverflowError)) + return NULL; + PyErr_Clear(); + } + } + return m->sq_item(o, i); + } + } +#else + if (is_list || PySequence_Check(o)) { + return PySequence_GetItem(o, i); + } +#endif + return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); +} + +/* RaiseTooManyValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { + PyErr_Format(PyExc_ValueError, + "too many values to unpack (expected %" CYTHON_FORMAT_SSIZE_T "d)", expected); +} + +/* RaiseNeedMoreValuesToUnpack */ + static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { + PyErr_Format(PyExc_ValueError, + "need more than %" CYTHON_FORMAT_SSIZE_T "d value%.1s to unpack", + index, (index == 1) ? 
"" : "s"); +} + +/* IterFinish */ + static CYTHON_INLINE int __Pyx_IterFinish(void) { +#if CYTHON_FAST_THREAD_STATE + PyThreadState *tstate = PyThreadState_GET(); + PyObject* exc_type = tstate->curexc_type; + if (unlikely(exc_type)) { + if (likely(exc_type == PyExc_StopIteration) || PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration)) { + PyObject *exc_value, *exc_tb; + exc_value = tstate->curexc_value; + exc_tb = tstate->curexc_traceback; + tstate->curexc_type = 0; + tstate->curexc_value = 0; + tstate->curexc_traceback = 0; + Py_DECREF(exc_type); + Py_XDECREF(exc_value); + Py_XDECREF(exc_tb); + return 0; + } else { + return -1; + } + } + return 0; +#else + if (unlikely(PyErr_Occurred())) { + if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) { + PyErr_Clear(); + return 0; + } else { + return -1; + } + } + return 0; +#endif +} + +/* UnpackItemEndCheck */ + static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) { + if (unlikely(retval)) { + Py_DECREF(retval); + __Pyx_RaiseTooManyValuesError(expected); + return -1; + } else { + return __Pyx_IterFinish(); + } + return 0; +} + +/* WriteUnraisableException */ + static void __Pyx_WriteUnraisable(const char *name, CYTHON_UNUSED int clineno, + CYTHON_UNUSED int lineno, CYTHON_UNUSED const char *filename, + int full_traceback, CYTHON_UNUSED int nogil) { + PyObject *old_exc, *old_val, *old_tb; + PyObject *ctx; + __Pyx_PyThreadState_declare +#ifdef WITH_THREAD + PyGILState_STATE state; + if (nogil) + state = PyGILState_Ensure(); +#ifdef _MSC_VER + else state = (PyGILState_STATE)-1; +#endif +#endif + __Pyx_PyThreadState_assign + __Pyx_ErrFetch(&old_exc, &old_val, &old_tb); + if (full_traceback) { + Py_XINCREF(old_exc); + Py_XINCREF(old_val); + Py_XINCREF(old_tb); + __Pyx_ErrRestore(old_exc, old_val, old_tb); + PyErr_PrintEx(1); + } + #if PY_MAJOR_VERSION < 3 + ctx = PyString_FromString(name); + #else + ctx = PyUnicode_FromString(name); + #endif + __Pyx_ErrRestore(old_exc, old_val, old_tb); + if (!ctx) { + PyErr_WriteUnraisable(Py_None); + } else { + PyErr_WriteUnraisable(ctx); + Py_DECREF(ctx); + } +#ifdef WITH_THREAD + if (nogil) + PyGILState_Release(state); +#endif +} + +/* Import */ + static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) { + PyObject *empty_list = 0; + PyObject *module = 0; + PyObject *global_dict = 0; + PyObject *empty_dict = 0; + PyObject *list; + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_import; + py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import); + if (!py_import) + goto bad; + #endif + if (from_list) + list = from_list; + else { + empty_list = PyList_New(0); + if (!empty_list) + goto bad; + list = empty_list; + } + global_dict = PyModule_GetDict(__pyx_m); + if (!global_dict) + goto bad; + empty_dict = PyDict_New(); + if (!empty_dict) + goto bad; + { + #if PY_MAJOR_VERSION >= 3 + if (level == -1) { + if (strchr(__Pyx_MODULE_NAME, '.')) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(1); + if (!py_level) + goto bad; + module = PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, 1); + #endif + if (!module) { + if (!PyErr_ExceptionMatches(PyExc_ImportError)) + goto bad; + PyErr_Clear(); + } + } + level = 0; + } + #endif + if (!module) { + #if PY_VERSION_HEX < 0x03030000 + PyObject *py_level = PyInt_FromLong(level); + if (!py_level) + goto bad; + module = 
PyObject_CallFunctionObjArgs(py_import, + name, global_dict, empty_dict, list, py_level, NULL); + Py_DECREF(py_level); + #else + module = PyImport_ImportModuleLevelObject( + name, global_dict, empty_dict, list, level); + #endif + } + } +bad: + #if PY_VERSION_HEX < 0x03030000 + Py_XDECREF(py_import); + #endif + Py_XDECREF(empty_list); + Py_XDECREF(empty_dict); + return module; +} + +/* ImportFrom */ + static PyObject* __Pyx_ImportFrom(PyObject* module, PyObject* name) { + PyObject* value = __Pyx_PyObject_GetAttrStr(module, name); + if (unlikely(!value) && PyErr_ExceptionMatches(PyExc_AttributeError)) { + PyErr_Format(PyExc_ImportError, + #if PY_MAJOR_VERSION < 3 + "cannot import name %.230s", PyString_AS_STRING(name)); + #else + "cannot import name %S", name); + #endif + } + return value; +} + +/* CalculateMetaclass */ + static PyObject *__Pyx_CalculateMetaclass(PyTypeObject *metaclass, PyObject *bases) { + Py_ssize_t i, nbases = PyTuple_GET_SIZE(bases); + for (i=0; i < nbases; i++) { + PyTypeObject *tmptype; + PyObject *tmp = PyTuple_GET_ITEM(bases, i); + tmptype = Py_TYPE(tmp); +#if PY_MAJOR_VERSION < 3 + if (tmptype == &PyClass_Type) + continue; +#endif + if (!metaclass) { + metaclass = tmptype; + continue; + } + if (PyType_IsSubtype(metaclass, tmptype)) + continue; + if (PyType_IsSubtype(tmptype, metaclass)) { + metaclass = tmptype; + continue; + } + PyErr_SetString(PyExc_TypeError, + "metaclass conflict: " + "the metaclass of a derived class " + "must be a (non-strict) subclass " + "of the metaclasses of all its bases"); + return NULL; + } + if (!metaclass) { +#if PY_MAJOR_VERSION < 3 + metaclass = &PyClass_Type; +#else + metaclass = &PyType_Type; +#endif + } + Py_INCREF((PyObject*) metaclass); + return (PyObject*) metaclass; +} + +/* Py3ClassCreate */ + static PyObject *__Pyx_Py3MetaclassPrepare(PyObject *metaclass, PyObject *bases, PyObject *name, + PyObject *qualname, PyObject *mkw, PyObject *modname, PyObject *doc) { + PyObject *ns; + if (metaclass) { + PyObject *prep = __Pyx_PyObject_GetAttrStr(metaclass, __pyx_n_s_prepare); + if (prep) { + PyObject *pargs = PyTuple_Pack(2, name, bases); + if (unlikely(!pargs)) { + Py_DECREF(prep); + return NULL; + } + ns = PyObject_Call(prep, pargs, mkw); + Py_DECREF(prep); + Py_DECREF(pargs); + } else { + if (unlikely(!PyErr_ExceptionMatches(PyExc_AttributeError))) + return NULL; + PyErr_Clear(); + ns = PyDict_New(); + } + } else { + ns = PyDict_New(); + } + if (unlikely(!ns)) + return NULL; + if (unlikely(PyObject_SetItem(ns, __pyx_n_s_module, modname) < 0)) goto bad; + if (unlikely(PyObject_SetItem(ns, __pyx_n_s_qualname, qualname) < 0)) goto bad; + if (unlikely(doc && PyObject_SetItem(ns, __pyx_n_s_doc, doc) < 0)) goto bad; + return ns; +bad: + Py_DECREF(ns); + return NULL; +} +static PyObject *__Pyx_Py3ClassCreate(PyObject *metaclass, PyObject *name, PyObject *bases, + PyObject *dict, PyObject *mkw, + int calculate_metaclass, int allow_py2_metaclass) { + PyObject *result, *margs; + PyObject *owned_metaclass = NULL; + if (allow_py2_metaclass) { + owned_metaclass = PyObject_GetItem(dict, __pyx_n_s_metaclass); + if (owned_metaclass) { + metaclass = owned_metaclass; + } else if (likely(PyErr_ExceptionMatches(PyExc_KeyError))) { + PyErr_Clear(); + } else { + return NULL; + } + } + if (calculate_metaclass && (!metaclass || PyType_Check(metaclass))) { + metaclass = __Pyx_CalculateMetaclass((PyTypeObject*) metaclass, bases); + Py_XDECREF(owned_metaclass); + if (unlikely(!metaclass)) + return NULL; + owned_metaclass = metaclass; + } + margs = 
PyTuple_Pack(3, name, bases, dict); + if (unlikely(!margs)) { + result = NULL; + } else { + result = PyObject_Call(metaclass, margs, mkw); + Py_DECREF(margs); + } + Py_XDECREF(owned_metaclass); + return result; +} + +/* CodeObjectCache */ + static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) { + int start = 0, mid = 0, end = count - 1; + if (end >= 0 && code_line > entries[end].code_line) { + return count; + } + while (start < end) { + mid = start + (end - start) / 2; + if (code_line < entries[mid].code_line) { + end = mid; + } else if (code_line > entries[mid].code_line) { + start = mid + 1; + } else { + return mid; + } + } + if (code_line <= entries[mid].code_line) { + return mid; + } else { + return mid + 1; + } +} +static PyCodeObject *__pyx_find_code_object(int code_line) { + PyCodeObject* code_object; + int pos; + if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) { + return NULL; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) { + return NULL; + } + code_object = __pyx_code_cache.entries[pos].code_object; + Py_INCREF(code_object); + return code_object; +} +static void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) { + int pos, i; + __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries; + if (unlikely(!code_line)) { + return; + } + if (unlikely(!entries)) { + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry)); + if (likely(entries)) { + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = 64; + __pyx_code_cache.count = 1; + entries[0].code_line = code_line; + entries[0].code_object = code_object; + Py_INCREF(code_object); + } + return; + } + pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line); + if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) { + PyCodeObject* tmp = entries[pos].code_object; + entries[pos].code_object = code_object; + Py_DECREF(tmp); + return; + } + if (__pyx_code_cache.count == __pyx_code_cache.max_count) { + int new_max = __pyx_code_cache.max_count + 64; + entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc( + __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry)); + if (unlikely(!entries)) { + return; + } + __pyx_code_cache.entries = entries; + __pyx_code_cache.max_count = new_max; + } + for (i=__pyx_code_cache.count; i>pos; i--) { + entries[i] = entries[i-1]; + } + entries[pos].code_line = code_line; + entries[pos].code_object = code_object; + __pyx_code_cache.count++; + Py_INCREF(code_object); +} + +/* AddTraceback */ + #include "compile.h" +#include "frameobject.h" +#include "traceback.h" +static PyCodeObject* __Pyx_CreateCodeObjectForTraceback( + const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyObject *py_srcfile = 0; + PyObject *py_funcname = 0; + #if PY_MAJOR_VERSION < 3 + py_srcfile = PyString_FromString(filename); + #else + py_srcfile = PyUnicode_FromString(filename); + #endif + if (!py_srcfile) goto bad; + if (c_line) { + #if PY_MAJOR_VERSION < 3 + py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #else + py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, c_line); + #endif + } + else { + #if PY_MAJOR_VERSION < 3 + 
py_funcname = PyString_FromString(funcname); + #else + py_funcname = PyUnicode_FromString(funcname); + #endif + } + if (!py_funcname) goto bad; + py_code = __Pyx_PyCode_New( + 0, + 0, + 0, + 0, + 0, + __pyx_empty_bytes, /*PyObject *code,*/ + __pyx_empty_tuple, /*PyObject *consts,*/ + __pyx_empty_tuple, /*PyObject *names,*/ + __pyx_empty_tuple, /*PyObject *varnames,*/ + __pyx_empty_tuple, /*PyObject *freevars,*/ + __pyx_empty_tuple, /*PyObject *cellvars,*/ + py_srcfile, /*PyObject *filename,*/ + py_funcname, /*PyObject *name,*/ + py_line, + __pyx_empty_bytes /*PyObject *lnotab*/ + ); + Py_DECREF(py_srcfile); + Py_DECREF(py_funcname); + return py_code; +bad: + Py_XDECREF(py_srcfile); + Py_XDECREF(py_funcname); + return NULL; +} +static void __Pyx_AddTraceback(const char *funcname, int c_line, + int py_line, const char *filename) { + PyCodeObject *py_code = 0; + PyFrameObject *py_frame = 0; + py_code = __pyx_find_code_object(c_line ? c_line : py_line); + if (!py_code) { + py_code = __Pyx_CreateCodeObjectForTraceback( + funcname, c_line, py_line, filename); + if (!py_code) goto bad; + __pyx_insert_code_object(c_line ? c_line : py_line, py_code); + } + py_frame = PyFrame_New( + PyThreadState_GET(), /*PyThreadState *tstate,*/ + py_code, /*PyCodeObject *code,*/ + __pyx_d, /*PyObject *globals,*/ + 0 /*PyObject *locals*/ + ); + if (!py_frame) goto bad; + __Pyx_PyFrame_SetLineNumber(py_frame, py_line); + PyTraceBack_Here(py_frame); +bad: + Py_XDECREF(py_code); + Py_XDECREF(py_frame); +} + +/* CIntFromPyVerify */ + #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0) +#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\ + __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1) +#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\ + {\ + func_type value = func_value;\ + if (sizeof(target_type) < sizeof(func_type)) {\ + if (unlikely(value != (func_type) (target_type) value)) {\ + func_type zero = 0;\ + if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\ + return (target_type) -1;\ + if (is_unsigned && unlikely(value < zero))\ + goto raise_neg_overflow;\ + else\ + goto raise_overflow;\ + }\ + }\ + return (target_type) value;\ + } + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(int) < sizeof(long)) { + return PyInt_FromLong((long) value); + } else if (sizeof(int) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(int) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(int), + little, !is_unsigned); + } +} + +/* CIntToPy */ + static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; + if (is_unsigned) { + if (sizeof(long) < sizeof(long)) { + return 
PyInt_FromLong((long) value); + } else if (sizeof(long) <= sizeof(unsigned long)) { + return PyLong_FromUnsignedLong((unsigned long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value); +#endif + } + } else { + if (sizeof(long) <= sizeof(long)) { + return PyInt_FromLong((long) value); +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + return PyLong_FromLongLong((PY_LONG_LONG) value); +#endif + } + } + { + int one = 1; int little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&value; + return _PyLong_FromByteArray(bytes, sizeof(long), + little, !is_unsigned); + } +} + +/* CIntFromPy */ + static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) { + const int neg_one = (int) -1, const_zero = (int) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(int) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (int) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (int) 0; + case 1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0]) + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) { + return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) { + return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) { + return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (int) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(int) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) +#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch 
(Py_SIZE(x)) { + case 0: return (int) 0; + case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(int, digit, +digits[0]) + case -2: + if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(int) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(int) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(int) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) { + return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]))); + } + } + break; + } +#endif + if (sizeof(int) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + "_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + int val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && 
!PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (int) -1; + } + } else { + int val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (int) -1; + val = __Pyx_PyInt_As_int(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to int"); + return (int) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to int"); + return (int) -1; +} + +/* CIntFromPy */ + static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) { + const long neg_one = (long) -1, const_zero = (long) 0; + const int is_unsigned = neg_one > const_zero; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_Check(x))) { + if (sizeof(long) < sizeof(long)) { + __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x)) + } else { + long val = PyInt_AS_LONG(x); + if (is_unsigned && unlikely(val < 0)) { + goto raise_neg_overflow; + } + return (long) val; + } + } else +#endif + if (likely(PyLong_Check(x))) { + if (is_unsigned) { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case 1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0]) + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) { + return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) { + return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) { + return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])); + } + } + break; + } +#endif +#if CYTHON_COMPILING_IN_CPYTHON + if (unlikely(Py_SIZE(x) < 0)) { + goto raise_neg_overflow; + } +#else + { + int result = PyObject_RichCompareBool(x, Py_False, Py_LT); + if (unlikely(result < 0)) + return (long) -1; + if (unlikely(result == 1)) + goto raise_neg_overflow; + } +#endif + if (sizeof(long) <= sizeof(unsigned long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x)) 
+#endif + } + } else { +#if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)x)->ob_digit; + switch (Py_SIZE(x)) { + case 0: return (long) 0; + case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0])) + case 1: __PYX_VERIFY_RETURN_INT(long, digit, +digits[0]) + case -2: + if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 2: + if (8 * sizeof(long) > 1 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -3: + if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 3: + if (8 * sizeof(long) > 2 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case -4: + if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + case 4: + if (8 * sizeof(long) > 3 * PyLong_SHIFT) { + if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) { + __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]))) + } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) { + return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]))); + } + } + break; + } +#endif + if (sizeof(long) <= sizeof(long)) { + __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x)) +#ifdef HAVE_LONG_LONG + } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) { + __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x)) +#endif + } + } + { +#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray) + PyErr_SetString(PyExc_RuntimeError, + 
"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers"); +#else + long val; + PyObject *v = __Pyx_PyNumber_IntOrLong(x); + #if PY_MAJOR_VERSION < 3 + if (likely(v) && !PyLong_Check(v)) { + PyObject *tmp = v; + v = PyNumber_Long(tmp); + Py_DECREF(tmp); + } + #endif + if (likely(v)) { + int one = 1; int is_little = (int)*(unsigned char *)&one; + unsigned char *bytes = (unsigned char *)&val; + int ret = _PyLong_AsByteArray((PyLongObject *)v, + bytes, sizeof(val), + is_little, !is_unsigned); + Py_DECREF(v); + if (likely(!ret)) + return val; + } +#endif + return (long) -1; + } + } else { + long val; + PyObject *tmp = __Pyx_PyNumber_IntOrLong(x); + if (!tmp) return (long) -1; + val = __Pyx_PyInt_As_long(tmp); + Py_DECREF(tmp); + return val; + } +raise_overflow: + PyErr_SetString(PyExc_OverflowError, + "value too large to convert to long"); + return (long) -1; +raise_neg_overflow: + PyErr_SetString(PyExc_OverflowError, + "can't convert negative value to long"); + return (long) -1; +} + +/* CheckBinaryVersion */ + static int __Pyx_check_binary_version(void) { + char ctversion[4], rtversion[4]; + PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION); + PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion()); + if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) { + char message[200]; + PyOS_snprintf(message, sizeof(message), + "compiletime version %s of module '%.100s' " + "does not match runtime version %s", + ctversion, __Pyx_MODULE_NAME, rtversion); + return PyErr_WarnEx(NULL, message, 1); + } + return 0; +} + +/* InitStrings */ + static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { + while (t->p) { + #if PY_MAJOR_VERSION < 3 + if (t->is_unicode) { + *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); + } else if (t->intern) { + *t->p = PyString_InternFromString(t->s); + } else { + *t->p = PyString_FromStringAndSize(t->s, t->n - 1); + } + #else + if (t->is_unicode | t->is_str) { + if (t->intern) { + *t->p = PyUnicode_InternFromString(t->s); + } else if (t->encoding) { + *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); + } else { + *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); + } + } else { + *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); + } + #endif + if (!*t->p) + return -1; + ++t; + } + return 0; +} + +static CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) { + return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str)); +} +static CYTHON_INLINE char* __Pyx_PyObject_AsString(PyObject* o) { + Py_ssize_t ignore; + return __Pyx_PyObject_AsStringAndSize(o, &ignore); +} +static CYTHON_INLINE char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) { +#if CYTHON_COMPILING_IN_CPYTHON && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT) + if ( +#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + __Pyx_sys_getdefaultencoding_not_ascii && +#endif + PyUnicode_Check(o)) { +#if PY_VERSION_HEX < 0x03030000 + char* defenc_c; + PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL); + if (!defenc) return NULL; + defenc_c = PyBytes_AS_STRING(defenc); +#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII + { + char* end = defenc_c + PyBytes_GET_SIZE(defenc); + char* c; + for (c = defenc_c; c < end; c++) { + if ((unsigned char) (*c) >= 128) { + PyUnicode_AsASCIIString(o); + return NULL; + } + } + } +#endif + *length = PyBytes_GET_SIZE(defenc); + return defenc_c; +#else + if (__Pyx_PyUnicode_READY(o) == -1) return NULL; +#if 
__PYX_DEFAULT_STRING_ENCODING_IS_ASCII + if (PyUnicode_IS_ASCII(o)) { + *length = PyUnicode_GET_LENGTH(o); + return PyUnicode_AsUTF8(o); + } else { + PyUnicode_AsASCIIString(o); + return NULL; + } +#else + return PyUnicode_AsUTF8AndSize(o, length); +#endif +#endif + } else +#endif +#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE)) + if (PyByteArray_Check(o)) { + *length = PyByteArray_GET_SIZE(o); + return PyByteArray_AS_STRING(o); + } else +#endif + { + char* result; + int r = PyBytes_AsStringAndSize(o, &result, length); + if (unlikely(r < 0)) { + return NULL; + } else { + return result; + } + } +} +static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { + int is_true = x == Py_True; + if (is_true | (x == Py_False) | (x == Py_None)) return is_true; + else return PyObject_IsTrue(x); +} +static CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) { +#if CYTHON_USE_TYPE_SLOTS + PyNumberMethods *m; +#endif + const char *name = NULL; + PyObject *res = NULL; +#if PY_MAJOR_VERSION < 3 + if (PyInt_Check(x) || PyLong_Check(x)) +#else + if (PyLong_Check(x)) +#endif + return __Pyx_NewRef(x); +#if CYTHON_USE_TYPE_SLOTS + m = Py_TYPE(x)->tp_as_number; + #if PY_MAJOR_VERSION < 3 + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Int(x); + } + else if (m && m->nb_long) { + name = "long"; + res = PyNumber_Long(x); + } + #else + if (m && m->nb_int) { + name = "int"; + res = PyNumber_Long(x); + } + #endif +#else + res = PyNumber_Int(x); +#endif + if (res) { +#if PY_MAJOR_VERSION < 3 + if (!PyInt_Check(res) && !PyLong_Check(res)) { +#else + if (!PyLong_Check(res)) { +#endif + PyErr_Format(PyExc_TypeError, + "__%.4s__ returned non-%.4s (type %.200s)", + name, name, Py_TYPE(res)->tp_name); + Py_DECREF(res); + return NULL; + } + } + else if (!PyErr_Occurred()) { + PyErr_SetString(PyExc_TypeError, + "an integer is required"); + } + return res; +} +static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { + Py_ssize_t ival; + PyObject *x; +#if PY_MAJOR_VERSION < 3 + if (likely(PyInt_CheckExact(b))) { + if (sizeof(Py_ssize_t) >= sizeof(long)) + return PyInt_AS_LONG(b); + else + return PyInt_AsSsize_t(x); + } +#endif + if (likely(PyLong_CheckExact(b))) { + #if CYTHON_USE_PYLONG_INTERNALS + const digit* digits = ((PyLongObject*)b)->ob_digit; + const Py_ssize_t size = Py_SIZE(b); + if (likely(__Pyx_sst_abs(size) <= 1)) { + ival = likely(size) ? 
digits[0] : 0; + if (size == -1) ival = -ival; + return ival; + } else { + switch (size) { + case 2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -2: + if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -3: + if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case 4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + case -4: + if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) { + return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])); + } + break; + } + } + #endif + return PyLong_AsSsize_t(b); + } + x = PyNumber_Index(b); + if (!x) return -1; + ival = PyInt_AsSsize_t(x); + Py_DECREF(x); + return ival; +} +static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { + return PyInt_FromSize_t(ival); +} + + +#endif /* Py_PYTHON_H */ diff --git a/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd new file mode 100644 index 00000000..2bf8b2d6 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd @@ -0,0 +1,99 @@ +from cpython.mem cimport PyMem_Malloc, PyMem_Free + +cdef extern from *: + ctypedef void PyObject + ctypedef struct PyCodeObject: + int co_argcount; # arguments, except *args */ + int co_kwonlyargcount; # keyword only arguments */ + int co_nlocals; # local variables */ + int co_stacksize; # entries needed for evaluation stack */ + int co_flags; # CO_..., see below */ + int co_firstlineno; # first source line number */ + PyObject *co_code; # instruction opcodes */ + PyObject *co_consts; # list (constants used) */ + PyObject *co_names; # list of strings (names used) */ + PyObject *co_varnames; # tuple of strings (local variable names) */ + PyObject *co_freevars; # tuple of strings (free variable names) */ + PyObject *co_cellvars; # tuple of strings (cell variable names) */ + unsigned char *co_cell2arg; # Maps cell vars which are arguments. */ + PyObject *co_filename; # unicode (where it was loaded from) */ + PyObject *co_name; # unicode (name, for reference) */ + PyObject *co_lnotab; # string (encoding addr<->lineno mapping) See + # Objects/lnotab_notes.txt for details. 
*/ + void *co_zombieframe; # for optimization only (see frameobject.c) */ + PyObject *co_weakreflist; # to support weakrefs to code objects */ + void *co_extra; + +cdef extern from "frameobject.h": + ctypedef struct PyFrameObject: + PyCodeObject *f_code # code segment + PyObject *f_builtins # builtin symbol table (PyDictObject) + PyObject *f_globals # global symbol table (PyDictObject) */ + PyObject *f_locals # local symbol table (any mapping) */ + PyObject **f_valuestack # + PyObject **f_stacktop + PyObject *f_trace # Trace function */ + PyObject *f_exc_type + PyObject *f_exc_value + PyObject *f_exc_traceback + PyObject *f_gen; + + int f_lasti; #/* Last instruction if called */ + int f_lineno; #/* Current line number */ + int f_iblock; #/* index in f_blockstack */ + char f_executing; #/* whether the frame is still executing */ + PyObject *f_localsplus[1]; + +cdef extern from "code.h": + ctypedef void freefunc(void *) + int _PyCode_GetExtra(PyObject *code, Py_ssize_t index, void **extra) + int _PyCode_SetExtra(PyObject *code, Py_ssize_t index, void *extra) + +cdef extern from "Python.h": + void Py_INCREF(object o) + void Py_DECREF(object o) + object PyImport_ImportModule(char *name) + PyObject* PyObject_CallFunction(PyObject *callable, const char *format, ...) + object PyObject_GetAttrString(object o, char *attr_name) + +cdef extern from "pystate.h": + ctypedef PyObject* _PyFrameEvalFunction(PyFrameObject *frame, int exc) + + ctypedef struct PyInterpreterState: + PyInterpreterState *next + PyInterpreterState *tstate_head + + PyObject *modules + + PyObject *modules_by_index + PyObject *sysdict + PyObject *builtins + PyObject *importlib + + PyObject *codec_search_path + PyObject *codec_search_cache + PyObject *codec_error_registry + int codecs_initialized + int fscodec_initialized + + int dlopenflags + + PyObject *builtins_copy + PyObject *import_func + # Initialized to PyEval_EvalFrameDefault(). + _PyFrameEvalFunction eval_frame + + ctypedef struct PyThreadState: + PyThreadState *prev + PyThreadState *next + PyInterpreterState *interp + # ... + + PyThreadState *PyThreadState_Get() + +cdef extern from "ceval.h": + int _PyEval_RequestCodeExtraIndex(freefunc) + PyFrameObject *PyEval_GetFrame() + PyObject* PyEval_CallFunction(PyObject *callable, const char *format, ...) 
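    # These pystate.h/ceval.h declarations mirror the PEP 523 frame-evaluation API added in
    # CPython 3.6: every PyInterpreterState carries an `eval_frame` pointer, initialized to
    # _PyEval_EvalFrameDefault (declared just below). Swapping that pointer lets a debugger
    # see each frame before it executes; frame_eval_func()/stop_frame_eval() in
    # pydevd_frame_evaluator.pyx further down install and remove exactly such a hook.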
+ + PyObject* _PyEval_EvalFrameDefault(PyFrameObject *frame, int exc) diff --git a/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pyx b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pyx new file mode 100644 index 00000000..2bc0dcf0 --- /dev/null +++ b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pyx @@ -0,0 +1,159 @@ +import dis +from _pydev_imps._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo +from _pydevd_bundle.pydevd_comm import get_global_debugger +from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE +from _pydevd_frame_eval.pydevd_frame_tracing import pydev_trace_code_wrapper, update_globals_dict, dummy_tracing_holder +from _pydevd_frame_eval.pydevd_modify_bytecode import insert_code +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER + +AVOID_RECURSION = [ + 'pydevd_additional_thread_info_regular.py', + 'threading.py', + '_weakrefset.py' +] + +get_file_type = DONT_TRACE.get +NO_BREAKS_IN_FRAME = 1 + + +class UseCodeExtraHolder: + # Use this flag in order to disable co_extra field + use_code_extra = True + # Keep the index of co_extra in a thread-local storage + local = threading.local() + local.index = -1 + + +def is_use_code_extra(): + return UseCodeExtraHolder.use_code_extra + + +# enable using `co_extra` field in order to cache frames without breakpoints +def enable_cache_frames_without_breaks(new_value): + UseCodeExtraHolder.use_code_extra = new_value + + +cpdef dummy_trace_dispatch(frame, str event, arg): + return None + + +cdef PyObject* get_bytecode_while_frame_eval(PyFrameObject *frame_obj, int exc): + frame = frame_obj + cdef str filepath = frame.f_code.co_filename + cdef bint skip_file = exc + cdef void* extra = NULL + cdef int* extra_value = NULL + cdef int thread_index = -1 + + if is_use_code_extra is None or AVOID_RECURSION is None: + # Sometimes during process shutdown these global variables become None + return _PyEval_EvalFrameDefault(frame_obj, exc) + + if is_use_code_extra(): + extra = PyMem_Malloc(sizeof(int)) + try: + thread_index = UseCodeExtraHolder.local.index + except: + pass + if thread_index != -1: + _PyCode_GetExtra( frame.f_code, thread_index, &extra) + if extra is not NULL: + extra_value = extra + if extra_value[0] == NO_BREAKS_IN_FRAME: + return _PyEval_EvalFrameDefault(frame_obj, exc) + + for file in AVOID_RECURSION: + # we can't call any other function without this check, because we can get stack overflow + for path_separator in ('/', '\\'): + if filepath.endswith(path_separator + file): + skip_file = True + break + + if not skip_file: + try: + t = threading.currentThread() + except: + skip_file = True + + if not skip_file: + try: + additional_info = t.additional_info + if additional_info is None: + raise AttributeError() + except: + additional_info = t.additional_info = PyDBAdditionalThreadInfo() + # request `co_extra` inside every new thread + thread_index = _PyEval_RequestCodeExtraIndex(PyMem_Free) + UseCodeExtraHolder.local.index = thread_index + + if additional_info.is_tracing or getattr(t, 'pydev_do_not_trace', None): + return _PyEval_EvalFrameDefault(frame_obj, exc) + + additional_info.is_tracing = True + try: + abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + + file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug anything 
related to pydevd + if file_type is not None: + additional_info.is_tracing = False + return _PyEval_EvalFrameDefault(frame_obj, exc) + + was_break = False + main_debugger = get_global_debugger() + breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1]) + code_object = frame.f_code + if breakpoints: + breakpoints_to_update = [] + for offset, line in dis.findlinestarts(code_object): + if line in breakpoints: + breakpoint = breakpoints[line] + if code_object not in breakpoint.code_objects: + # This check is needed for generator functions, because after each yield the new frame is created + # but the former code object is used + success, new_code = insert_code(frame.f_code, pydev_trace_code_wrapper.__code__, line) + if success: + breakpoints_to_update.append(breakpoint) + Py_INCREF(new_code) + frame_obj.f_code = new_code + was_break = True + else: + main_debugger.set_trace_for_frame_and_parents(frame) + was_break = False + break + if was_break: + update_globals_dict(frame.f_globals) + for bp in breakpoints_to_update: + bp.code_objects.add(frame.f_code) + else: + if main_debugger.has_plugin_line_breaks: + can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame) + if can_not_skip: + was_break = True + main_debugger.SetTrace(main_debugger.trace_dispatch) + main_debugger.set_trace_for_frame_and_parents(frame) + + if not was_break: + extra_value = PyMem_Malloc(sizeof(int)) + extra_value[0] = NO_BREAKS_IN_FRAME + try: + thread_index = UseCodeExtraHolder.local.index + except: + pass + if thread_index != -1: + _PyCode_SetExtra( code_object, thread_index, extra_value) + + additional_info.is_tracing = False + return _PyEval_EvalFrameDefault(frame_obj, exc) + +def frame_eval_func(): + cdef PyThreadState *state = PyThreadState_Get() + state.interp.eval_frame = get_bytecode_while_frame_eval + global dummy_tracing_holder + dummy_tracing_holder.set_trace_func(dummy_trace_dispatch) + +def stop_frame_eval(): + cdef PyThreadState *state = PyThreadState_Get() + state.interp.eval_frame = _PyEval_EvalFrameDefault diff --git a/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py new file mode 100644 index 00000000..c58f71cb --- /dev/null +++ b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_tracing.py @@ -0,0 +1,86 @@ +import sys + +from _pydev_bundle import pydev_log +from _pydev_imps._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_comm import get_global_debugger, CMD_SET_BREAK +from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER +from _pydevd_bundle.pydevd_frame import handle_breakpoint_condition, handle_breakpoint_expression + + +class DummyTracingHolder: + dummy_trace_func = None + + def set_trace_func(self, trace_func): + self.dummy_trace_func = trace_func + + +dummy_tracing_holder = DummyTracingHolder() + + +def update_globals_dict(globals_dict): + new_globals = {'_pydev_stop_at_break': _pydev_stop_at_break} + globals_dict.update(new_globals) + + +def handle_breakpoint(frame, thread, global_debugger, breakpoint): + # ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint + new_frame = frame + condition = breakpoint.condition + info = thread.additional_info + if condition is not None: + handle_breakpoint_condition(global_debugger, info, breakpoint, new_frame, False) + + if breakpoint.expression is not None: + handle_breakpoint_expression(breakpoint, info, new_frame) + + if breakpoint.suspend_policy == "ALL": + 
global_debugger.suspend_all_other_threads(thread) + + return True + + +def _get_line_for_frame(frame): + # it's absolutely necessary to reset tracing function for frame in order to get the real line number + tracing_func = frame.f_trace + frame.f_trace = None + line = frame.f_lineno + frame.f_trace = tracing_func + return line + + +def _pydev_stop_at_break(): + frame = sys._getframe(1) + t = threading.currentThread() + if t.additional_info.is_tracing: + return + + if t.additional_info.pydev_step_cmd == -1 and frame.f_trace in (None, dummy_tracing_holder.dummy_trace_func): + # do not handle breakpoints while stepping, because they're handled by old tracing function + t.additional_info.is_tracing = True + debugger = get_global_debugger() + + try: + abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame) + filename = abs_path_real_path_and_base[1] + + breakpoints_for_file = debugger.breakpoints.get(filename) + line = _get_line_for_frame(frame) + try: + breakpoint = breakpoints_for_file[line] + except KeyError: + pydev_log.debug("Couldn't find breakpoint in the file {} on line {}".format(frame.f_code.co_filename, line)) + return + if breakpoint and handle_breakpoint(frame, t, debugger, breakpoint): + pydev_log.debug("Suspending at breakpoint in file: {} on line {}".format(frame.f_code.co_filename, line)) + debugger.set_suspend(t, CMD_SET_BREAK) + debugger.do_wait_suspend(t, frame, 'line', None, "frame_eval") + + t.additional_info.is_tracing = False + + +def pydev_trace_code_wrapper(): + # import this module again, because it's inserted inside user's code + global _pydev_stop_at_break + _pydev_stop_at_break() diff --git a/ptvsd/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py new file mode 100644 index 00000000..14f3a38a --- /dev/null +++ b/ptvsd/pydevd/_pydevd_frame_eval/pydevd_modify_bytecode.py @@ -0,0 +1,212 @@ +import dis +import traceback +from opcode import opmap, EXTENDED_ARG, HAVE_ARGUMENT +from types import CodeType + +MAX_BYTE = 255 + + +def _add_attr_values_from_insert_to_original(original_code, insert_code, insert_code_obj, attribute_name, op_list): + """ + This function appends values of the attribute `attribute_name` of the inserted code to the original values, + and changes indexes inside inserted code. If some bytecode instruction in the inserted code used to call argument + number i, after modification it calls argument n + i, where n - length of the values in the original code. + So it helps to avoid variables mixing between two pieces of code. 
+ + :param original_code: code to modify + :param insert_code: code to insert + :param insert_code_obj: bytes sequence of inserted code, which should be modified too + :param attribute_name: name of attribute to modify ('co_names', 'co_consts' or 'co_varnames') + :param op_list: sequence of bytecodes whose arguments should be changed + :return: modified bytes sequence of the code to insert and new values of the attribute `attribute_name` for + original code + """ + orig_value = getattr(original_code, attribute_name) + insert_value = getattr(insert_code, attribute_name) + orig_names_len = len(orig_value) + code_with_new_values = list(insert_code_obj) + offset = 0 + while offset < len(code_with_new_values): + op = code_with_new_values[offset] + if op in op_list: + new_val = code_with_new_values[offset + 1] + orig_names_len + if new_val > MAX_BYTE: + code_with_new_values[offset + 1] = new_val & MAX_BYTE + code_with_new_values = code_with_new_values[:offset] + [EXTENDED_ARG, new_val >> 8] + \ + code_with_new_values[offset:] + offset += 2 + else: + code_with_new_values[offset + 1] = new_val + offset += 2 + new_values = orig_value + insert_value + return bytes(code_with_new_values), new_values + + +def _modify_new_lines(code_to_modify, all_inserted_code): + """ + Update new lines in order to hide inserted code inside the original code + :param code_to_modify: code to modify + :param all_inserted_code: list of tuples (offset, list of code instructions) with all inserted pieces of code + :return: bytes sequence of code with updated lines offsets + """ + new_list = list(code_to_modify.co_lnotab) + abs_offset = prev_abs_offset = 0 + i = 0 + while i < len(new_list): + prev_abs_offset = abs_offset + abs_offset += new_list[i] + for (inserted_offset, inserted_code) in all_inserted_code: + if prev_abs_offset <= inserted_offset < abs_offset: + size_of_inserted = len(inserted_code) + new_list[i] += size_of_inserted + abs_offset += size_of_inserted + if new_list[i] > MAX_BYTE: + new_list[i] = new_list[i] - MAX_BYTE + new_list = new_list[:i] + [MAX_BYTE, 0] + new_list[i:] + i += 2 + return bytes(new_list) + + +def _unpack_opargs(code, inserted_code_list, current_index): + """ + Modified version of `_unpack_opargs` function from module `dis`. + We have to use it, because sometimes code can be in an inconsistent state: if EXTENDED_ARG + operator was introduced into the code, but it hasn't been inserted into `code_list` yet. + In this case we can't use standard `_unpack_opargs` and we should check whether there are + some new operators in `inserted_code_list`. + """ + extended_arg = 0 + for i in range(0, len(code), 2): + op = code[i] + if op >= HAVE_ARGUMENT: + if not extended_arg: + # in case if we added EXTENDED_ARG, but haven't inserted it to the source code yet. 
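                # (Background on the encoding this loop deals with: CPython 3.6 wordcode uses two
                # bytes per instruction -- opcode plus one argument byte -- so arguments above 255
                # are expressed by prefixing EXTENDED_ARG instructions that carry the higher bytes;
                # e.g. an argument of 300 is emitted as EXTENDED_ARG 1 followed by the opcode with
                # argument 44, since 1 * 256 + 44 == 300.)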
+ for code_index in range(current_index, len(inserted_code_list)): + inserted_offset, inserted_code = inserted_code_list[code_index] + if inserted_offset == i and inserted_code[0] == EXTENDED_ARG: + extended_arg = inserted_code[1] << 8 + arg = code[i+1] | extended_arg + extended_arg = (arg << 8) if op == EXTENDED_ARG else 0 + else: + arg = None + yield (i, op, arg) + + +def _update_label_offsets(code_obj, breakpoint_offset, breakpoint_code_list): + """ + Update labels for the relative and absolute jump targets + :param code_obj: code to modify + :param breakpoint_offset: offset for the inserted code + :param breakpoint_code_list: size of the inserted code + :return: bytes sequence with modified labels; list of tuples (resulting offset, list of code instructions) with + information about all inserted pieces of code + """ + inserted_code = list() + # the list with all inserted pieces of code + inserted_code.append((breakpoint_offset, breakpoint_code_list)) + code_list = list(code_obj) + j = 0 + + while j < len(inserted_code): + current_offset, current_code_list = inserted_code[j] + offsets_for_modification = [] + + for offset, op, arg in _unpack_opargs(code_list, inserted_code, j): + if arg is not None: + if op in dis.hasjrel: + # has relative jump target + label = offset + 2 + arg + if offset < current_offset < label: + # change labels for relative jump targets if code was inserted inside + offsets_for_modification.append(offset) + elif op in dis.hasjabs: + # change label for absolute jump if code was inserted before it + if current_offset < arg: + offsets_for_modification.append(offset) + for i in range(0, len(code_list), 2): + op = code_list[i] + if i in offsets_for_modification and op >= dis.HAVE_ARGUMENT: + new_arg = code_list[i + 1] + len(current_code_list) + if new_arg <= MAX_BYTE: + code_list[i + 1] = new_arg + else: + # handle bytes overflow + if i - 2 > 0 and code_list[i - 2] == EXTENDED_ARG and code_list[i - 1] < MAX_BYTE: + # if new argument > 255 and EXTENDED_ARG already exists we need to increase it's argument + code_list[i - 1] += 1 + else: + # if there isn't EXTENDED_ARG operator yet we have to insert the new operator + extended_arg_code = [EXTENDED_ARG, new_arg >> 8] + inserted_code.append((i, extended_arg_code)) + code_list[i + 1] = new_arg & MAX_BYTE + + code_list = code_list[:current_offset] + current_code_list + code_list[current_offset:] + + for k in range(len(inserted_code)): + offset, inserted_code_list = inserted_code[k] + if current_offset < offset: + inserted_code[k] = (offset + len(current_code_list), inserted_code_list) + j += 1 + + return bytes(code_list), inserted_code + + +def _return_none_fun(): + return None + + +def insert_code(code_to_modify, code_to_insert, before_line): + """ + Insert piece of code `code_to_insert` to `code_to_modify` right inside the line `before_line` before the + instruction on this line by modifying original bytecode + + :param code_to_modify: Code to modify + :param code_to_insert: Code to insert + :param before_line: Number of line for code insertion + :return: boolean flag whether insertion was successful, modified code + """ + linestarts = dict(dis.findlinestarts(code_to_modify)) + if before_line not in linestarts.values(): + return code_to_modify + offset = None + for off, line_no in linestarts.items(): + if line_no == before_line: + offset = off + + return_none_size = len(_return_none_fun.__code__.co_code) + code_to_insert_obj = code_to_insert.co_code[:-return_none_size] + try: + code_to_insert_obj, new_names = \ + 
_add_attr_values_from_insert_to_original(code_to_modify, code_to_insert, code_to_insert_obj, 'co_names', + dis.hasname) + code_to_insert_obj, new_consts = \ + _add_attr_values_from_insert_to_original(code_to_modify, code_to_insert, code_to_insert_obj, 'co_consts', + [opmap['LOAD_CONST']]) + code_to_insert_obj, new_vars = \ + _add_attr_values_from_insert_to_original(code_to_modify, code_to_insert, code_to_insert_obj, 'co_varnames', + dis.haslocal) + new_bytes, all_inserted_code = _update_label_offsets(code_to_modify.co_code, offset, list(code_to_insert_obj)) + + new_lnotab = _modify_new_lines(code_to_modify, all_inserted_code) + except ValueError: + traceback.print_exc() + return False, code_to_modify + + new_code = CodeType( + code_to_modify.co_argcount, # integer + code_to_modify.co_kwonlyargcount, # integer + len(new_vars), # integer + code_to_modify.co_stacksize, # integer + code_to_modify.co_flags, # integer + new_bytes, # bytes + new_consts, # tuple + new_names, # tuple + new_vars, # tuple + code_to_modify.co_filename, # string + code_to_modify.co_name, # string + code_to_modify.co_firstlineno, # integer + new_lnotab, # bytes + code_to_modify.co_freevars, # tuple + code_to_modify.co_cellvars # tuple + ) + return True, new_code diff --git a/ptvsd/pydevd/appveyor.yml b/ptvsd/pydevd/appveyor.yml new file mode 100644 index 00000000..9279e7c6 --- /dev/null +++ b/ptvsd/pydevd/appveyor.yml @@ -0,0 +1,76 @@ +environment: + + matrix: + + # For Python versions available on Appveyor, see + # http://www.appveyor.com/docs/installed-software#python + # The list here is complete (excluding Python 2.6, which + # isn't covered by this document) at the time of writing. + + - PYTHON_FOLDER: "C:\\Python27" + PYDEVD_USE_CYTHON: YES + - PYTHON_FOLDER: "C:\\Python27" + PYDEVD_USE_CYTHON: NO + + #- PYTHON_FOLDER: "C:\\Python33" + #- PYTHON_FOLDER: "C:\\Python34" + #- PYTHON_FOLDER: "C:\\Python35" + #- PYTHON_FOLDER: "C:\\Python27-x64" + #- PYTHON_FOLDER: "C:\\Python33-x64" + # DISTUTILS_USE_SDK: "1" + #- PYTHON_FOLDER: "C:\\Python34-x64" + # DISTUTILS_USE_SDK: "1" + + - PYTHON_FOLDER: "C:\\Python35-x64" + PYDEVD_USE_CYTHON: YES + - PYTHON_FOLDER: "C:\\Python35-x64" + PYDEVD_USE_CYTHON: NO + + - PYTHON_FOLDER: "C:\\Python36-x64" + PYDEVD_USE_CYTHON: YES + - PYTHON_FOLDER: "C:\\Python36-x64" + PYDEVD_USE_CYTHON: NO + + - PYTHON_FOLDER: "C:\\Python36-x64" + PYDEVD_USE_CYTHON: NO + TEST_IRONPYTHON: YES + +install: + # Note: we can't use powershell for everything as it'll fail if anything is written to stderr (which is expected + # in some cases), so, using cmd on case where writing to stderr is Ok. 
+ - cmd: "set PYTHON_EXE=%PYTHON_FOLDER%\\python.exe" + - ps: if ($env:TEST_IRONPYTHON -eq "YES"){Start-FileDownload https://github.com/IronLanguages/main/releases/download/ipy-2.7.5/IronPython-2.7.5.zip -FileName ironpython.zip} + - cmd: IF "%TEST_IRONPYTHON%"=="YES" (7z x ironpython.zip -oironpython) + - cmd: IF "%TEST_IRONPYTHON%"=="YES" (ironpython\IronPython-2.7.5\ipy.exe -X:Frames -X:ExceptionDetail -X:ShowClrExceptions -m ensurepip) + - cmd: IF "%TEST_IRONPYTHON%"=="YES" (ironpython\IronPython-2.7.5\ipy.exe -X:Frames -X:ExceptionDetail -X:ShowClrExceptions -m pip install pytest) + - ps: | + if ($env:TEST_IRONPYTHON -ne "YES"){ + $PYTHON_EXE = $Env:PYTHON_EXE + & $PYTHON_EXE -m pip install wheel + & $PYTHON_EXE -m pip install cython + & $PYTHON_EXE -m pip install numpy + & $PYTHON_EXE -m pip install pytest + & $PYTHON_EXE -m pip install psutil + & $PYTHON_EXE -m pip install ipython + if ($env:PYTHON -eq "C:\\Python27"){ + "%PYTHON%\\python.exe -m pip install django>=1.7,<1.8" + } + } + - cmd: "set PYTHONPATH=%PYTHONPATH%;%APPVEYOR_BUILD_FOLDER%" + +build_script: + - "%PYTHON_EXE% build_tools/build.py" + +test_script: + - cmd: IF "%TEST_IRONPYTHON%"=="YES" (ironpython\IronPython-2.7.5\ipy.exe -X:Frames -X:ExceptionDetail -X:ShowClrExceptions -m pytest --assert=plain -k "not samples") + - cmd: IF "%TEST_IRONPYTHON%"=="" (%PYTHON_EXE% -m pytest) + +artifacts: + # bdist_wheel puts your built wheel in the dist directory + # - path: dist\* + - path: build\lib.* + +#on_success: +# You can use this step to upload your artifacts to a public website. +# See Appveyor's documentation for more details. Or you can simply +# access your wheels from the Appveyor "artifacts" tab for your build. \ No newline at end of file diff --git a/ptvsd/pydevd/build.gradle b/ptvsd/pydevd/build.gradle new file mode 100644 index 00000000..d3361333 --- /dev/null +++ b/ptvsd/pydevd/build.gradle @@ -0,0 +1,43 @@ +import org.apache.tools.ant.taskdefs.condition.Os + +plugins { + id "com.jetbrains.python.envs" version "0.0.9" +} + +envs { + project.buildDir = new File(System.getenv().getOrDefault("PYCHARM_BUILD_DIR", buildDir)) + bootstrapDirectory = new File(buildDir, '.miniconda') + envsDirectory = new File(buildDir, 'MinicondaEnvs') + minicondaVersion = 'latest' + packages = ["cython", "pip", "setuptools"] + + conda "py27_64", "2.7", ["wheel", "twine"], false + conda "py34_64", "3.4", ["wheel", "twine"], false + conda "py35_64", "3.5", ["wheel", "twine"], false + conda "py36_64", "3.6", ["wheel", "twine"], false + conda "py27_32", "2.7", ["wheel", "twine"], false + conda "py34_32", "3.4", ["wheel", "twine"], false + conda "py35_32", "3.5", ["wheel", "twine"], false + conda "py36_32", "3.6", ["wheel", "twine"], false +} + +task buildBinariesOnWindows(dependsOn: 'build_envs') << { + exec { + workingDir projectDir + + environment PYTHONPATH: projectDir, + MINICONDA32_ENVS: envs.envsDirectory, + MINICONDA64_ENVS: envs.envsDirectory + + commandLine "${envs.envsDirectory}/py27_32/python.exe", "build_tools/build_binaries_windows.py" + } +} + +task buildBinaries() { + if (Os.isFamily(Os.FAMILY_WINDOWS)) { + dependsOn buildBinariesOnWindows + } + else if (Os.isFamily(Os.FAMILY_MAC)) { + dependsOn buildOnMac + } +} diff --git a/ptvsd/pydevd/build_tools/build.py b/ptvsd/pydevd/build_tools/build.py new file mode 100644 index 00000000..017e6434 --- /dev/null +++ b/ptvsd/pydevd/build_tools/build.py @@ -0,0 +1,171 @@ +''' +Helper to build pydevd. 
+ +It should: + * recreate our generated files + * compile cython deps (properly setting up the environment first). + +Note that it's used in the CI to build the cython deps based on the PYDEVD_USE_CYTHON environment variable. +''' +from __future__ import print_function + +import os +import subprocess +import sys + +from generate_code import remove_if_exists, root_dir, is_python_64bit, generate_dont_trace_files, generate_cython_module + + +def validate_pair(ob): + try: + if not (len(ob) == 2): + print("Unexpected result:", ob, file=sys.stderr) + raise ValueError + except: + return False + return True + + +def consume(it): + try: + while True: + next(it) + except StopIteration: + pass + +def get_environment_from_batch_command(env_cmd, initial=None): + """ + Take a command (either a single command or list of arguments) + and return the environment created after running that command. + Note that if the command must be a batch file or .cmd file, or the + changes to the environment will not be captured. + + If initial is supplied, it is used as the initial environment passed + to the child process. + """ + if not isinstance(env_cmd, (list, tuple)): + env_cmd = [env_cmd] + if not os.path.exists(env_cmd[0]): + raise RuntimeError('Error: %s does not exist' % (env_cmd[0],)) + + # construct the command that will alter the environment + env_cmd = subprocess.list2cmdline(env_cmd) + # create a tag so we can tell in the output when the proc is done + tag = 'Done running command' + # construct a cmd.exe command to do accomplish this + cmd = 'cmd.exe /s /c "{env_cmd} && echo "{tag}" && set"'.format(**vars()) + # launch the process + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=initial) + # parse the output sent to stdout + lines = proc.stdout + # consume whatever output occurs until the tag is reached + for line in lines: + line = line.decode('utf-8') + if 'The specified configuration type is missing.' in line: + raise AssertionError('Error executing %s. View http://blog.ionelmc.ro/2014/12/21/compiling-python-extensions-on-windows/ for details.' 
% (env_cmd)) + if tag in line: + break + if sys.version_info[0] > 2: + # define a way to handle each KEY=VALUE line + handle_line = lambda l: l.decode('utf-8').rstrip().split('=', 1) + else: + # define a way to handle each KEY=VALUE line + handle_line = lambda l: l.rstrip().split('=', 1) + # parse key/values into pairs + pairs = map(handle_line, lines) + # make sure the pairs are valid + valid_pairs = filter(validate_pair, pairs) + # construct a dictionary of the pairs + result = dict(valid_pairs) + # let the process finish + proc.communicate() + return result + +def remove_binaries(): + for f in os.listdir(os.path.join(root_dir, '_pydevd_bundle')): + if f.endswith('.pyd'): + remove_if_exists(os.path.join(root_dir, '_pydevd_bundle', f)) + +def build(): + if '--no-remove-binaries' not in sys.argv: + remove_binaries() + + + os.chdir(root_dir) + + env=None + if sys.platform == 'win32': + # "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\bin\vcvars64.bat" + # set MSSdk=1 + # set DISTUTILS_USE_SDK=1 + # set VS100COMNTOOLS=C:\Program Files (x86)\Microsoft Visual Studio 9.0\Common7\Tools + + + env = os.environ.copy() + if sys.version_info[:2] in ((2,6), (2,7), (3,5), (3,6)): + import setuptools # We have to import it first for the compiler to be found + from distutils import msvc9compiler + + if sys.version_info[:2] in ((2,6), (2,7)): + vcvarsall = msvc9compiler.find_vcvarsall(9.0) + elif sys.version_info[:2] in ((3,5), (3,6)): + vcvarsall = msvc9compiler.find_vcvarsall(14.0) + if vcvarsall is None or not os.path.exists(vcvarsall): + raise RuntimeError('Error finding vcvarsall.') + + if is_python_64bit(): + env.update(get_environment_from_batch_command( + [vcvarsall, 'amd64'], + initial=os.environ.copy())) + else: + env.update(get_environment_from_batch_command( + [vcvarsall, 'x86'], + initial=os.environ.copy())) + + elif sys.version_info[:2] in ((3,3), (3,4)): + if is_python_64bit(): + env.update(get_environment_from_batch_command( + [r"C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd", '/x64'], + initial=os.environ.copy())) + else: + env.update(get_environment_from_batch_command( + [r"C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd", '/x86'], + initial=os.environ.copy())) + + else: + raise AssertionError('Unable to setup environment for Python: %s' % (sys.version,)) + + env['MSSdk'] = '1' + env['DISTUTILS_USE_SDK'] = '1' + + additional_args = [] + for arg in sys.argv: + if arg.startswith('--target-pyd-name='): + additional_args.append(arg) + if arg.startswith('--target-pyd-frame-eval='): + additional_args.append(arg) + break + else: + additional_args.append('--force-cython') # Build always forces cython! 
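    # The actual compilation step: the collected flags are forwarded to setup_cython.py
    # (build_ext --inplace), run with the MSVC environment prepared above on Windows, so the
    # resulting extension modules land next to the sources (e.g. in _pydevd_bundle).
    # The --target-pyd-name / --target-pyd-frame-eval flags, passed in by
    # build_binaries_windows.py, give the built extensions version-specific names.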
+ + args = [ + sys.executable, os.path.join(os.path.dirname(__file__), '..', 'setup_cython.py'), 'build_ext', '--inplace', + ]+additional_args + print('Calling args: %s' % (args,)) + subprocess.check_call(args, env=env,) + +if __name__ == '__main__': + use_cython = os.getenv('PYDEVD_USE_CYTHON', None) + if use_cython == 'YES': + build() + elif use_cython == 'NO': + remove_binaries() + elif use_cython is None: + # Regular process + if '--no-regenerate-files' not in sys.argv: + generate_dont_trace_files() + generate_cython_module() + build() + else: + raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (accepted: YES, NO)' % (use_cython,)) + diff --git a/ptvsd/pydevd/build_tools/build_binaries_windows.py b/ptvsd/pydevd/build_tools/build_binaries_windows.py new file mode 100644 index 00000000..f19535f4 --- /dev/null +++ b/ptvsd/pydevd/build_tools/build_binaries_windows.py @@ -0,0 +1,187 @@ +r''' +Creating the needed environments for creating the pre-compiled distribution on Windods: + +1. Download: + +* conda32 at C:\tools\Miniconda32 + +* conda64 at C:\tools\Miniconda + +Create the environments: + +C:\tools\Miniconda32\Scripts\conda create -y -f -n py27_32 python=2.7 cython numpy nose ipython pip +C:\tools\Miniconda32\Scripts\activate py27_32 +pip install "django>=1.7,<1.8" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + +C:\tools\Miniconda32\Scripts\conda create -y -f -n py34_32 python=3.4 cython numpy nose ipython pip +C:\tools\Miniconda32\Scripts\activate py34_32 +pip install "django>=1.9" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + +C:\tools\Miniconda32\Scripts\conda create -y -f -n py35_32 python=3.5 cython numpy nose ipython pip +C:\tools\Miniconda32\Scripts\activate py35_32 +pip install "django>=1.9" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + +C:\tools\Miniconda32\Scripts\conda create -y -f -n py36_32 python=3.6 cython numpy nose ipython pip +C:\tools\Miniconda32\Scripts\activate py36_32 +pip install "django>=1.9" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + + +C:\tools\Miniconda\Scripts\conda create -y -f -n py27_64 python=2.7 cython numpy nose ipython pip +C:\tools\Miniconda\Scripts\activate py27_64 +pip install "django>=1.7,<1.8" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + +C:\tools\Miniconda\Scripts\conda create -y -f -n py34_64 python=3.4 cython numpy nose ipython pip +C:\tools\Miniconda\Scripts\activate py34_64 +pip install "django>=1.9" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + +C:\tools\Miniconda\Scripts\conda create -y -f -n py35_64 python=3.5 cython numpy nose ipython pip +C:\tools\Miniconda\Scripts\activate py35_64 +pip install "django>=1.9" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + + +C:\tools\Miniconda\Scripts\conda create -y -f -n py36_64 python=3.6 cython numpy nose ipython pip +C:\tools\Miniconda\Scripts\activate py36_64 +pip install "django>=1.9" +pip install -U "setuptools>=0.9" +pip install -U "pip>=1.4" "wheel>=0.21" twine +deactivate + + + + +### UPDATE CYTHON + +C:\tools\Miniconda32\Scripts\activate py27_32 +conda update -y cython +deactivate + +C:\tools\Miniconda32\Scripts\activate py34_32 +conda update -y cython +deactivate + +C:\tools\Miniconda32\Scripts\activate py35_32 +conda update -y cython 
+deactivate + +C:\tools\Miniconda32\Scripts\activate py36_32 +conda update -y cython +deactivate + +C:\tools\Miniconda\Scripts\activate py27_64 +conda update -y cython +deactivate + +C:\tools\Miniconda\Scripts\activate py34_64 +conda update -y cython +deactivate + +C:\tools\Miniconda\Scripts\activate py35_64 +conda update -y cython +deactivate + +C:\tools\Miniconda\Scripts\activate py36_64 +conda update -y cython +deactivate + + +''' + +from __future__ import unicode_literals +import os +import subprocess +import sys + +miniconda32_envs = os.getenv('MINICONDA32_ENVS', r'C:\tools\Miniconda32\envs') +miniconda64_envs = os.getenv('MINICONDA64_ENVS', r'C:\tools\Miniconda\envs') + +python_installations = [ + r'%s\py27_32\python.exe' % miniconda32_envs, + r'%s\py34_32\python.exe' % miniconda32_envs, + r'%s\py35_32\python.exe' % miniconda32_envs, + r'%s\py36_32\python.exe' % miniconda32_envs, + + r'%s\py27_64\python.exe' % miniconda64_envs, + r'%s\py34_64\python.exe' % miniconda64_envs, + r'%s\py35_64\python.exe' % miniconda64_envs, + r'%s\py36_64\python.exe' % miniconda64_envs, +] + +root_dir = os.path.dirname(os.path.dirname(__file__)) +def list_binaries(): + for f in os.listdir(os.path.join(root_dir, '_pydevd_bundle')): + if f.endswith('.pyd'): + yield f + +def extract_version(python_install): + return python_install.split('\\')[-2][2:] + + +def main(): + from generate_code import generate_dont_trace_files + from generate_code import generate_cython_module + + # First, make sure that our code is up to date. + generate_dont_trace_files() + generate_cython_module() + + for python_install in python_installations: + assert os.path.exists(python_install) + + from build import remove_binaries + remove_binaries() + + for f in list_binaries(): + raise AssertionError('Binary not removed: %s' % (f,)) + + for i, python_install in enumerate(python_installations): + print() + print('*'*80) + print('*'*80) + print() + new_name = 'pydevd_cython_%s_%s' % (sys.platform, extract_version(python_install)) + args = [ + python_install, os.path.join(root_dir, 'build_tools', 'build.py'), '--no-remove-binaries', '--target-pyd-name=%s' % new_name, '--force-cython'] + if i != 0: + args.append('--no-regenerate-files') + if extract_version(python_install).startswith('36'): + name_frame_eval = 'pydevd_frame_evaluator_%s_%s' % (sys.platform, extract_version(python_install)) + args.append('--target-pyd-frame-eval=%s' % name_frame_eval) + print('Calling: %s' % (' '.join(args))) + subprocess.check_call(args) + + + +if __name__ == '__main__': + main() + +# C:\tools\Miniconda32\envs\py27_32\python build_tools\build.py: generates the .pyx and .c +# C:\tools\Miniconda32\envs\py27_32\python build_tools\build_binaries_windows.py: builds for multiple python versions + +r''' +To run do: +cd /D x:\PyDev.Debugger +set PYTHONPATH=x:\PyDev.Debugger +C:\tools\Miniconda32\envs\py27_32\python build_tools\build.py +C:\tools\Miniconda32\envs\py27_32\python build_tools\build_binaries_windows.py +''' diff --git a/ptvsd/pydevd/build_tools/generate_code.py b/ptvsd/pydevd/build_tools/generate_code.py new file mode 100644 index 00000000..dc7a3b81 --- /dev/null +++ b/ptvsd/pydevd/build_tools/generate_code.py @@ -0,0 +1,186 @@ +''' +This module should be run to recreate the files that we generate automatically +(i.e.: modules that shouldn't be traced and cython .pyx) +''' + +from __future__ import print_function + +import os +import struct + + + +def is_python_64bit(): + return (struct.calcsize('P') == 8) + +root_dir = 
os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) + +def get_cython_contents(filename): + if filename.endswith('.pyc'): + filename = filename[:-1] + + state = 'regular' + + new_contents = [] + with open(filename, 'r') as stream: + for line in stream: + strip = line.strip() + if state == 'regular': + if strip == '# IFDEF CYTHON': + state = 'cython' + + new_contents.append('%s -- DONT EDIT THIS FILE (it is automatically generated)\n' % line.replace('\n', '').replace('\r', '')) + continue + + new_contents.append(line) + + elif state == 'cython': + if strip == '# ELSE': + state = 'nocython' + new_contents.append(line) + continue + + elif strip == '# ENDIF': + state = 'regular' + new_contents.append(line) + continue + + assert strip.startswith('# '), 'Line inside # IFDEF CYTHON must start with "# ".' + new_contents.append(line.replace('# ', '', 1)) + + elif state == 'nocython': + if strip == '# ENDIF': + state = 'regular' + new_contents.append(line) + continue + new_contents.append('# %s' % line) + + assert state == 'regular', 'Error: # IFDEF CYTHON found without # ENDIF' + + + return ''.join(new_contents) + +def _generate_cython_from_files(target, modules): + contents = ['''# Important: Autogenerated file. + +# DO NOT edit manually! +# DO NOT edit manually! +'''] + + for mod in modules: + contents.append(get_cython_contents(mod.__file__)) + + with open(target, 'w') as stream: + stream.write(''.join(contents)) + +def generate_dont_trace_files(): + template = '''# Important: Autogenerated file. + +# DO NOT edit manually! +# DO NOT edit manually! + +from _pydevd_bundle.pydevd_constants import IS_PY3K + +LIB_FILE = 1 +PYDEV_FILE = 2 + +DONT_TRACE = { + # commonly used things from the stdlib that we don't want to trace + 'Queue.py':LIB_FILE, + 'queue.py':LIB_FILE, + 'socket.py':LIB_FILE, + 'weakref.py':LIB_FILE, + '_weakrefset.py':LIB_FILE, + 'linecache.py':LIB_FILE, + 'threading.py':LIB_FILE, + 'dis.py':LIB_FILE, + + #things from pydev that we don't want to trace + '_pydev_execfile.py':PYDEV_FILE, +%(pydev_files)s +} + +if IS_PY3K: + # if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716) + DONT_TRACE['io.py'] = LIB_FILE + + # Don't trace common encodings too + DONT_TRACE['cp1252.py'] = LIB_FILE + DONT_TRACE['utf_8.py'] = LIB_FILE +''' + + pydev_files = [] + + for root, dirs, files in os.walk(root_dir): + for d in [ + '.git', + '.settings', + 'build', + 'build_tools', + 'dist', + 'pydevd.egg-info', + 'pydevd_attach_to_process', + 'pydev_sitecustomize', + 'stubs', + 'tests', + 'tests_mainloop', + 'tests_python', + 'tests_runfiles', + 'test_pydevd_reload', + 'third_party', + '__pycache__', + '_pydev_runfiles', + 'pydev_ipython', + ]: + try: + dirs.remove(d) + except: + pass + + for f in files: + if f.endswith('.py'): + if f not in ( + '__init__.py', + 'runfiles.py', + 'pydev_coverage.py', + 'pydev_pysrc.py', + 'setup.py', + 'setup_cython.py', + 'interpreterInfo.py', + 'conftest.py', + ): + pydev_files.append(" '%s': PYDEV_FILE," % (f,)) + + contents = template % (dict(pydev_files='\n'.join(sorted(pydev_files)))) + assert 'pydevd.py' in contents + assert 'pydevd_dont_trace.py' in contents + with open(os.path.join(root_dir, '_pydevd_bundle', 'pydevd_dont_trace_files.py'), 'w') as stream: + stream.write(contents) + +def remove_if_exists(f): + try: + if os.path.exists(f): + os.remove(f) + except: + import traceback;traceback.print_exc() + +def generate_cython_module(): + remove_if_exists(os.path.join(root_dir, '_pydevd_bundle', 'pydevd_cython.pyx')) + + 
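    # get_cython_contents() above is a tiny preprocessor driven by the '# IFDEF CYTHON' /
    # '# ELSE' / '# ENDIF' markers in the regular modules: lines in the CYTHON branch are
    # un-commented, lines in the ELSE branch are commented out, and the results are
    # concatenated into the generated .pyx (PYDEVD_USE_CYTHON is forced to NO below so the
    # plain-Python variants are the ones imported and read). A hypothetical input such as
    #
    #     # IFDEF CYTHON
    #     # cdef class SomeClass:
    #     # ELSE
    #     class SomeClass(object):
    #     # ENDIF
    #
    # would come out in the generated .pyx as
    #
    #     # IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated)
    #     cdef class SomeClass:
    #     # ELSE
    #     # class SomeClass(object):
    #     # ENDIF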
target = os.path.join(root_dir, '_pydevd_bundle', 'pydevd_cython.pyx') + curr = os.environ.get('PYDEVD_USE_CYTHON') + try: + os.environ['PYDEVD_USE_CYTHON'] = 'NO' + + from _pydevd_bundle import pydevd_additional_thread_info_regular + from _pydevd_bundle import pydevd_frame, pydevd_trace_dispatch_regular + _generate_cython_from_files(target, [pydevd_additional_thread_info_regular, pydevd_frame, pydevd_trace_dispatch_regular]) + finally: + if curr is None: + del os.environ['PYDEVD_USE_CYTHON'] + else: + os.environ['PYDEVD_USE_CYTHON'] = curr + +if __name__ == '__main__': + generate_dont_trace_files() + generate_cython_module() diff --git a/ptvsd/pydevd/build_tools/names_to_rename.py b/ptvsd/pydevd/build_tools/names_to_rename.py new file mode 100644 index 00000000..1525974d --- /dev/null +++ b/ptvsd/pydevd/build_tools/names_to_rename.py @@ -0,0 +1,318 @@ +''' +Helper module to hold the names to rename while doing refactoring to convert to pep8. +''' +NAMES = ''' +# sendCaughtExceptionStack +# sendBreakpointConditionException +# setSuspend +# processThreadNotAlive +# sendCaughtExceptionStackProceeded +# doWaitSuspend +# SetTraceForFrameAndParents +# prepareToRun +# processCommandLine +# initStdoutRedirect +# initStderrRedirect +# OnRun +# doKillPydevThread +# stopTrace +# handleExcept +# processCommand +# processNetCommand +# addCommand +# StartClient +# getNextSeq +# makeMessage +# StartServer +# threadToXML +# makeErrorMessage +# makeThreadCreatedMessage +# makeCustomFrameCreatedMessage +# makeListThreadsMessage +# makeVariableChangedMessage +# makeIoMessage +# makeVersionMessage +# makeThreadKilledMessage +# makeThreadSuspendStr +# makeValidXmlValue +# makeThreadSuspendMessage +# makeThreadRunMessage +# makeGetVariableMessage +# makeGetArrayMessage +# makeGetFrameMessage +# makeEvaluateExpressionMessage +# makeGetCompletionsMessage +# makeGetFileContents +# makeSendBreakpointExceptionMessage +# makeSendCurrExceptionTraceMessage +# makeSendCurrExceptionTraceProceededMessage +# makeSendConsoleMessage +# makeCustomOperationMessage +# makeLoadSourceMessage +# makeShowConsoleMessage +# makeExitMessage +# canBeExecutedBy +# doIt +# additionalInfo +# cmdFactory +# GetExceptionTracebackStr +# _GetStackStr +# _InternalSetTrace +# ReplaceSysSetTraceFunc +# RestoreSysSetTraceFunc + + + +# AddContent +# AddException +# AddObserver +# # Call -- skip +# # Call1 -- skip +# # Call2 -- skip +# # Call3 -- skip +# # Call4 -- skip +# ChangePythonPath +# CheckArgs +# CheckChar +# CompleteFromDir +# CreateDbFrame +# CustomFramesContainerInit +# DictContains +# DictItems +# DictIterItems +# DictIterValues +# DictKeys +# DictPop +# DictValues + + +# DoExit +# DoFind +# EndRedirect +# # Exec -- skip +# ExecuteTestsInParallel +# # Find -- skip +# FinishDebuggingSession +# FlattenTestSuite +# GenerateCompletionsAsXML +# GenerateImportsTipForModule +# GenerateTip + + +# testAddExec +# testComplete +# testCompleteDoesNotDoPythonMatches +# testCompletionSocketsAndMessages +# testConsoleHello +# testConsoleRequests +# testDotNetLibraries +# testEdit +# testGetCompletions +# testGetNamespace +# testGetReferrers1 +# testGetReferrers2 +# testGetReferrers3 +# testGetReferrers4 +# testGetReferrers5 +# testGetReferrers6 +# testGetReferrers7 +# testGettingInfoOnJython +# testGui +# testHistory +# testImports +# testImports1 +# testImports1a +# testImports1b +# testImports1c +# testImports2 +# testImports2a +# testImports2b +# testImports2c +# testImports3 +# testImports4 +# testImports5 +# testInspect +# testIt +# testMessage 
+# testPrint +# testProperty +# testProperty2 +# testProperty3 +# testQuestionMark +# testSearch +# testSearchOnJython +# testServer +# testTipOnString +# toXML +# updateCustomFrame +# varToXML + +# +# GetContents +# GetCoverageFiles +# GetFile +# GetFileNameAndBaseFromFile +# GetFilenameAndBase +# GetFrame +# GetGlobalDebugger # -- renamed but kept backward-compatibility +# GetNormPathsAndBase +# GetNormPathsAndBaseFromFile +# GetTestsToRun -- skip +# GetThreadId +# GetVmType +# IPythonEditor -- skip +# ImportName +# InitializeServer +# IterFrames + + +# Method1 -- skip +# Method1a -- skip +# Method2 -- skip +# Method3 -- skip + +# NewConsolidate +# NormFileToClient +# NormFileToServer +# # Notify -- skip +# # NotifyFinished -- skip +# OnFunButton +# # OnInit -- skip +# OnTimeToClose +# PydevdFindThreadById +# PydevdLog +# # RequestInput -- skip + + +# Search -- manual: search_definition +# ServerProxy -- skip +# SetGlobalDebugger + +# SetServer +# SetUp +# SetTrace -- skip + + +# SetVmType +# SetupType +# StartCoverageSupport +# StartCoverageSupportFromParams +# StartPydevNosePluginSingleton +# StartRedirect +# ToTuple + +# addAdditionalFrameById +# removeAdditionalFrameById +# removeCustomFrame +# addCustomFrame +# addError -- skip +# addExec +# addFailure -- skip +# addSuccess -- skip +# assertArgs +# assertIn + +# basicAsStr +# changeAttrExpression +# # changeVariable -- skip (part of public API for console) +# checkOutput +# checkOutputRedirect +# clearBuffer + +# # connectToDebugger -- skip (part of public API for console) +# connectToServer +# consoleExec +# createConnections +# createStdIn +# customOperation +# dirObj +# doAddExec +# doExecCode +# dumpFrames + +# # enableGui -- skip (part of public API for console) +# evalInContext +# evaluateExpression +# # execLine -- skip (part of public API for console) +# # execMultipleLines -- skip (part of public API for console) +# findFrame +# orig_findFrame +# finishExec +# fixGetpass + +# forceServerKill +# formatArg +# formatCompletionMessage +# formatParamClassName +# frameVarsToXML +# fullyNormalizePath + +# getArray -- skip (part of public API for console) +# getAsDoc +# getCapturedOutput +# getCompletions -- skip (part of public API for console) + +# getCompletionsMessage +# getCustomFrame +# # getDescription -- skip (part of public API for console) +# getDictionary +# # getFrame -- skip (part of public API for console) +# getFrameName + + + +# getFrameStack +# getFreeAddresses +# getInternalQueue +# getIoFromError +# getNamespace +# getTestName +# getTokenAndData +# getType + +# getVariable -- skip (part of public API for console) + +# # haveAliveThreads -> has_threads_alive +# initializeNetwork +# isThreadAlive +# # iterFrames -> _iter_frames +# # keyStr -> key_to_str +# killAllPydevThreads +# longRunning +# # metA -- skip +# nativePath + +# needMore +# needMoreForCode +# # notifyCommands -- skip (part of public API) +# # notifyConnected -- skip (part of public API) +# # notifyStartTest -- skip (part of public API) +# # notifyTest -- skip (part of public API) +# # notifyTestRunFinished -- skip (part of public API) +# # notifyTestsCollected -- skip (part of public API) +# postInternalCommand +# processInternalCommands +# readMsg + + +# redirectStdout +# removeInvalidChars +# reportCond +# resolveCompoundVariable +# resolveVar +# restoreStdout +# sendKillMsg +# sendSignatureCallTrace +# setTracingForUntracedContexts +# startClientThread +# startDebuggerServerThread +# startExec + +# startTest -- skip +# stopTest -- skip +# setUp -- 
skip +# setUpClass -- skip +# setUpModule -- skip +# tearDown -- skip + +''' \ No newline at end of file diff --git a/ptvsd/pydevd/build_tools/rename_pep8.py b/ptvsd/pydevd/build_tools/rename_pep8.py new file mode 100644 index 00000000..b673fb50 --- /dev/null +++ b/ptvsd/pydevd/build_tools/rename_pep8.py @@ -0,0 +1,123 @@ +''' +Helper module to do refactoring to convert names to pep8. +''' +import re +import os +import names_to_rename + +_CAMEL_RE = re.compile(r'(?<=[a-z])([A-Z])') +_CAMEL_DEF_RE = re.compile(r'(def )((([A-Z0-9]+|[a-z0-9])[a-z][a-z0-9]*[A-Z]|[a-z0-9]*[A-Z][A-Z0-9]*[a-z])[A-Za-z0-9]*)') + +def _normalize(name): + return _CAMEL_RE.sub(lambda x: '_' + x.group(1).lower(), name).lower() + +def find_matches_in_contents(contents): + return [x[1] for x in re.findall(_CAMEL_DEF_RE, contents)] + +def iter_files_in_dir(dirname): + for root, dirs, files in os.walk(dirname): + for name in ('pydevd_attach_to_process', '.git', 'stubs', 'pydev_ipython', 'third_party', 'pydev_ipython'): + try: + dirs.remove(name) + except: + pass + for filename in files: + if filename.endswith('.py') and filename not in ('rename_pep8.py', 'names_to_rename.py'): + path = os.path.join(root, filename) + with open(path, 'rb') as stream: + initial_contents = stream.read() + + yield path, initial_contents + +def find_matches(): + found = set() + for path, initial_contents in iter_files_in_dir(os.path.dirname(os.path.dirname(__file__))): + found.update(find_matches_in_contents(initial_contents)) + print '\n'.join(sorted(found)) + print 'Total', len(found) + +def substitute_contents(re_name_to_new_val, initial_contents): + contents = initial_contents + for key, val in re_name_to_new_val.iteritems(): + contents = re.sub(key, val, contents) + return contents + +def make_replace(): + re_name_to_new_val = load_re_to_new_val(names_to_rename.NAMES) + # traverse root directory, and list directories as dirs and files as files + for path, initial_contents in iter_files_in_dir(os.path.dirname(os.path.dirname(__file__))): + contents = substitute_contents(re_name_to_new_val, initial_contents) + if contents != initial_contents: + print 'Changed something at: %s' % (path,) + + for val in re_name_to_new_val.itervalues(): + # Check in initial contents to see if it already existed! + if re.findall(r'\b%s\b' % (val,), initial_contents): + raise AssertionError('Error in:\n%s\n%s is already being used (and changes may conflict).' 
% (path, val,)) + + with open(path, 'wb') as stream: + stream.write(contents) + + +def load_re_to_new_val(names): + name_to_new_val = {} + for n in names.splitlines(): + n = n.strip() + if not n.startswith('#') and n: + name_to_new_val[r'\b'+n+r'\b'] = _normalize(n) + return name_to_new_val + +def test(): + assert _normalize('RestoreSysSetTraceFunc') == 'restore_sys_set_trace_func' + assert _normalize('restoreSysSetTraceFunc') == 'restore_sys_set_trace_func' + assert _normalize('Restore') == 'restore' + matches = find_matches_in_contents(''' + def CamelCase() + def camelCase() + def ignore() + def ignore_this() + def Camel() + def CamelCaseAnother() + ''') + assert matches == ['CamelCase', 'camelCase', 'Camel', 'CamelCaseAnother'] + re_name_to_new_val = load_re_to_new_val(''' +# Call -- skip +# Call1 -- skip +# Call2 -- skip +# Call3 -- skip +# Call4 -- skip +CustomFramesContainerInit +DictContains +DictItems +DictIterItems +DictIterValues +DictKeys +DictPop +DictValues +''') + assert re_name_to_new_val == {'\\bDictPop\\b': 'dict_pop', '\\bDictItems\\b': 'dict_items', '\\bDictIterValues\\b': 'dict_iter_values', '\\bDictKeys\\b': 'dict_keys', '\\bDictContains\\b': 'dict_contains', '\\bDictIterItems\\b': 'dict_iter_items', '\\bCustomFramesContainerInit\\b': 'custom_frames_container_init', '\\bDictValues\\b': 'dict_values'} + assert substitute_contents(re_name_to_new_val, ''' +CustomFramesContainerInit +DictContains +DictItems +DictIterItems +DictIterValues +DictKeys +DictPop +DictValues +''') == ''' +custom_frames_container_init +dict_contains +dict_items +dict_iter_items +dict_iter_values +dict_keys +dict_pop +dict_values +''' + +if __name__ == '__main__': +# find_matches() + make_replace() +# test() + diff --git a/ptvsd/pydevd/conftest.py b/ptvsd/pydevd/conftest.py new file mode 100644 index 00000000..b0ebc7a6 --- /dev/null +++ b/ptvsd/pydevd/conftest.py @@ -0,0 +1,163 @@ +import pytest +import sys +from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_IRONPYTHON + + +# see: http://goo.gl/kTQMs +SYMBOLS = { + 'customary' : ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'), + 'customary_ext' : ('byte', 'kilo', 'mega', 'giga', 'tera', 'peta', 'exa', + 'zetta', 'iotta'), + 'iec' : ('Bi', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'), + 'iec_ext' : ('byte', 'kibi', 'mebi', 'gibi', 'tebi', 'pebi', 'exbi', + 'zebi', 'yobi'), +} + +def bytes2human(n, format='%(value).1f %(symbol)s', symbols='customary'): + """ + Bytes-to-human / human-to-bytes converter. + Based on: http://goo.gl/kTQMs + Working with Python 2.x and 3.x. + + Author: Giampaolo Rodola' + License: MIT + """ + + """ + Convert n bytes into a human readable string based on format. 
+ symbols can be either "customary", "customary_ext", "iec" or "iec_ext", + see: http://goo.gl/kTQMs + + >>> bytes2human(0) + '0.0 B' + >>> bytes2human(0.9) + '0.0 B' + >>> bytes2human(1) + '1.0 B' + >>> bytes2human(1.9) + '1.0 B' + >>> bytes2human(1024) + '1.0 K' + >>> bytes2human(1048576) + '1.0 M' + >>> bytes2human(1099511627776127398123789121) + '909.5 Y' + + >>> bytes2human(9856, symbols="customary") + '9.6 K' + >>> bytes2human(9856, symbols="customary_ext") + '9.6 kilo' + >>> bytes2human(9856, symbols="iec") + '9.6 Ki' + >>> bytes2human(9856, symbols="iec_ext") + '9.6 kibi' + + >>> bytes2human(10000, "%(value).1f %(symbol)s/sec") + '9.8 K/sec' + + >>> # precision can be adjusted by playing with %f operator + >>> bytes2human(10000, format="%(value).5f %(symbol)s") + '9.76562 K' + """ + n = int(n) + if n < 0: + raise ValueError("n < 0") + symbols = SYMBOLS[symbols] + prefix = {} + for i, s in enumerate(symbols[1:]): + prefix[s] = 1 << (i+1)*10 + for symbol in reversed(symbols[1:]): + if n >= prefix[symbol]: + value = float(n) / prefix[symbol] + return format % locals() + return format % dict(symbol=symbols[0], value=n) + +def format_memory_info(memory_info, curr_proc_memory_info): + return 'Total: %s, Available: %s, Used: %s %%, Curr process: %s' % ( + bytes2human(memory_info.total), bytes2human(memory_info.available), memory_info.percent, format_process_memory_info(curr_proc_memory_info)) + +def format_process_memory_info(proc_memory_info): + return bytes2human(proc_memory_info.rss) + +DEBUG_MEMORY_INFO = False + +_global_collect_info = False + +@pytest.yield_fixture(autouse=True) +def before_after_each_function(request): + global _global_collect_info + import psutil + current_pids = set(proc.pid for proc in psutil.process_iter()) + before_curr_proc_memory_info = psutil.Process().memory_info() + + if _global_collect_info and DEBUG_MEMORY_INFO: + try: + from pympler import summary, muppy + sum1 = summary.summarize(muppy.get_objects()) + except: + import traceback;traceback.print_exc() + + sys.stdout.write( +''' +=============================================================================== +Memory before: %s +%s +=============================================================================== +''' % (request.function, format_memory_info(psutil.virtual_memory(), before_curr_proc_memory_info))) + yield + + processes_info = [] + for proc in psutil.process_iter(): + if proc.pid not in current_pids: + try: + processes_info.append( + 'New Process: %s(%s) - %s' % ( + proc.name(), + proc.pid, + format_process_memory_info(proc.memory_info()) + ) + ) + except psutil.NoSuchProcess: + pass # The process could've died in the meanwhile + + after_curr_proc_memory_info = psutil.Process().memory_info() + + if DEBUG_MEMORY_INFO: + try: + if after_curr_proc_memory_info.rss - before_curr_proc_memory_info.rss > 10 * 1000 * 1000: + # 10 MB leak + if _global_collect_info: + sum2 = summary.summarize(muppy.get_objects()) + diff = summary.get_diff(sum1, sum2) + sys.stdout.write('===============================================================================\n') + sys.stdout.write('Leak info:\n') + sys.stdout.write('===============================================================================\n') + summary.print_(diff) + sys.stdout.write('===============================================================================\n') + + _global_collect_info = True + # We'll only really collect the info on the next test (i.e.: if at one test + # we used too much memory, the next one will start collecting) + else: + 
_global_collect_info = False + except: + import traceback;traceback.print_exc() + + sys.stdout.write( +''' +=============================================================================== +Memory after: %s +%s%s +=============================================================================== + + +''' % ( + request.function, + format_memory_info(psutil.virtual_memory(), after_curr_proc_memory_info), + '' if not processes_info else '\nLeaked processes:\n'+'\n'.join(processes_info)), + ) + +if IS_JYTHON or IS_IRONPYTHON: + # On Jython and IronPython, it's a no-op. + def before_after_each_function(): + pass \ No newline at end of file diff --git a/ptvsd/pydevd/gradle/wrapper/gradle-wrapper.jar b/ptvsd/pydevd/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000..13372aef Binary files /dev/null and b/ptvsd/pydevd/gradle/wrapper/gradle-wrapper.jar differ diff --git a/ptvsd/pydevd/gradle/wrapper/gradle-wrapper.properties b/ptvsd/pydevd/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..9ee9ac96 --- /dev/null +++ b/ptvsd/pydevd/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Thu Feb 04 13:39:02 CET 2016 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-2.10-bin.zip diff --git a/ptvsd/pydevd/gradlew b/ptvsd/pydevd/gradlew new file mode 100644 index 00000000..9d82f789 --- /dev/null +++ b/ptvsd/pydevd/gradlew @@ -0,0 +1,160 @@ +#!/usr/bin/env bash + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn ( ) { + echo "$*" +} + +die ( ) { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; +esac + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." 
+fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules +function splitJvmOpts() { + JVM_OPTS=("$@") +} +eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS +JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" + +exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/ptvsd/pydevd/gradlew.bat b/ptvsd/pydevd/gradlew.bat new file mode 100644 index 00000000..8a0b282a --- /dev/null +++ b/ptvsd/pydevd/gradlew.bat @@ -0,0 +1,90 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. 
+set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windowz variants + +if not "%OS%" == "Windows_NT" goto win9xME_args +if "%@eval[2+2]" == "4" goto 4NT_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* +goto execute + +:4NT_args +@rem Get arguments from the 4NT Shell from JP Software +set CMD_LINE_ARGS=%$ + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/ptvsd/pydevd/interpreterInfo.py b/ptvsd/pydevd/interpreterInfo.py new file mode 100644 index 00000000..40c4ebe7 --- /dev/null +++ b/ptvsd/pydevd/interpreterInfo.py @@ -0,0 +1,256 @@ +''' +This module was created to get information available in the interpreter, such as libraries, +paths, etc. + +what is what: +sys.builtin_module_names: contains the builtin modules embeeded in python (rigth now, we specify all manually). +sys.prefix: A string giving the site-specific directory prefix where the platform independent Python files are installed + +format is something as +EXECUTABLE:python.exe|libs@compiled_dlls$builtin_mods + +all internal are separated by | +''' +import sys + +try: + import os.path + def fully_normalize_path(path): + '''fixes the path so that the format of the path really reflects the directories in the system + ''' + return os.path.normpath(path) + join = os.path.join +except: # ImportError or AttributeError. 
+ # See: http://stackoverflow.com/questions/10254353/error-while-installing-jython-for-pydev + def fully_normalize_path(path): + '''fixes the path so that the format of the path really reflects the directories in the system + ''' + return path + + def join(a, b): + if a.endswith('/') or a.endswith('\\'): + return a + b + return a + '/' + b + + +IS_PYTHON_3_ONWARDS = 0 + +try: + IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3 +except: + # That's OK, not all versions of python have sys.version_info + pass + +try: + # Just check if False and True are defined (depends on version, not whether it's jython/python) + False + True +except: + exec ('True, False = 1,0') # An exec is used so that python 3k does not give a syntax error + +if sys.platform == "cygwin": + + try: + import ctypes # use from the system if available + except ImportError: + sys.path.append(join(sys.path[0], 'third_party/wrapped_for_pydev')) + import ctypes + + def native_path(path): + MAX_PATH = 512 # On cygwin NT, its 260 lately, but just need BIG ENOUGH buffer + '''Get the native form of the path, like c:\\Foo for /cygdrive/c/Foo''' + + retval = ctypes.create_string_buffer(MAX_PATH) + path = fully_normalize_path(path) + path = tobytes(path) + CCP_POSIX_TO_WIN_A = 0 + cygwin1dll = ctypes.cdll.LoadLibrary( 'cygwin1.dll' ) + cygwin1dll.cygwin_conv_path(CCP_POSIX_TO_WIN_A, path, retval, MAX_PATH) + + return retval.value + +else: + def native_path(path): + return fully_normalize_path(path) + + + +def __getfilesystemencoding(): + ''' + Note: there's a copy of this method in _pydev_filesystem_encoding.py + ''' + try: + ret = sys.getfilesystemencoding() + if not ret: + raise RuntimeError('Unable to get encoding.') + return ret + except: + try: + # Handle Jython + from java.lang import System # @UnresolvedImport + env = System.getProperty("os.name").lower() + if env.find('win') != -1: + return 'ISO-8859-1' # mbcs does not work on Jython, so, use a (hopefully) suitable replacement + return 'utf-8' + except: + pass + + # Only available from 2.3 onwards. + if sys.platform == 'win32': + return 'mbcs' + return 'utf-8' + +def getfilesystemencoding(): + try: + ret = __getfilesystemencoding() + + #Check if the encoding is actually there to be used! + if hasattr('', 'encode'): + ''.encode(ret) + if hasattr('', 'decode'): + ''.decode(ret) + + return ret + except: + return 'utf-8' + +file_system_encoding = getfilesystemencoding() + +if IS_PYTHON_3_ONWARDS: + unicode_type = str + bytes_type = bytes + +else: + unicode_type = unicode + bytes_type = str + + +def tounicode(s): + if hasattr(s, 'decode'): + if not isinstance(s, unicode_type): + # Depending on the platform variant we may have decode on string or not. + return s.decode(file_system_encoding) + return s + +def tobytes(s): + if hasattr(s, 'encode'): + if not isinstance(s, bytes_type): + return s.encode(file_system_encoding) + return s + +def toasciimxl(s): + # output for xml without a declared encoding + + # As the output is xml, we have to encode chars (< and > are ok as they're not accepted in the filesystem name -- + # if it was allowed, we'd have to do things more selectively so that < and > don't get wrongly replaced). + s = s.replace("&", "&") + + try: + ret = s.encode('ascii', 'xmlcharrefreplace') + except: + # use workaround + ret = '' + for c in s: + try: + ret += c.encode('ascii') + except: + try: + # Python 2: unicode is a valid identifier + ret += unicode("&#%d;") % ord(c) + except: + # Python 3: a string is already unicode, so, just doing it directly should work. 
+ ret += "&#%d;" % ord(c) + return ret + + +if __name__ == '__main__': + try: + # just give some time to get the reading threads attached (just in case) + import time + time.sleep(0.1) + except: + pass + + try: + executable = tounicode(native_path(sys.executable)) + except: + executable = tounicode(sys.executable) + + if sys.platform == "cygwin" and not executable.endswith(tounicode('.exe')): + executable += tounicode('.exe') + + + try: + major = str(sys.version_info[0]) + minor = str(sys.version_info[1]) + except AttributeError: + # older versions of python don't have version_info + import string + s = string.split(sys.version, ' ')[0] + s = string.split(s, '.') + major = s[0] + minor = s[1] + + s = tounicode('%s.%s') % (tounicode(major), tounicode(minor)) + + contents = [tounicode('')] + contents.append(tounicode('%s') % (tounicode(s),)) + + contents.append(tounicode('%s') % tounicode(executable)) + + # this is the new implementation to get the system folders + # (still need to check if it works in linux) + # (previously, we were getting the executable dir, but that is not always correct...) + prefix = tounicode(native_path(sys.prefix)) + # print_ 'prefix is', prefix + + + result = [] + + path_used = sys.path + try: + path_used = path_used[1:] # Use a copy (and don't include the directory of this script as a path.) + except: + pass # just ignore it... + + for p in path_used: + p = tounicode(native_path(p)) + + try: + import string # to be compatible with older versions + if string.find(p, prefix) == 0: # was startswith + result.append((p, True)) + else: + result.append((p, False)) + except (ImportError, AttributeError): + # python 3k also does not have it + # jython may not have it (depending on how are things configured) + if p.startswith(prefix): # was startswith + result.append((p, True)) + else: + result.append((p, False)) + + for p, b in result: + if b: + contents.append(tounicode('%s') % (p,)) + else: + contents.append(tounicode('%s') % (p,)) + + # no compiled libs + # nor forced libs + + for builtinMod in sys.builtin_module_names: + contents.append(tounicode('%s') % tounicode(builtinMod)) + + + contents.append(tounicode('')) + unic = tounicode('\n').join(contents) + inasciixml = toasciimxl(unic) + if IS_PYTHON_3_ONWARDS: + # This is the 'official' way of writing binary output in Py3K (see: http://bugs.python.org/issue4571) + sys.stdout.buffer.write(inasciixml) + else: + sys.stdout.write(inasciixml) + + sys.stdout.flush() + sys.stderr.flush() diff --git a/ptvsd/pydevd/jython_test_deps/ant.jar b/ptvsd/pydevd/jython_test_deps/ant.jar new file mode 100644 index 00000000..24641e74 Binary files /dev/null and b/ptvsd/pydevd/jython_test_deps/ant.jar differ diff --git a/ptvsd/pydevd/jython_test_deps/junit.jar b/ptvsd/pydevd/jython_test_deps/junit.jar new file mode 100644 index 00000000..5b4bb849 Binary files /dev/null and b/ptvsd/pydevd/jython_test_deps/junit.jar differ diff --git a/ptvsd/pydevd/pycompletionserver.py b/ptvsd/pydevd/pycompletionserver.py new file mode 100644 index 00000000..d73c9020 --- /dev/null +++ b/ptvsd/pydevd/pycompletionserver.py @@ -0,0 +1,405 @@ +''' +Entry-point module to start the code-completion server for PyDev. 
+ +@author Fabio Zadrozny +''' +import sys +IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3 + +if not IS_PYTHON_3_ONWARDS: + import __builtin__ +else: + import builtins as __builtin__ # Python 3.0 + +from _pydevd_bundle.pydevd_constants import IS_JYTHON + +if IS_JYTHON: + import java.lang # @UnresolvedImport + SERVER_NAME = 'jycompletionserver' + from _pydev_bundle import _pydev_jy_imports_tipper + _pydev_imports_tipper = _pydev_jy_imports_tipper + +else: + # it is python + SERVER_NAME = 'pycompletionserver' + from _pydev_bundle import _pydev_imports_tipper + + +from _pydev_imps._pydev_saved_modules import socket + +import sys +if sys.platform == "darwin": + # See: https://sourceforge.net/projects/pydev/forums/forum/293649/topic/3454227 + try: + import _CF # Don't fail if it doesn't work -- do it because it must be loaded on the main thread! @UnresolvedImport @UnusedImport + except: + pass + + +# initial sys.path +_sys_path = [] +for p in sys.path: + # changed to be compatible with 1.5 + _sys_path.append(p) + +# initial sys.modules +_sys_modules = {} +for name, mod in sys.modules.items(): + _sys_modules[name] = mod + + +import traceback + +from _pydev_imps._pydev_saved_modules import time + +try: + import StringIO +except: + import io as StringIO #Python 3.0 + +try: + from urllib import quote_plus, unquote_plus +except ImportError: + from urllib.parse import quote_plus, unquote_plus #Python 3.0 + +INFO1 = 1 +INFO2 = 2 +WARN = 4 +ERROR = 8 + +DEBUG = INFO1 | ERROR + +def dbg(s, prior): + if prior & DEBUG != 0: + sys.stdout.write('%s\n' % (s,)) +# f = open('c:/temp/test.txt', 'a') +# print_ >> f, s +# f.close() + +from _pydev_bundle import pydev_localhost +HOST = pydev_localhost.get_localhost() # Symbolic name meaning the local host + +MSG_KILL_SERVER = '@@KILL_SERVER_END@@' +MSG_COMPLETIONS = '@@COMPLETIONS' +MSG_END = 'END@@' +MSG_INVALID_REQUEST = '@@INVALID_REQUEST' +MSG_JYTHON_INVALID_REQUEST = '@@JYTHON_INVALID_REQUEST' +MSG_CHANGE_DIR = '@@CHANGE_DIR:' +MSG_OK = '@@MSG_OK_END@@' +MSG_IMPORTS = '@@IMPORTS:' +MSG_PYTHONPATH = '@@PYTHONPATH_END@@' +MSG_CHANGE_PYTHONPATH = '@@CHANGE_PYTHONPATH:' +MSG_JEDI = '@@MSG_JEDI:' +MSG_SEARCH = '@@SEARCH' + +BUFFER_SIZE = 1024 + + + +currDirModule = None + +def complete_from_dir(directory): + ''' + This is necessary so that we get the imports from the same directory where the file + we are completing is located. 
+ ''' + global currDirModule + if currDirModule is not None: + if len(sys.path) > 0 and sys.path[0] == currDirModule: + del sys.path[0] + + currDirModule = directory + sys.path.insert(0, directory) + + +def change_python_path(pythonpath): + '''Changes the pythonpath (clears all the previous pythonpath) + + @param pythonpath: string with paths separated by | + ''' + + split = pythonpath.split('|') + sys.path = [] + for path in split: + path = path.strip() + if len(path) > 0: + sys.path.append(path) + + +class Processor: + + def __init__(self): + # nothing to do + return + + def remove_invalid_chars(self, msg): + try: + msg = str(msg) + except UnicodeDecodeError: + pass + + if msg: + try: + return quote_plus(msg) + except: + sys.stdout.write('error making quote plus in %s\n' % (msg,)) + raise + return ' ' + + def format_completion_message(self, defFile, completionsList): + ''' + Format the completions suggestions in the following format: + @@COMPLETIONS(modFile(token,description),(token,description),(token,description))END@@ + ''' + compMsg = [] + compMsg.append('%s' % defFile) + for tup in completionsList: + compMsg.append(',') + + compMsg.append('(') + compMsg.append(str(self.remove_invalid_chars(tup[0]))) # token + compMsg.append(',') + compMsg.append(self.remove_invalid_chars(tup[1])) # description + + if(len(tup) > 2): + compMsg.append(',') + compMsg.append(self.remove_invalid_chars(tup[2])) # args - only if function. + + if(len(tup) > 3): + compMsg.append(',') + compMsg.append(self.remove_invalid_chars(tup[3])) # TYPE + + compMsg.append(')') + + return '%s(%s)%s' % (MSG_COMPLETIONS, ''.join(compMsg), MSG_END) + +class Exit(Exception): + pass + +class CompletionServer: + + def __init__(self, port): + self.ended = False + self.port = port + self.socket = None # socket to send messages. + self.exit_process_on_kill = True + self.processor = Processor() + + + def connect_to_server(self): + from _pydev_imps._pydev_saved_modules import socket + + self.socket = s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + try: + s.connect((HOST, self.port)) + except: + sys.stderr.write('Error on connect_to_server with parameters: host: %s port: %s\n' % (HOST, self.port)) + raise + + def get_completions_message(self, defFile, completionsList): + ''' + get message with completions. + ''' + return self.processor.format_completion_message(defFile, completionsList) + + def get_token_and_data(self, data): + ''' + When we receive this, we have 'token):data' + ''' + token = '' + for c in data: + if c != ')': + token = token + c + else: + break; + + return token, data.lstrip(token + '):') + + def emulated_sendall(self, msg): + MSGLEN = 1024 * 20 + + totalsent = 0 + while totalsent < MSGLEN: + sent = self.socket.send(msg[totalsent:]) + if sent == 0: + return + totalsent = totalsent + sent + + + def send(self, msg): + if not hasattr(self.socket, 'sendall'): + #Older versions (jython 2.1) + self.emulated_sendall(msg) + else: + if IS_PYTHON_3_ONWARDS: + self.socket.sendall(bytearray(msg, 'utf-8')) + else: + self.socket.sendall(msg) + + + def run(self): + # Echo server program + try: + from _pydev_bundle import _pydev_log + log = _pydev_log.Log() + + dbg(SERVER_NAME + ' connecting to java server on %s (%s)' % (HOST, self.port) , INFO1) + # after being connected, create a socket as a client. 
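            # A minimal illustration of the wire framing the loop below handles (the payload
            # shown is hypothetical; the markers are the MSG_* constants defined above):
            #
            #   request  : '@@IMPORTS:' + quote_plus(payload) + 'END@@'
            #   response : '@@COMPLETIONS(modFile(token,description),(token,description))END@@'
            #
            # Requests are accumulated from the socket until MSG_END ('END@@') appears, the
            # prefix selects the handler, and replies are built by
            # Processor.format_completion_message().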
+ self.connect_to_server() + + dbg(SERVER_NAME + ' Connected to java server', INFO1) + + + while not self.ended: + data = '' + + while data.find(MSG_END) == -1: + received = self.socket.recv(BUFFER_SIZE) + if len(received) == 0: + raise Exit() # ok, connection ended + if IS_PYTHON_3_ONWARDS: + data = data + received.decode('utf-8') + else: + data = data + received + + try: + try: + if data.find(MSG_KILL_SERVER) != -1: + dbg(SERVER_NAME + ' kill message received', INFO1) + # break if we received kill message. + self.ended = True + raise Exit() + + dbg(SERVER_NAME + ' starting keep alive thread', INFO2) + + if data.find(MSG_PYTHONPATH) != -1: + comps = [] + for p in _sys_path: + comps.append((p, ' ')) + self.send(self.get_completions_message(None, comps)) + + else: + data = data[:data.rfind(MSG_END)] + + if data.startswith(MSG_IMPORTS): + data = data[len(MSG_IMPORTS):] + data = unquote_plus(data) + defFile, comps = _pydev_imports_tipper.generate_tip(data, log) + self.send(self.get_completions_message(defFile, comps)) + + elif data.startswith(MSG_CHANGE_PYTHONPATH): + data = data[len(MSG_CHANGE_PYTHONPATH):] + data = unquote_plus(data) + change_python_path(data) + self.send(MSG_OK) + + elif data.startswith(MSG_JEDI): + data = data[len(MSG_JEDI):] + data = unquote_plus(data) + line, column, encoding, path, source = data.split('|', 4) + try: + import jedi # @UnresolvedImport + except: + self.send(self.get_completions_message(None, [('Error on import jedi', 'Error importing jedi', '')])) + else: + script = jedi.Script( + # Line +1 because it expects lines 1-based (and col 0-based) + source=source, + line=int(line) + 1, + column=int(column), + source_encoding=encoding, + path=path, + ) + lst = [] + for completion in script.completions(): + t = completion.type + if t == 'class': + t = '1' + + elif t == 'function': + t = '2' + + elif t == 'import': + t = '0' + + elif t == 'keyword': + continue # Keywords are already handled in PyDev + + elif t == 'statement': + t = '3' + + else: + t = '-1' + + # gen list(tuple(name, doc, args, type)) + lst.append((completion.name, '', '', t)) + self.send(self.get_completions_message('empty', lst)) + + elif data.startswith(MSG_SEARCH): + data = data[len(MSG_SEARCH):] + data = unquote_plus(data) + (f, line, col), foundAs = _pydev_imports_tipper.search_definition(data) + self.send(self.get_completions_message(f, [(line, col, foundAs)])) + + elif data.startswith(MSG_CHANGE_DIR): + data = data[len(MSG_CHANGE_DIR):] + data = unquote_plus(data) + complete_from_dir(data) + self.send(MSG_OK) + + else: + self.send(MSG_INVALID_REQUEST) + except Exit: + e = sys.exc_info()[1] + msg = self.get_completions_message(None, [('Exit:', 'SystemExit', '')]) + try: + self.send(msg) + except socket.error: + pass # Ok, may be closed already + + raise e # raise original error. 
+ + except: + dbg(SERVER_NAME + ' exception occurred', ERROR) + s = StringIO.StringIO() + traceback.print_exc(file=s) + + err = s.getvalue() + dbg(SERVER_NAME + ' received error: ' + str(err), ERROR) + msg = self.get_completions_message(None, [('ERROR:', '%s\nLog:%s' % (err, log.get_contents()), '')]) + try: + self.send(msg) + except socket.error: + pass # Ok, may be closed already + + + finally: + log.clear_log() + + self.socket.close() + self.ended = True + raise Exit() # connection broken + + + except Exit: + if self.exit_process_on_kill: + sys.exit(0) + # No need to log SystemExit error + except: + s = StringIO.StringIO() + exc_info = sys.exc_info() + + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s) + err = s.getvalue() + dbg(SERVER_NAME + ' received error: ' + str(err), ERROR) + raise + + + +if __name__ == '__main__': + + port = int(sys.argv[1]) # this is from where we want to receive messages. + + t = CompletionServer(port) + dbg(SERVER_NAME + ' will start', INFO1) + t.run() diff --git a/ptvsd/pydevd/pydev_app_engine_debug_startup.py b/ptvsd/pydevd/pydev_app_engine_debug_startup.py new file mode 100644 index 00000000..464f0ddf --- /dev/null +++ b/ptvsd/pydevd/pydev_app_engine_debug_startup.py @@ -0,0 +1,21 @@ +if False: + config = None + + +# See: https://docs.google.com/document/d/1CCSaRiIWCLgbD3OwmuKsRoHHDfBffbROWyVWWL0ZXN4/edit +if ':' not in config.version_id: + # The default server version_id does not contain ':' + import json + import os + import sys + + startup = config.python_config.startup_args + if not startup: + raise AssertionError('Expected --python_startup_args to be passed from the pydev debugger.') + + setup = json.loads(startup) + pydevd_path = setup['pydevd'] + sys.path.append(os.path.dirname(pydevd_path)) + + import pydevd + pydevd.settrace(setup['client'], port=setup['port'], suspend=False, trace_only_current_thread=False) diff --git a/ptvsd/pydevd/pydev_coverage.py b/ptvsd/pydevd/pydev_coverage.py new file mode 100644 index 00000000..279ae65a --- /dev/null +++ b/ptvsd/pydevd/pydev_coverage.py @@ -0,0 +1,62 @@ +''' +Entry point module to run code-coverage. +''' + +def execute(): + import os + import sys + + files = None + if 'combine' not in sys.argv: + + if '--pydev-analyze' in sys.argv: + + #Ok, what we want here is having the files passed through stdin (because + #there may be too many files for passing in the command line -- we could + #just pass a dir and make the find files here, but as that's already + #given in the java side, let's just gather that info here). + sys.argv.remove('--pydev-analyze') + try: + s = raw_input() + except: + s = input() + s = s.replace('\r', '') + s = s.replace('\n', '') + files = s.split('|') + files = [v for v in files if len(v) > 0] + + #Note that in this case we'll already be in the working dir with the coverage files, so, the + #coverage file location is not passed. + + else: + #For all commands, the coverage file is configured in pydev, and passed as the first argument + #in the command line, so, let's make sure this gets to the coverage module. 
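            # Illustrative call shape (the script name and paths are hypothetical): PyDev passes
            # the coverage data file as the first argument, followed by a normal coverage.py
            # command line, e.g.
            #
            #   python pydev_coverage.py /tmp/.coverage run some_script.py
            #
            # The first argument is consumed below via COVERAGE_FILE and everything that remains
            # in sys.argv is forwarded to coverage.cmdline.main().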
+ os.environ['COVERAGE_FILE'] = sys.argv[1] + del sys.argv[1] + + try: + import coverage #@UnresolvedImport + except: + sys.stderr.write('Error: coverage module could not be imported\n') + sys.stderr.write('Please make sure that the coverage module (http://nedbatchelder.com/code/coverage/)\n') + sys.stderr.write('is properly installed in your interpreter: %s\n' % (sys.executable,)) + + import traceback;traceback.print_exc() + return + + version = tuple(map(int, coverage.__version__.split('.')[:2])) + if version < (4, 3): + sys.stderr.write('Error: minimum supported coverage version is 4.3.\nFound: %s\nLocation: %s' % ('.'.join(str(x) for x in version), coverage.__file__)) + sys.exit(1) + + #print(coverage.__version__) TODO: Check if the version is a version we support (should be at least 3.4) -- note that maybe the attr is not there. + from coverage.cmdline import main #@UnresolvedImport + + if files is not None: + sys.argv.append('xml') + sys.argv += files + + main() + +if __name__ == '__main__': + execute() \ No newline at end of file diff --git a/ptvsd/pydevd/pydev_ipython/README b/ptvsd/pydevd/pydev_ipython/README new file mode 100644 index 00000000..185d417d --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/README @@ -0,0 +1,8 @@ +# Parts of IPython, files from: https://github.com/ipython/ipython/tree/rel-1.0.0/IPython +# The files in this package are extracted from IPython to aid the main loop integration +# See tests_mainloop for some manually runable tests + +# What we are doing is reusing the "inputhook" functionality (i.e. what in IPython +# ends up on PyOS_InputHook) and using it in the pydevconsole context. +# Rather that having the callbacks called in PyOS_InputHook, we use a custom XML-RPC +# Server (HookableXMLRPCServer) that calls the inputhook when idle diff --git a/ptvsd/pydevd/pydev_ipython/__init__.py b/ptvsd/pydevd/pydev_ipython/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/pydev_ipython/inputhook.py b/ptvsd/pydevd/pydev_ipython/inputhook.py new file mode 100644 index 00000000..b2769fd3 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhook.py @@ -0,0 +1,579 @@ +# coding: utf-8 +""" +Inputhook management for GUI event loop integration. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +import select + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# Constants for identifying the GUI toolkits. +GUI_WX = 'wx' +GUI_QT = 'qt' +GUI_QT4 = 'qt4' +GUI_QT5 = 'qt5' +GUI_GTK = 'gtk' +GUI_TK = 'tk' +GUI_OSX = 'osx' +GUI_GLUT = 'glut' +GUI_PYGLET = 'pyglet' +GUI_GTK3 = 'gtk3' +GUI_NONE = 'none' # i.e. 
disable + +#----------------------------------------------------------------------------- +# Utilities +#----------------------------------------------------------------------------- + +def ignore_CTRL_C(): + """Ignore CTRL+C (not implemented).""" + pass + +def allow_CTRL_C(): + """Take CTRL+C into account (not implemented).""" + pass + +#----------------------------------------------------------------------------- +# Main InputHookManager class +#----------------------------------------------------------------------------- + + +class InputHookManager(object): + """Manage PyOS_InputHook for different GUI toolkits. + + This class installs various hooks under ``PyOSInputHook`` to handle + GUI event loop integration. + """ + + def __init__(self): + self._return_control_callback = None + self._apps = {} + self._reset() + self.pyplot_imported = False + + def _reset(self): + self._callback_pyfunctype = None + self._callback = None + self._current_gui = None + + def set_return_control_callback(self, return_control_callback): + self._return_control_callback = return_control_callback + + def get_return_control_callback(self): + return self._return_control_callback + + def return_control(self): + return self._return_control_callback() + + def get_inputhook(self): + return self._callback + + def set_inputhook(self, callback): + """Set inputhook to callback.""" + # We don't (in the context of PyDev console) actually set PyOS_InputHook, but rather + # while waiting for input on xmlrpc we run this code + self._callback = callback + + def clear_inputhook(self, app=None): + """Clear input hook. + + Parameters + ---------- + app : optional, ignored + This parameter is allowed only so that clear_inputhook() can be + called with a similar interface as all the ``enable_*`` methods. But + the actual value of the parameter is ignored. This uniform interface + makes it easier to have user-level entry points in the main IPython + app like :meth:`enable_gui`.""" + self._reset() + + def clear_app_refs(self, gui=None): + """Clear IPython's internal reference to an application instance. + + Whenever we create an app for a user on qt4 or wx, we hold a + reference to the app. This is needed because in some cases bad things + can happen if a user doesn't hold a reference themselves. This + method is provided to clear the references we are holding. + + Parameters + ---------- + gui : None or str + If None, clear all app references. If ('wx', 'qt4') clear + the app for that toolkit. References are not held for gtk or tk + as those toolkits don't have the notion of an app. + """ + if gui is None: + self._apps = {} + elif gui in self._apps: + del self._apps[gui] + + def enable_wx(self, app=None): + """Enable event loop integration with wxPython. + + Parameters + ---------- + app : WX Application, optional. + Running application to use. If not given, we probe WX for an + existing application object, and create a new one if none is found. + + Notes + ----- + This methods sets the ``PyOS_InputHook`` for wxPython, which allows + the wxPython to integrate with terminal based applications like + IPython. + + If ``app`` is not given we probe for an existing one, and return it if + found. 
If no existing app is found, we create an :class:`wx.App` as + follows:: + + import wx + app = wx.App(redirect=False, clearSigInt=False) + """ + import wx + from distutils.version import LooseVersion as V + wx_version = V(wx.__version__).version # @UndefinedVariable + + if wx_version < [2, 8]: + raise ValueError("requires wxPython >= 2.8, but you have %s" % wx.__version__) # @UndefinedVariable + + from pydev_ipython.inputhookwx import inputhook_wx + self.set_inputhook(inputhook_wx) + self._current_gui = GUI_WX + + if app is None: + app = wx.GetApp() # @UndefinedVariable + if app is None: + app = wx.App(redirect=False, clearSigInt=False) # @UndefinedVariable + app._in_event_loop = True + self._apps[GUI_WX] = app + return app + + def disable_wx(self): + """Disable event loop integration with wxPython. + + This merely sets PyOS_InputHook to NULL. + """ + if GUI_WX in self._apps: + self._apps[GUI_WX]._in_event_loop = False + self.clear_inputhook() + + def enable_qt4(self, app=None): + """Enable event loop integration with PyQt4. + + Parameters + ---------- + app : Qt Application, optional. + Running application to use. If not given, we probe Qt for an + existing application object, and create a new one if none is found. + + Notes + ----- + This methods sets the PyOS_InputHook for PyQt4, which allows + the PyQt4 to integrate with terminal based applications like + IPython. + + If ``app`` is not given we probe for an existing one, and return it if + found. If no existing app is found, we create an :class:`QApplication` + as follows:: + + from PyQt4 import QtCore + app = QtGui.QApplication(sys.argv) + """ + from pydev_ipython.inputhookqt4 import create_inputhook_qt4 + app, inputhook_qt4 = create_inputhook_qt4(self, app) + self.set_inputhook(inputhook_qt4) + + self._current_gui = GUI_QT4 + app._in_event_loop = True + self._apps[GUI_QT4] = app + return app + + def disable_qt4(self): + """Disable event loop integration with PyQt4. + + This merely sets PyOS_InputHook to NULL. + """ + if GUI_QT4 in self._apps: + self._apps[GUI_QT4]._in_event_loop = False + self.clear_inputhook() + + def enable_qt5(self, app=None): + from pydev_ipython.inputhookqt5 import create_inputhook_qt5 + app, inputhook_qt5 = create_inputhook_qt5(self, app) + self.set_inputhook(inputhook_qt5) + + self._current_gui = GUI_QT5 + app._in_event_loop = True + self._apps[GUI_QT5] = app + return app + + def disable_qt5(self): + if GUI_QT5 in self._apps: + self._apps[GUI_QT5]._in_event_loop = False + self.clear_inputhook() + + def enable_gtk(self, app=None): + """Enable event loop integration with PyGTK. + + Parameters + ---------- + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. + + Notes + ----- + This methods sets the PyOS_InputHook for PyGTK, which allows + the PyGTK to integrate with terminal based applications like + IPython. + """ + from pydev_ipython.inputhookgtk import create_inputhook_gtk + self.set_inputhook(create_inputhook_gtk(self._stdin_file)) + self._current_gui = GUI_GTK + + def disable_gtk(self): + """Disable event loop integration with PyGTK. + + This merely sets PyOS_InputHook to NULL. + """ + self.clear_inputhook() + + def enable_tk(self, app=None): + """Enable event loop integration with Tk. + + Parameters + ---------- + app : toplevel :class:`Tkinter.Tk` widget, optional. + Running toplevel widget to use. If not given, we probe Tk for an + existing one, and create a new one if none is found. 
+ + Notes + ----- + If you have already created a :class:`Tkinter.Tk` object, the only + thing done by this method is to register with the + :class:`InputHookManager`, since creating that object automatically + sets ``PyOS_InputHook``. + """ + self._current_gui = GUI_TK + if app is None: + try: + import Tkinter as _TK + except: + # Python 3 + import tkinter as _TK # @UnresolvedImport + app = _TK.Tk() + app.withdraw() + self._apps[GUI_TK] = app + + from pydev_ipython.inputhooktk import create_inputhook_tk + self.set_inputhook(create_inputhook_tk(app)) + return app + + def disable_tk(self): + """Disable event loop integration with Tkinter. + + This merely sets PyOS_InputHook to NULL. + """ + self.clear_inputhook() + + + def enable_glut(self, app=None): + """ Enable event loop integration with GLUT. + + Parameters + ---------- + + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. + + Notes + ----- + + This methods sets the PyOS_InputHook for GLUT, which allows the GLUT to + integrate with terminal based applications like IPython. Due to GLUT + limitations, it is currently not possible to start the event loop + without first creating a window. You should thus not create another + window but use instead the created one. See 'gui-glut.py' in the + docs/examples/lib directory. + + The default screen mode is set to: + glut.GLUT_DOUBLE | glut.GLUT_RGBA | glut.GLUT_DEPTH + """ + + import OpenGL.GLUT as glut # @UnresolvedImport + from pydev_ipython.inputhookglut import glut_display_mode, \ + glut_close, glut_display, \ + glut_idle, inputhook_glut + + if GUI_GLUT not in self._apps: + glut.glutInit(sys.argv) + glut.glutInitDisplayMode(glut_display_mode) + # This is specific to freeglut + if bool(glut.glutSetOption): + glut.glutSetOption(glut.GLUT_ACTION_ON_WINDOW_CLOSE, + glut.GLUT_ACTION_GLUTMAINLOOP_RETURNS) + glut.glutCreateWindow(sys.argv[0]) + glut.glutReshapeWindow(1, 1) + glut.glutHideWindow() + glut.glutWMCloseFunc(glut_close) + glut.glutDisplayFunc(glut_display) + glut.glutIdleFunc(glut_idle) + else: + glut.glutWMCloseFunc(glut_close) + glut.glutDisplayFunc(glut_display) + glut.glutIdleFunc(glut_idle) + self.set_inputhook(inputhook_glut) + self._current_gui = GUI_GLUT + self._apps[GUI_GLUT] = True + + + def disable_glut(self): + """Disable event loop integration with glut. + + This sets PyOS_InputHook to NULL and set the display function to a + dummy one and set the timer to a dummy timer that will be triggered + very far in the future. + """ + import OpenGL.GLUT as glut # @UnresolvedImport + from glut_support import glutMainLoopEvent # @UnresolvedImport + + glut.glutHideWindow() # This is an event to be processed below + glutMainLoopEvent() + self.clear_inputhook() + + def enable_pyglet(self, app=None): + """Enable event loop integration with pyglet. + + Parameters + ---------- + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. + + Notes + ----- + This methods sets the ``PyOS_InputHook`` for pyglet, which allows + pyglet to integrate with terminal based applications like + IPython. + + """ + from pydev_ipython.inputhookpyglet import inputhook_pyglet + self.set_inputhook(inputhook_pyglet) + self._current_gui = GUI_PYGLET + return app + + def disable_pyglet(self): + """Disable event loop integration with pyglet. + + This merely sets PyOS_InputHook to NULL. 
+ """ + self.clear_inputhook() + + def enable_gtk3(self, app=None): + """Enable event loop integration with Gtk3 (gir bindings). + + Parameters + ---------- + app : ignored + Ignored, it's only a placeholder to keep the call signature of all + gui activation methods consistent, which simplifies the logic of + supporting magics. + + Notes + ----- + This methods sets the PyOS_InputHook for Gtk3, which allows + the Gtk3 to integrate with terminal based applications like + IPython. + """ + from pydev_ipython.inputhookgtk3 import create_inputhook_gtk3 + self.set_inputhook(create_inputhook_gtk3(self._stdin_file)) + self._current_gui = GUI_GTK + + def disable_gtk3(self): + """Disable event loop integration with PyGTK. + + This merely sets PyOS_InputHook to NULL. + """ + self.clear_inputhook() + + def enable_mac(self, app=None): + """ Enable event loop integration with MacOSX. + + We call function pyplot.pause, which updates and displays active + figure during pause. It's not MacOSX-specific, but it enables to + avoid inputhooks in native MacOSX backend. + Also we shouldn't import pyplot, until user does it. Cause it's + possible to choose backend before importing pyplot for the first + time only. + """ + def inputhook_mac(app=None): + if self.pyplot_imported: + pyplot = sys.modules['matplotlib.pyplot'] + try: + pyplot.pause(0.01) + except: + pass + else: + if 'matplotlib.pyplot' in sys.modules: + self.pyplot_imported = True + + self.set_inputhook(inputhook_mac) + self._current_gui = GUI_OSX + + def disable_mac(self): + self.clear_inputhook() + + def current_gui(self): + """Return a string indicating the currently active GUI or None.""" + return self._current_gui + +inputhook_manager = InputHookManager() + +enable_wx = inputhook_manager.enable_wx +disable_wx = inputhook_manager.disable_wx +enable_qt4 = inputhook_manager.enable_qt4 +disable_qt4 = inputhook_manager.disable_qt4 +enable_qt5 = inputhook_manager.enable_qt5 +disable_qt5 = inputhook_manager.disable_qt5 +enable_gtk = inputhook_manager.enable_gtk +disable_gtk = inputhook_manager.disable_gtk +enable_tk = inputhook_manager.enable_tk +disable_tk = inputhook_manager.disable_tk +enable_glut = inputhook_manager.enable_glut +disable_glut = inputhook_manager.disable_glut +enable_pyglet = inputhook_manager.enable_pyglet +disable_pyglet = inputhook_manager.disable_pyglet +enable_gtk3 = inputhook_manager.enable_gtk3 +disable_gtk3 = inputhook_manager.disable_gtk3 +enable_mac = inputhook_manager.enable_mac +disable_mac = inputhook_manager.disable_mac +clear_inputhook = inputhook_manager.clear_inputhook +set_inputhook = inputhook_manager.set_inputhook +current_gui = inputhook_manager.current_gui +clear_app_refs = inputhook_manager.clear_app_refs + +# We maintain this as stdin_ready so that the individual inputhooks +# can diverge as little as possible from their IPython sources +stdin_ready = inputhook_manager.return_control +set_return_control_callback = inputhook_manager.set_return_control_callback +get_return_control_callback = inputhook_manager.get_return_control_callback +get_inputhook = inputhook_manager.get_inputhook + +# Convenience function to switch amongst them +def enable_gui(gui=None, app=None): + """Switch amongst GUI input hooks by name. + + This is just a utility wrapper around the methods of the InputHookManager + object. + + Parameters + ---------- + gui : optional, string or None + If None (or 'none'), clears input hook, otherwise it must be one + of the recognized GUI names (see ``GUI_*`` constants in module). 
+ + app : optional, existing application object. + For toolkits that have the concept of a global app, you can supply an + existing one. If not given, the toolkit will be probed for one, and if + none is found, a new one will be created. Note that GTK does not have + this concept, and passing an app if ``gui=="GTK"`` will raise an error. + + Returns + ------- + The output of the underlying gui switch routine, typically the actual + PyOS_InputHook wrapper object or the GUI toolkit app created, if there was + one. + """ + + if get_return_control_callback() is None: + raise ValueError("A return_control_callback must be supplied as a reference before a gui can be enabled") + + guis = {GUI_NONE: clear_inputhook, + GUI_OSX: enable_mac, + GUI_TK: enable_tk, + GUI_GTK: enable_gtk, + GUI_WX: enable_wx, + GUI_QT: enable_qt4, # qt3 not supported + GUI_QT4: enable_qt4, + GUI_QT5: enable_qt5, + GUI_GLUT: enable_glut, + GUI_PYGLET: enable_pyglet, + GUI_GTK3: enable_gtk3, + } + try: + gui_hook = guis[gui] + except KeyError: + if gui is None or gui == '': + gui_hook = clear_inputhook + else: + e = "Invalid GUI request %r, valid ones are:%s" % (gui, guis.keys()) + raise ValueError(e) + return gui_hook(app) + +__all__ = [ + "GUI_WX", + "GUI_QT", + "GUI_QT4", + "GUI_QT5", + "GUI_GTK", + "GUI_TK", + "GUI_OSX", + "GUI_GLUT", + "GUI_PYGLET", + "GUI_GTK3", + "GUI_NONE", + + + "ignore_CTRL_C", + "allow_CTRL_C", + + "InputHookManager", + + "inputhook_manager", + + "enable_wx", + "disable_wx", + "enable_qt4", + "disable_qt4", + "enable_qt5", + "disable_qt5", + "enable_gtk", + "disable_gtk", + "enable_tk", + "disable_tk", + "enable_glut", + "disable_glut", + "enable_pyglet", + "disable_pyglet", + "enable_gtk3", + "disable_gtk3", + "enable_mac", + "disable_mac", + "clear_inputhook", + "set_inputhook", + "current_gui", + "clear_app_refs", + + "stdin_ready", + "set_return_control_callback", + "get_return_control_callback", + "get_inputhook", + + "enable_gui"] diff --git a/ptvsd/pydevd/pydev_ipython/inputhookglut.py b/ptvsd/pydevd/pydev_ipython/inputhookglut.py new file mode 100644 index 00000000..bbd6882f --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhookglut.py @@ -0,0 +1,153 @@ +# coding: utf-8 +""" +GLUT Inputhook support functions +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +# GLUT is quite an old library and it is difficult to ensure proper +# integration within IPython since original GLUT does not allow to handle +# events one by one. Instead, it requires for the mainloop to be entered +# and never returned (there is not even a function to exit he +# mainloop). Fortunately, there are alternatives such as freeglut +# (available for linux and windows) and the OSX implementation gives +# access to a glutCheckLoop() function that blocks itself until a new +# event is received. This means we have to setup the idle callback to +# ensure we got at least one event that will unblock the function. +# +# Furthermore, it is not possible to install these handlers without a window +# being first created. We choose to make this window invisible. This means that +# display mode options are set at this level and user won't be able to change +# them later without modifying the code. 
This should probably be made available +# via IPython options system. + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- +import os +import sys +from _pydev_imps._pydev_saved_modules import time +import signal +import OpenGL.GLUT as glut # @UnresolvedImport +import OpenGL.platform as platform # @UnresolvedImport +from timeit import default_timer as clock +from pydev_ipython.inputhook import stdin_ready + +#----------------------------------------------------------------------------- +# Constants +#----------------------------------------------------------------------------- + +# Frame per second : 60 +# Should probably be an IPython option +glut_fps = 60 + + +# Display mode : double buffeed + rgba + depth +# Should probably be an IPython option +glut_display_mode = (glut.GLUT_DOUBLE | + glut.GLUT_RGBA | + glut.GLUT_DEPTH) + +glutMainLoopEvent = None +if sys.platform == 'darwin': + try: + glutCheckLoop = platform.createBaseFunction( + 'glutCheckLoop', dll=platform.GLUT, resultType=None, + argTypes=[], + doc='glutCheckLoop( ) -> None', + argNames=(), + ) + except AttributeError: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions''' + '''Consider installing freeglut.''') + glutMainLoopEvent = glutCheckLoop +elif glut.HAVE_FREEGLUT: + glutMainLoopEvent = glut.glutMainLoopEvent +else: + raise RuntimeError( + '''Your glut implementation does not allow interactive sessions. ''' + '''Consider installing freeglut.''') + + +#----------------------------------------------------------------------------- +# Callback functions +#----------------------------------------------------------------------------- + +def glut_display(): + # Dummy display function + pass + +def glut_idle(): + # Dummy idle function + pass + +def glut_close(): + # Close function only hides the current window + glut.glutHideWindow() + glutMainLoopEvent() + +def glut_int_handler(signum, frame): + # Catch sigint and print the defautl message + signal.signal(signal.SIGINT, signal.default_int_handler) + print '\nKeyboardInterrupt' + # Need to reprint the prompt at this stage + + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- +def inputhook_glut(): + """Run the pyglet event loop by processing pending events only. + + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + + signal.signal(signal.SIGINT, glut_int_handler) + + try: + t = clock() + + # Make sure the default window is set after a window has been closed + if glut.glutGetWindow() == 0: + glut.glutSetWindow( 1 ) + glutMainLoopEvent() + return 0 + + while not stdin_ready(): + glutMainLoopEvent() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. 
+ # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass + return 0 diff --git a/ptvsd/pydevd/pydev_ipython/inputhookgtk.py b/ptvsd/pydevd/pydev_ipython/inputhookgtk.py new file mode 100644 index 00000000..53006cde --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhookgtk.py @@ -0,0 +1,36 @@ +# encoding: utf-8 +""" +Enable pygtk to be used interacive by setting PyOS_InputHook. + +Authors: Brian Granger +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import gtk, gobject # @UnresolvedImport + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + + +def _main_quit(*args, **kwargs): + gtk.main_quit() + return False + +def create_inputhook_gtk(stdin_file): + def inputhook_gtk(): + gobject.io_add_watch(stdin_file, gobject.IO_IN, _main_quit) + gtk.main() + return 0 + return inputhook_gtk + diff --git a/ptvsd/pydevd/pydev_ipython/inputhookgtk3.py b/ptvsd/pydevd/pydev_ipython/inputhookgtk3.py new file mode 100644 index 00000000..f2ca39f3 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhookgtk3.py @@ -0,0 +1,35 @@ +# encoding: utf-8 +""" +Enable Gtk3 to be used interacive by IPython. + +Authors: Thomi Richards +""" +#----------------------------------------------------------------------------- +# Copyright (c) 2012, the IPython Development Team. +# +# Distributed under the terms of the Modified BSD License. +# +# The full license is in the file COPYING.txt, distributed with this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from gi.repository import Gtk, GLib # @UnresolvedImport + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def _main_quit(*args, **kwargs): + Gtk.main_quit() + return False + + +def create_inputhook_gtk3(stdin_file): + def inputhook_gtk3(): + GLib.io_add_watch(stdin_file, GLib.IO_IN, _main_quit) + Gtk.main() + return 0 + return inputhook_gtk3 diff --git a/ptvsd/pydevd/pydev_ipython/inputhookpyglet.py b/ptvsd/pydevd/pydev_ipython/inputhookpyglet.py new file mode 100644 index 00000000..bf08afdc --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhookpyglet.py @@ -0,0 +1,92 @@ +# encoding: utf-8 +""" +Enable pyglet to be used interacive by setting PyOS_InputHook. + +Authors +------- + +* Nicolas P. 
Rougier +* Fernando Perez +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import sys +from _pydev_imps._pydev_saved_modules import time +from timeit import default_timer as clock +import pyglet # @UnresolvedImport +from pydev_ipython.inputhook import stdin_ready + + +# On linux only, window.flip() has a bug that causes an AttributeError on +# window close. For details, see: +# http://groups.google.com/group/pyglet-users/browse_thread/thread/47c1aab9aa4a3d23/c22f9e819826799e?#c22f9e819826799e + +if sys.platform.startswith('linux'): + def flip(window): + try: + window.flip() + except AttributeError: + pass +else: + def flip(window): + window.flip() + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def inputhook_pyglet(): + """Run the pyglet event loop by processing pending events only. + + This keeps processing pending events until stdin is ready. After + processing all pending events, a call to time.sleep is inserted. This is + needed, otherwise, CPU usage is at 100%. This sleep time should be tuned + though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + try: + t = clock() + while not stdin_ready(): + pyglet.clock.tick() + for window in pyglet.app.windows: + window.switch_to() + window.dispatch_events() + window.dispatch_event('on_draw') + flip(window) + + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + except KeyboardInterrupt: + pass + return 0 diff --git a/ptvsd/pydevd/pydev_ipython/inputhookqt4.py b/ptvsd/pydevd/pydev_ipython/inputhookqt4.py new file mode 100644 index 00000000..b7e1cf05 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhookqt4.py @@ -0,0 +1,196 @@ +# -*- coding: utf-8 -*- +""" +Qt4's inputhook support function + +Author: Christian Boos +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
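
# The hook defined below runs the Qt event loop in short slices instead of
# blocking in QCoreApplication.exec_(). A stripped-down sketch of that core
# idea (illustrative only; assumes PyQt4 is installed, a QApplication already
# exists, and stdin_ready is any callable reporting pending console input):

def _pump_qt_until_ready(stdin_ready):
    from PyQt4 import QtCore
    timer = QtCore.QTimer()
    event_loop = QtCore.QEventLoop()
    timer.timeout.connect(event_loop.quit)
    while not stdin_ready():
        timer.start(50)     # let the Qt event loop run for ~50 ms
        event_loop.exec_()  # returns once the timer triggers quit()
        timer.stop()
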
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import signal + +import threading + + +from pydev_ipython.qt_for_kernel import QtCore, QtGui +from pydev_ipython.inputhook import allow_CTRL_C, ignore_CTRL_C, stdin_ready + +# To minimise future merging complexity, rather than edit the entire code base below +# we fake InteractiveShell here +class InteractiveShell: + _instance = None + @classmethod + def instance(cls): + if cls._instance is None: + cls._instance = cls() + return cls._instance + def set_hook(self, *args, **kwargs): + # We don't consider the pre_prompt_hook because we don't have + # KeyboardInterrupts to consider since we are running under PyDev + pass + + +#----------------------------------------------------------------------------- +# Module Globals +#----------------------------------------------------------------------------- + +got_kbdint = False +sigint_timer = None + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def create_inputhook_qt4(mgr, app=None): + """Create an input hook for running the Qt4 application event loop. + + Parameters + ---------- + mgr : an InputHookManager + + app : Qt Application, optional. + Running application to use. If not given, we probe Qt for an + existing application object, and create a new one if none is found. + + Returns + ------- + A pair consisting of a Qt Application (either the one given or the + one found or created) and a inputhook. + + Notes + ----- + We use a custom input hook instead of PyQt4's default one, as it + interacts better with the readline packages (issue #481). + + The inputhook function works in tandem with a 'pre_prompt_hook' + which automatically restores the hook as an inputhook in case the + latter has been temporarily disabled after having intercepted a + KeyboardInterrupt. + """ + + if app is None: + app = QtCore.QCoreApplication.instance() + if app is None: + app = QtGui.QApplication([" "]) + + # Re-use previously created inputhook if any + ip = InteractiveShell.instance() + if hasattr(ip, '_inputhook_qt4'): + return app, ip._inputhook_qt4 + + # Otherwise create the inputhook_qt4/preprompthook_qt4 pair of + # hooks (they both share the got_kbdint flag) + + def inputhook_qt4(): + """PyOS_InputHook python hook for Qt4. + + Process pending Qt events and if there's no pending keyboard + input, spend a short slice of time (50ms) running the Qt event + loop. + + As a Python ctypes callback can't raise an exception, we catch + the KeyboardInterrupt and temporarily deactivate the hook, + which will let a *second* CTRL+C be processed normally and go + back to a clean prompt line. + """ + try: + allow_CTRL_C() + app = QtCore.QCoreApplication.instance() + if not app: # shouldn't happen, but safer if it happens anyway... + return 0 + app.processEvents(QtCore.QEventLoop.AllEvents, 300) + if not stdin_ready(): + # Generally a program would run QCoreApplication::exec() + # from main() to enter and process the Qt event loop until + # quit() or exit() is called and the program terminates. 
+ # + # For our input hook integration, we need to repeatedly + # enter and process the Qt event loop for only a short + # amount of time (say 50ms) to ensure that Python stays + # responsive to other user inputs. + # + # A naive approach would be to repeatedly call + # QCoreApplication::exec(), using a timer to quit after a + # short amount of time. Unfortunately, QCoreApplication + # emits an aboutToQuit signal before stopping, which has + # the undesirable effect of closing all modal windows. + # + # To work around this problem, we instead create a + # QEventLoop and call QEventLoop::exec(). Other than + # setting some state variables which do not seem to be + # used anywhere, the only thing QCoreApplication adds is + # the aboutToQuit signal which is precisely what we are + # trying to avoid. + timer = QtCore.QTimer() + event_loop = QtCore.QEventLoop() + timer.timeout.connect(event_loop.quit) + while not stdin_ready(): + timer.start(50) + event_loop.exec_() + timer.stop() + except KeyboardInterrupt: + global got_kbdint, sigint_timer + + ignore_CTRL_C() + got_kbdint = True + mgr.clear_inputhook() + + # This generates a second SIGINT so the user doesn't have to + # press CTRL+C twice to get a clean prompt. + # + # Since we can't catch the resulting KeyboardInterrupt here + # (because this is a ctypes callback), we use a timer to + # generate the SIGINT after we leave this callback. + # + # Unfortunately this doesn't work on Windows (SIGINT kills + # Python and CTRL_C_EVENT doesn't work). + if(os.name == 'posix'): + pid = os.getpid() + if(not sigint_timer): + sigint_timer = threading.Timer(.01, os.kill, + args=[pid, signal.SIGINT] ) + sigint_timer.start() + else: + print("\nKeyboardInterrupt - Ctrl-C again for new prompt") + + + except: # NO exceptions are allowed to escape from a ctypes callback + ignore_CTRL_C() + from traceback import print_exc + print_exc() + print("Got exception from inputhook_qt4, unregistering.") + mgr.clear_inputhook() + finally: + allow_CTRL_C() + return 0 + + def preprompthook_qt4(ishell): + """'pre_prompt_hook' used to restore the Qt4 input hook + + (in case the latter was temporarily deactivated after a + CTRL+C) + """ + global got_kbdint, sigint_timer + + if(sigint_timer): + sigint_timer.cancel() + sigint_timer = None + + if got_kbdint: + mgr.set_inputhook(inputhook_qt4) + got_kbdint = False + + ip._inputhook_qt4 = inputhook_qt4 + ip.set_hook('pre_prompt_hook', preprompthook_qt4) + + return app, inputhook_qt4 diff --git a/ptvsd/pydevd/pydev_ipython/inputhookqt5.py b/ptvsd/pydevd/pydev_ipython/inputhookqt5.py new file mode 100644 index 00000000..77b938b4 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhookqt5.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +""" +Qt5's inputhook support function + +Author: Christian Boos +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. 
+#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import os +import signal + +import threading + + +from pydev_ipython.qt_for_kernel import QtCore, QtGui +from pydev_ipython.inputhook import allow_CTRL_C, ignore_CTRL_C, stdin_ready + +# To minimise future merging complexity, rather than edit the entire code base below +# we fake InteractiveShell here +class InteractiveShell: + _instance = None + @classmethod + def instance(cls): + if cls._instance is None: + cls._instance = cls() + return cls._instance + def set_hook(self, *args, **kwargs): + # We don't consider the pre_prompt_hook because we don't have + # KeyboardInterrupts to consider since we are running under PyDev + pass + + +#----------------------------------------------------------------------------- +# Module Globals +#----------------------------------------------------------------------------- + +got_kbdint = False +sigint_timer = None + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def create_inputhook_qt5(mgr, app=None): + """Create an input hook for running the Qt5 application event loop. + + Parameters + ---------- + mgr : an InputHookManager + + app : Qt Application, optional. + Running application to use. If not given, we probe Qt for an + existing application object, and create a new one if none is found. + + Returns + ------- + A pair consisting of a Qt Application (either the one given or the + one found or created) and a inputhook. + + Notes + ----- + We use a custom input hook instead of PyQt5's default one, as it + interacts better with the readline packages (issue #481). + + The inputhook function works in tandem with a 'pre_prompt_hook' + which automatically restores the hook as an inputhook in case the + latter has been temporarily disabled after having intercepted a + KeyboardInterrupt. + """ + + if app is None: + app = QtCore.QCoreApplication.instance() + if app is None: + from PyQt5 import QtWidgets + app = QtWidgets.QApplication([" "]) + + # Re-use previously created inputhook if any + ip = InteractiveShell.instance() + if hasattr(ip, '_inputhook_qt5'): + return app, ip._inputhook_qt5 + + # Otherwise create the inputhook_qt5/preprompthook_qt5 pair of + # hooks (they both share the got_kbdint flag) + + def inputhook_qt5(): + """PyOS_InputHook python hook for Qt5. + + Process pending Qt events and if there's no pending keyboard + input, spend a short slice of time (50ms) running the Qt event + loop. + + As a Python ctypes callback can't raise an exception, we catch + the KeyboardInterrupt and temporarily deactivate the hook, + which will let a *second* CTRL+C be processed normally and go + back to a clean prompt line. + """ + try: + allow_CTRL_C() + app = QtCore.QCoreApplication.instance() + if not app: # shouldn't happen, but safer if it happens anyway... + return 0 + app.processEvents(QtCore.QEventLoop.AllEvents, 300) + if not stdin_ready(): + # Generally a program would run QCoreApplication::exec() + # from main() to enter and process the Qt event loop until + # quit() or exit() is called and the program terminates. 
+ # + # For our input hook integration, we need to repeatedly + # enter and process the Qt event loop for only a short + # amount of time (say 50ms) to ensure that Python stays + # responsive to other user inputs. + # + # A naive approach would be to repeatedly call + # QCoreApplication::exec(), using a timer to quit after a + # short amount of time. Unfortunately, QCoreApplication + # emits an aboutToQuit signal before stopping, which has + # the undesirable effect of closing all modal windows. + # + # To work around this problem, we instead create a + # QEventLoop and call QEventLoop::exec(). Other than + # setting some state variables which do not seem to be + # used anywhere, the only thing QCoreApplication adds is + # the aboutToQuit signal which is precisely what we are + # trying to avoid. + timer = QtCore.QTimer() + event_loop = QtCore.QEventLoop() + timer.timeout.connect(event_loop.quit) + while not stdin_ready(): + timer.start(50) + event_loop.exec_() + timer.stop() + except KeyboardInterrupt: + global got_kbdint, sigint_timer + + ignore_CTRL_C() + got_kbdint = True + mgr.clear_inputhook() + + # This generates a second SIGINT so the user doesn't have to + # press CTRL+C twice to get a clean prompt. + # + # Since we can't catch the resulting KeyboardInterrupt here + # (because this is a ctypes callback), we use a timer to + # generate the SIGINT after we leave this callback. + # + # Unfortunately this doesn't work on Windows (SIGINT kills + # Python and CTRL_C_EVENT doesn't work). + if(os.name == 'posix'): + pid = os.getpid() + if(not sigint_timer): + sigint_timer = threading.Timer(.01, os.kill, + args=[pid, signal.SIGINT] ) + sigint_timer.start() + else: + print("\nKeyboardInterrupt - Ctrl-C again for new prompt") + + + except: # NO exceptions are allowed to escape from a ctypes callback + ignore_CTRL_C() + from traceback import print_exc + print_exc() + print("Got exception from inputhook_qt5, unregistering.") + mgr.clear_inputhook() + finally: + allow_CTRL_C() + return 0 + + def preprompthook_qt5(ishell): + """'pre_prompt_hook' used to restore the Qt5 input hook + + (in case the latter was temporarily deactivated after a + CTRL+C) + """ + global got_kbdint, sigint_timer + + if(sigint_timer): + sigint_timer.cancel() + sigint_timer = None + + if got_kbdint: + mgr.set_inputhook(inputhook_qt5) + got_kbdint = False + + ip._inputhook_qt5 = inputhook_qt5 + ip.set_hook('pre_prompt_hook', preprompthook_qt5) + + return app, inputhook_qt5 diff --git a/ptvsd/pydevd/pydev_ipython/inputhooktk.py b/ptvsd/pydevd/pydev_ipython/inputhooktk.py new file mode 100644 index 00000000..e245cc05 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhooktk.py @@ -0,0 +1,23 @@ +# encoding: utf-8 +# Unlike what IPython does, we need to have an explicit inputhook because tkinter handles +# input hook in the C Source code + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from pydev_ipython.inputhook import stdin_ready + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +TCL_DONT_WAIT = 1 << 1 + +def create_inputhook_tk(app): + def inputhook_tk(): + while app.dooneevent(TCL_DONT_WAIT) == 1: + if stdin_ready(): + break + return 0 + return inputhook_tk diff --git a/ptvsd/pydevd/pydev_ipython/inputhookwx.py b/ptvsd/pydevd/pydev_ipython/inputhookwx.py new file mode 100644 
index 00000000..88fe2c6e --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/inputhookwx.py @@ -0,0 +1,166 @@ +# encoding: utf-8 +""" +Enable wxPython to be used interacive by setting PyOS_InputHook. + +Authors: Robin Dunn, Brian Granger, Ondrej Certik +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2008-2011 The IPython Development Team +# +# Distributed under the terms of the BSD License. The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +import sys +import signal +from _pydev_imps._pydev_saved_modules import time +from timeit import default_timer as clock +import wx + +from pydev_ipython.inputhook import stdin_ready + + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def inputhook_wx1(): + """Run the wx event loop by processing pending events only. + + This approach seems to work, but its performance is not great as it + relies on having PyOS_InputHook called regularly. + """ + try: + app = wx.GetApp() # @UndefinedVariable + if app is not None: + assert wx.Thread_IsMain() # @UndefinedVariable + + # Make a temporary event loop and process system events until + # there are no more waiting, then allow idle events (which + # will also deal with pending or posted wx events.) + evtloop = wx.EventLoop() # @UndefinedVariable + ea = wx.EventLoopActivator(evtloop) # @UndefinedVariable + while evtloop.Pending(): + evtloop.Dispatch() + app.ProcessIdle() + del ea + except KeyboardInterrupt: + pass + return 0 + +class EventLoopTimer(wx.Timer): # @UndefinedVariable + + def __init__(self, func): + self.func = func + wx.Timer.__init__(self) # @UndefinedVariable + + def Notify(self): + self.func() + +class EventLoopRunner(object): + + def Run(self, time): + self.evtloop = wx.EventLoop() # @UndefinedVariable + self.timer = EventLoopTimer(self.check_stdin) + self.timer.Start(time) + self.evtloop.Run() + + def check_stdin(self): + if stdin_ready(): + self.timer.Stop() + self.evtloop.Exit() + +def inputhook_wx2(): + """Run the wx event loop, polling for stdin. + + This version runs the wx eventloop for an undetermined amount of time, + during which it periodically checks to see if anything is ready on + stdin. If anything is ready on stdin, the event loop exits. + + The argument to elr.Run controls how often the event loop looks at stdin. + This determines the responsiveness at the keyboard. A setting of 1000 + enables a user to type at most 1 char per second. I have found that a + setting of 10 gives good keyboard response. We can shorten it further, + but eventually performance would suffer from calling select/kbhit too + often. + """ + try: + app = wx.GetApp() # @UndefinedVariable + if app is not None: + assert wx.Thread_IsMain() # @UndefinedVariable + elr = EventLoopRunner() + # As this time is made shorter, keyboard response improves, but idle + # CPU load goes up. 10 ms seems like a good compromise. + elr.Run(time=10) # CHANGE time here to control polling interval + except KeyboardInterrupt: + pass + return 0 + +def inputhook_wx3(): + """Run the wx event loop by processing pending events only. 
+ + This is like inputhook_wx1, but it keeps processing pending events + until stdin is ready. After processing all pending events, a call to + time.sleep is inserted. This is needed, otherwise, CPU usage is at 100%. + This sleep time should be tuned though for best performance. + """ + # We need to protect against a user pressing Control-C when IPython is + # idle and this is running. We trap KeyboardInterrupt and pass. + try: + app = wx.GetApp() # @UndefinedVariable + if app is not None: + assert wx.Thread_IsMain() # @UndefinedVariable + + # The import of wx on Linux sets the handler for signal.SIGINT + # to 0. This is a bug in wx or gtk. We fix by just setting it + # back to the Python default. + if not callable(signal.getsignal(signal.SIGINT)): + signal.signal(signal.SIGINT, signal.default_int_handler) + + evtloop = wx.EventLoop() # @UndefinedVariable + ea = wx.EventLoopActivator(evtloop) # @UndefinedVariable + t = clock() + while not stdin_ready(): + while evtloop.Pending(): + t = clock() + evtloop.Dispatch() + app.ProcessIdle() + # We need to sleep at this point to keep the idle CPU load + # low. However, if sleep to long, GUI response is poor. As + # a compromise, we watch how often GUI events are being processed + # and switch between a short and long sleep time. Here are some + # stats useful in helping to tune this. + # time CPU load + # 0.001 13% + # 0.005 3% + # 0.01 1.5% + # 0.05 0.5% + used_time = clock() - t + if used_time > 10.0: + # print 'Sleep for 1 s' # dbg + time.sleep(1.0) + elif used_time > 0.1: + # Few GUI events coming in, so we can sleep longer + # print 'Sleep for 0.05 s' # dbg + time.sleep(0.05) + else: + # Many GUI events coming in, so sleep only very little + time.sleep(0.001) + del ea + except KeyboardInterrupt: + pass + return 0 + +if sys.platform == 'darwin': + # On OSX, evtloop.Pending() always returns True, regardless of there being + # any events pending. As such we can't use implementations 1 or 3 of the + # inputhook as those depend on a pending/dispatch loop. + inputhook_wx = inputhook_wx2 +else: + # This is our default implementation + inputhook_wx = inputhook_wx3 diff --git a/ptvsd/pydevd/pydev_ipython/matplotlibtools.py b/ptvsd/pydevd/pydev_ipython/matplotlibtools.py new file mode 100644 index 00000000..132cb5b6 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/matplotlibtools.py @@ -0,0 +1,148 @@ + +import sys + +backends = {'tk': 'TkAgg', + 'gtk': 'GTKAgg', + 'wx': 'WXAgg', + 'qt': 'Qt4Agg', # qt3 not supported + 'qt4': 'Qt4Agg', + 'qt5': 'Qt5Agg', + 'osx': 'MacOSX'} + +# We also need a reverse backends2guis mapping that will properly choose which +# GUI support to activate based on the desired matplotlib backend. 
For the +# most part it's just a reverse of the above dict, but we also need to add a +# few others that map to the same GUI manually: +backend2gui = dict(zip(backends.values(), backends.keys())) +backend2gui['Qt4Agg'] = 'qt' +# In the reverse mapping, there are a few extra valid matplotlib backends that +# map to the same GUI support +backend2gui['GTK'] = backend2gui['GTKCairo'] = 'gtk' +backend2gui['WX'] = 'wx' +backend2gui['CocoaAgg'] = 'osx' + + +def do_enable_gui(guiname): + from _pydev_bundle.pydev_versioncheck import versionok_for_gui + if versionok_for_gui(): + try: + from pydev_ipython.inputhook import enable_gui + enable_gui(guiname) + except: + sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname) + import traceback + traceback.print_exc() + elif guiname not in ['none', '', None]: + # Only print a warning if the guiname was going to do something + sys.stderr.write("Debug console: Python version does not support GUI event loop integration for '%s'\n" % guiname) + # Return value does not matter, so return back what was sent + return guiname + + +def find_gui_and_backend(): + """Return the gui and mpl backend.""" + matplotlib = sys.modules['matplotlib'] + # WARNING: this assumes matplotlib 1.1 or newer!! + backend = matplotlib.rcParams['backend'] + # In this case, we need to find what the appropriate gui selection call + # should be for IPython, so we can activate inputhook accordingly + gui = backend2gui.get(backend, None) + return gui, backend + + +def is_interactive_backend(backend): + """ Check if backend is interactive """ + matplotlib = sys.modules['matplotlib'] + from matplotlib.rcsetup import interactive_bk, non_interactive_bk # @UnresolvedImport + if backend in interactive_bk: + return True + elif backend in non_interactive_bk: + return False + else: + return matplotlib.is_interactive() + + +def patch_use(enable_gui_function): + """ Patch matplotlib function 'use' """ + matplotlib = sys.modules['matplotlib'] + def patched_use(*args, **kwargs): + matplotlib.real_use(*args, **kwargs) + gui, backend = find_gui_and_backend() + enable_gui_function(gui) + + matplotlib.real_use = matplotlib.use + matplotlib.use = patched_use + + +def patch_is_interactive(): + """ Patch matplotlib function 'use' """ + matplotlib = sys.modules['matplotlib'] + def patched_is_interactive(): + return matplotlib.rcParams['interactive'] + + matplotlib.real_is_interactive = matplotlib.is_interactive + matplotlib.is_interactive = patched_is_interactive + + +def activate_matplotlib(enable_gui_function): + """Set interactive to True for interactive backends. + enable_gui_function - Function which enables gui, should be run in the main thread. + """ + matplotlib = sys.modules['matplotlib'] + gui, backend = find_gui_and_backend() + is_interactive = is_interactive_backend(backend) + if is_interactive: + enable_gui_function(gui) + if not matplotlib.is_interactive(): + sys.stdout.write("Backend %s is interactive backend. Turning interactive mode on.\n" % backend) + matplotlib.interactive(True) + else: + if matplotlib.is_interactive(): + sys.stdout.write("Backend %s is non-interactive backend. Turning interactive mode off.\n" % backend) + matplotlib.interactive(False) + patch_use(enable_gui_function) + patch_is_interactive() + + +def flag_calls(func): + """Wrap a function to detect and flag when it gets called. + + This is a decorator which takes a function and wraps it in a function with + a 'called' attribute. wrapper.called is initialized to False. 
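
# How the helpers above are typically wired together once matplotlib has been
# imported in the debug console (illustrative sketch only; assumes matplotlib
# is installed, since the helpers read the backend from sys.modules):

import matplotlib  # must be in sys.modules before the helpers run

from pydev_ipython.matplotlibtools import activate_matplotlib, do_enable_gui

# Enables the matching GUI event loop and switches interactive mode on or off
# depending on whether the configured backend is interactive.
activate_matplotlib(do_enable_gui)
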
+ + The wrapper.called attribute is set to False right before each call to the + wrapped function, so if the call fails it remains False. After the call + completes, wrapper.called is set to True and the output is returned. + + Testing for truth in wrapper.called allows you to determine if a call to + func() was attempted and succeeded.""" + + # don't wrap twice + if hasattr(func, 'called'): + return func + + def wrapper(*args,**kw): + wrapper.called = False + out = func(*args,**kw) + wrapper.called = True + return out + + wrapper.called = False + wrapper.__doc__ = func.__doc__ + return wrapper + + +def activate_pylab(): + pylab = sys.modules['pylab'] + pylab.show._needmain = False + # We need to detect at runtime whether show() is called by the user. + # For this, we wrap it into a decorator which adds a 'called' flag. + pylab.draw_if_interactive = flag_calls(pylab.draw_if_interactive) + + +def activate_pyplot(): + pyplot = sys.modules['matplotlib.pyplot'] + pyplot.show._needmain = False + # We need to detect at runtime whether show() is called by the user. + # For this, we wrap it into a decorator which adds a 'called' flag. + pyplot.draw_if_interactive = flag_calls(pyplot.draw_if_interactive) diff --git a/ptvsd/pydevd/pydev_ipython/qt.py b/ptvsd/pydevd/pydev_ipython/qt.py new file mode 100644 index 00000000..222c81b9 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/qt.py @@ -0,0 +1,23 @@ +""" A Qt API selector that can be used to switch between PyQt and PySide. + +This uses the ETS 4.0 selection pattern of: +PySide first, PyQt with API v2. second. + +Do not use this if you need PyQt with the old QString/QVariant API. +""" + +import os + +from pydev_ipython.qt_loaders import (load_qt, QT_API_PYSIDE, + QT_API_PYQT, QT_API_PYQT5) + +QT_API = os.environ.get('QT_API', None) +if QT_API not in [QT_API_PYSIDE, QT_API_PYQT, QT_API_PYQT5, None]: + raise RuntimeError("Invalid Qt API %r, valid values are: %r, %r" % + (QT_API, QT_API_PYSIDE, QT_API_PYQT, QT_API_PYQT5)) +if QT_API is None: + api_opts = [QT_API_PYSIDE, QT_API_PYQT, QT_API_PYQT5] +else: + api_opts = [QT_API] + +QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts) diff --git a/ptvsd/pydevd/pydev_ipython/qt_for_kernel.py b/ptvsd/pydevd/pydev_ipython/qt_for_kernel.py new file mode 100644 index 00000000..d18a2183 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/qt_for_kernel.py @@ -0,0 +1,118 @@ +""" Import Qt in a manner suitable for an IPython kernel. + +This is the import used for the `gui=qt` or `matplotlib=qt` initialization. + +Import Priority: + +if Qt4 has been imported anywhere else: + use that + +if matplotlib has been imported and doesn't support v2 (<= 1.0.1): + use PyQt4 @v1 + +Next, ask ETS' QT_API env variable + +if QT_API not set: + ask matplotlib via rcParams['backend.qt4'] + if it said PyQt: + use PyQt4 @v1 + elif it said PySide: + use PySide + + else: (matplotlib said nothing) + # this is the default path - nobody told us anything + try: + PyQt @v1 + except: + fallback on PySide +else: + use PyQt @v2 or PySide, depending on QT_API + because ETS doesn't work with PyQt @v1. 
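
# Illustrative check of which Qt API this module ends up selecting (assumes at
# least one supported Qt binding is installed; the selection happens at import
# time):

from pydev_ipython import qt_for_kernel
print("Selected Qt API: %s" % qt_for_kernel.QT_API)
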
+ +""" + +import os +import sys + +from pydev_ipython.version import check_version +from pydev_ipython.qt_loaders import (load_qt, QT_API_PYSIDE, + QT_API_PYQT, QT_API_PYQT_DEFAULT, + loaded_api, QT_API_PYQT5) + +#Constraints placed on an imported matplotlib +def matplotlib_options(mpl): + if mpl is None: + return + + # #PyDev-779: In pysrc/pydev_ipython/qt_for_kernel.py, matplotlib_options should be replaced with latest from ipython + # (i.e.: properly check backend to decide upon qt4/qt5). + + backend = mpl.rcParams.get('backend', None) + if backend == 'Qt4Agg': + mpqt = mpl.rcParams.get('backend.qt4', None) + if mpqt is None: + return None + if mpqt.lower() == 'pyside': + return [QT_API_PYSIDE] + elif mpqt.lower() == 'pyqt4': + return [QT_API_PYQT_DEFAULT] + elif mpqt.lower() == 'pyqt4v2': + return [QT_API_PYQT] + raise ImportError("unhandled value for backend.qt4 from matplotlib: %r" % + mpqt) + + elif backend == 'Qt5Agg': + mpqt = mpl.rcParams.get('backend.qt5', None) + if mpqt is None: + return None + if mpqt.lower() == 'pyqt5': + return [QT_API_PYQT5] + raise ImportError("unhandled value for backend.qt5 from matplotlib: %r" % + mpqt) + + + # Fallback without checking backend (previous code) + mpqt = mpl.rcParams.get('backend.qt4', None) + if mpqt is None: + mpqt = mpl.rcParams.get('backend.qt5', None) + + if mpqt is None: + return None + if mpqt.lower() == 'pyside': + return [QT_API_PYSIDE] + elif mpqt.lower() == 'pyqt4': + return [QT_API_PYQT_DEFAULT] + elif mpqt.lower() == 'pyqt5': + return [QT_API_PYQT5] + raise ImportError("unhandled value for qt backend from matplotlib: %r" % + mpqt) + + +def get_options(): + """Return a list of acceptable QT APIs, in decreasing order of + preference + """ + #already imported Qt somewhere. Use that + loaded = loaded_api() + if loaded is not None: + return [loaded] + + mpl = sys.modules.get('matplotlib', None) + + if mpl is not None and not check_version(mpl.__version__, '1.0.2'): + #1.0.1 only supports PyQt4 v1 + return [QT_API_PYQT_DEFAULT] + + if os.environ.get('QT_API', None) is None: + #no ETS variable. Ask mpl, then use either + return matplotlib_options(mpl) or [QT_API_PYQT_DEFAULT, QT_API_PYSIDE, QT_API_PYQT5] + + #ETS variable present. Will fallback to external.qt + return None + +api_opts = get_options() +if api_opts is not None: + QtCore, QtGui, QtSvg, QT_API = load_qt(api_opts) + +else: # use ETS variable + from pydev_ipython.qt import QtCore, QtGui, QtSvg, QT_API diff --git a/ptvsd/pydevd/pydev_ipython/qt_loaders.py b/ptvsd/pydevd/pydev_ipython/qt_loaders.py new file mode 100644 index 00000000..55d38ba7 --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/qt_loaders.py @@ -0,0 +1,281 @@ +""" +This module contains factory functions that attempt +to return Qt submodules from the various python Qt bindings. + +It also protects against double-importing Qt with different +bindings, which is unstable and likely to crash + +This is used primarily by qt and qt_for_kernel, and shouldn't +be accessed directly from the outside +""" +import sys +from functools import partial + +from pydev_ipython.version import check_version + +# Available APIs. 
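
# Typical use of this module (illustrative sketch; assumes at least one of the
# bindings named by the constants defined just below is installed):

from pydev_ipython.qt_loaders import load_qt, QT_API_PYQT5, QT_API_PYQT, QT_API_PYSIDE

QtCore, QtGui, QtSvg, api = load_qt([QT_API_PYQT5, QT_API_PYQT, QT_API_PYSIDE])
print("Loaded Qt binding: %s" % api)
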
+QT_API_PYQT = 'pyqt' +QT_API_PYQTv1 = 'pyqtv1' +QT_API_PYQT_DEFAULT = 'pyqtdefault' # don't set SIP explicitly +QT_API_PYSIDE = 'pyside' +QT_API_PYQT5 = 'pyqt5' + + +class ImportDenier(object): + """Import Hook that will guard against bad Qt imports + once IPython commits to a specific binding + """ + + def __init__(self): + self.__forbidden = set() + + def forbid(self, module_name): + sys.modules.pop(module_name, None) + self.__forbidden.add(module_name) + + def find_module(self, fullname, path=None): + if path: + return + if fullname in self.__forbidden: + return self + + def load_module(self, fullname): + raise ImportError(""" + Importing %s disabled by IPython, which has + already imported an Incompatible QT Binding: %s + """ % (fullname, loaded_api())) + +ID = ImportDenier() +sys.meta_path.append(ID) + + +def commit_api(api): + """Commit to a particular API, and trigger ImportErrors on subsequent + dangerous imports""" + + if api == QT_API_PYSIDE: + ID.forbid('PyQt4') + ID.forbid('PyQt5') + else: + ID.forbid('PySide') + + +def loaded_api(): + """Return which API is loaded, if any + + If this returns anything besides None, + importing any other Qt binding is unsafe. + + Returns + ------- + None, 'pyside', 'pyqt', or 'pyqtv1' + """ + if 'PyQt4.QtCore' in sys.modules: + if qtapi_version() == 2: + return QT_API_PYQT + else: + return QT_API_PYQTv1 + elif 'PySide.QtCore' in sys.modules: + return QT_API_PYSIDE + elif 'PyQt5.QtCore' in sys.modules: + return QT_API_PYQT5 + return None + + +def has_binding(api): + """Safely check for PyQt4 or PySide, without importing + submodules + + Parameters + ---------- + api : str [ 'pyqtv1' | 'pyqt' | 'pyside' | 'pyqtdefault'] + Which module to check for + + Returns + ------- + True if the relevant module appears to be importable + """ + # we can't import an incomplete pyside and pyqt4 + # this will cause a crash in sip (#1431) + # check for complete presence before importing + module_name = {QT_API_PYSIDE: 'PySide', + QT_API_PYQT: 'PyQt4', + QT_API_PYQTv1: 'PyQt4', + QT_API_PYQT_DEFAULT: 'PyQt4', + QT_API_PYQT5: 'PyQt5', + } + module_name = module_name[api] + + import imp + try: + #importing top level PyQt4/PySide module is ok... + mod = __import__(module_name) + #...importing submodules is not + imp.find_module('QtCore', mod.__path__) + imp.find_module('QtGui', mod.__path__) + imp.find_module('QtSvg', mod.__path__) + + #we can also safely check PySide version + if api == QT_API_PYSIDE: + return check_version(mod.__version__, '1.0.3') + else: + return True + except ImportError: + return False + + +def qtapi_version(): + """Return which QString API has been set, if any + + Returns + ------- + The QString API version (1 or 2), or None if not set + """ + try: + import sip + except ImportError: + return + try: + return sip.getapi('QString') + except ValueError: + return + + +def can_import(api): + """Safely query whether an API is importable, without importing it""" + if not has_binding(api): + return False + + current = loaded_api() + if api == QT_API_PYQT_DEFAULT: + return current in [QT_API_PYQT, QT_API_PYQTv1, QT_API_PYQT5, None] + else: + return current in [api, None] + + +def import_pyqt4(version=2): + """ + Import PyQt4 + + Parameters + ---------- + version : 1, 2, or None + Which QString/QVariant API to use. Set to None to use the system + default + + ImportErrors raised within this function are non-recoverable + """ + # The new-style string API (version=2) automatically + # converts QStrings to Unicode Python strings. 
Also, automatically unpacks + # QVariants to their underlying objects. + import sip + + if version is not None: + sip.setapi('QString', version) + sip.setapi('QVariant', version) + + from PyQt4 import QtGui, QtCore, QtSvg + + if not check_version(QtCore.PYQT_VERSION_STR, '4.7'): + raise ImportError("IPython requires PyQt4 >= 4.7, found %s" % + QtCore.PYQT_VERSION_STR) + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + # query for the API version (in case version == None) + version = sip.getapi('QString') + api = QT_API_PYQTv1 if version == 1 else QT_API_PYQT + return QtCore, QtGui, QtSvg, api + +def import_pyqt5(): + """ + Import PyQt5 + + ImportErrors raised within this function are non-recoverable + """ + from PyQt5 import QtGui, QtCore, QtSvg + + # Alias PyQt-specific functions for PySide compatibility. + QtCore.Signal = QtCore.pyqtSignal + QtCore.Slot = QtCore.pyqtSlot + + return QtCore, QtGui, QtSvg, QT_API_PYQT5 + + +def import_pyside(): + """ + Import PySide + + ImportErrors raised within this function are non-recoverable + """ + from PySide import QtGui, QtCore, QtSvg # @UnresolvedImport + return QtCore, QtGui, QtSvg, QT_API_PYSIDE + + +def load_qt(api_options): + """ + Attempt to import Qt, given a preference list + of permissible bindings + + It is safe to call this function multiple times. + + Parameters + ---------- + api_options: List of strings + The order of APIs to try. Valid items are 'pyside', + 'pyqt', and 'pyqtv1' + + Returns + ------- + + A tuple of QtCore, QtGui, QtSvg, QT_API + The first three are the Qt modules. The last is the + string indicating which module was loaded. + + Raises + ------ + ImportError, if it isn't possible to import any requested + bindings (either becaues they aren't installed, or because + an incompatible library has already been installed) + """ + loaders = {QT_API_PYSIDE: import_pyside, + QT_API_PYQT: import_pyqt4, + QT_API_PYQTv1: partial(import_pyqt4, version=1), + QT_API_PYQT_DEFAULT: partial(import_pyqt4, version=None), + QT_API_PYQT5: import_pyqt5, + } + + for api in api_options: + + if api not in loaders: + raise RuntimeError( + "Invalid Qt API %r, valid values are: %r, %r, %r, %r" % + (api, QT_API_PYSIDE, QT_API_PYQT, + QT_API_PYQTv1, QT_API_PYQT_DEFAULT, QT_API_PYQT5)) + + if not can_import(api): + continue + + #cannot safely recover from an ImportError during this + result = loaders[api]() + api = result[-1] # changed if api = QT_API_PYQT_DEFAULT + commit_api(api) + return result + else: + raise ImportError(""" + Could not load requested Qt binding. Please ensure that + PyQt4 >= 4.7 or PySide >= 1.0.3 is available, + and only one is imported per session. + + Currently-imported Qt library: %r + PyQt4 installed: %s + PyQt5 installed: %s + PySide >= 1.0.3 installed: %s + Tried to load: %r + """ % (loaded_api(), + has_binding(QT_API_PYQT), + has_binding(QT_API_PYQT5), + has_binding(QT_API_PYSIDE), + api_options)) diff --git a/ptvsd/pydevd/pydev_ipython/version.py b/ptvsd/pydevd/pydev_ipython/version.py new file mode 100644 index 00000000..1de0047e --- /dev/null +++ b/ptvsd/pydevd/pydev_ipython/version.py @@ -0,0 +1,36 @@ +# encoding: utf-8 +""" +Utilities for version comparison + +It is a bit ridiculous that we need these. +""" + +#----------------------------------------------------------------------------- +# Copyright (C) 2013 The IPython Development Team +# +# Distributed under the terms of the BSD License. 
The full license is in +# the file COPYING, distributed as part of this software. +#----------------------------------------------------------------------------- + +#----------------------------------------------------------------------------- +# Imports +#----------------------------------------------------------------------------- + +from distutils.version import LooseVersion + +#----------------------------------------------------------------------------- +# Code +#----------------------------------------------------------------------------- + +def check_version(v, check): + """check version string v >= check + + If dev/prerelease tags result in TypeError for string-number comparison, + it is assumed that the dependency is satisfied. + Users on dev branches are responsible for keeping their own packages up to date. + """ + try: + return LooseVersion(v) >= LooseVersion(check) + except TypeError: + return True + diff --git a/ptvsd/pydevd/pydev_pysrc.py b/ptvsd/pydevd/pydev_pysrc.py new file mode 100644 index 00000000..b9ed49e8 --- /dev/null +++ b/ptvsd/pydevd/pydev_pysrc.py @@ -0,0 +1 @@ +'''An empty file in pysrc that can be imported (from sitecustomize) to find the location of pysrc''' \ No newline at end of file diff --git a/ptvsd/pydevd/pydev_run_in_console.py b/ptvsd/pydevd/pydev_run_in_console.py new file mode 100644 index 00000000..9d862c57 --- /dev/null +++ b/ptvsd/pydevd/pydev_run_in_console.py @@ -0,0 +1,84 @@ +''' +Entry point module to run a file in the interactive console. +''' +import os +import sys +from pydevconsole import do_exit, InterpreterInterface, process_exec_queue, start_console_server, init_mpl_in_console +from _pydev_imps._pydev_saved_modules import threading + +from _pydev_bundle import pydev_imports +from _pydevd_bundle.pydevd_utils import save_main_module +from _pydev_bundle.pydev_console_utils import StdIn + + +def run_file(file, globals=None, locals=None): + if os.path.isdir(file): + new_target = os.path.join(file, '__main__.py') + if os.path.isfile(new_target): + file = new_target + + if globals is None: + m = save_main_module(file, 'pydev_run_in_console') + + globals = m.__dict__ + try: + globals['__builtins__'] = __builtins__ + except NameError: + pass # Not there on Jython... + + if locals is None: + locals = globals + + sys.path.insert(0, os.path.split(file)[0]) + + print('Running %s'%file) + pydev_imports.execfile(file, globals, locals) # execute the script + + return globals + +#======================================================================================================================= +# main +#======================================================================================================================= +if __name__ == '__main__': + port, client_port = sys.argv[1:3] + + del sys.argv[1] + del sys.argv[1] + + file = sys.argv[1] + + del sys.argv[0] + + from _pydev_bundle import pydev_localhost + + if int(port) == 0 and int(client_port) == 0: + (h, p) = pydev_localhost.get_socket_name() + + client_port = p + + host = pydev_localhost.get_localhost() + + + #replace exit (see comments on method) + #note that this does not work in jython!!! (sys method can't be replaced). 
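
# Launch convention handled by the argv parsing above (illustrative sketch;
# 'my_script.py' is a made-up target). Passing 0 for both ports makes the
# module pick a local port for the console client itself:
#
#   python pydev_run_in_console.py <port> <client_port> <file-to-run> [script args...]

import subprocess
import sys

subprocess.Popen([sys.executable, 'pydev_run_in_console.py', '0', '0', 'my_script.py'])
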
+ sys.exit = do_exit + + interpreter = InterpreterInterface(host, int(client_port), threading.currentThread()) + + server_thread = threading.Thread(target=start_console_server, + name='ServerThread', + args=(host, int(port), interpreter)) + server_thread.setDaemon(True) + server_thread.start() + + sys.stdin = StdIn(interpreter, host, client_port, sys.stdin) + + init_mpl_in_console(interpreter) + + globals = run_file(file, None, None) + + interpreter.get_namespace().update(globals) + + interpreter.ShowConsole() + + process_exec_queue(interpreter) \ No newline at end of file diff --git a/ptvsd/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt b/ptvsd/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt new file mode 100644 index 00000000..29cdc5bc --- /dev/null +++ b/ptvsd/pydevd/pydev_sitecustomize/__not_in_default_pythonpath.txt @@ -0,0 +1 @@ +(no __init__.py file) \ No newline at end of file diff --git a/ptvsd/pydevd/pydev_sitecustomize/sitecustomize.py b/ptvsd/pydevd/pydev_sitecustomize/sitecustomize.py new file mode 100644 index 00000000..63a6c370 --- /dev/null +++ b/ptvsd/pydevd/pydev_sitecustomize/sitecustomize.py @@ -0,0 +1,197 @@ +''' + This module will: + - change the input() and raw_input() commands to change \r\n or \r into \n + - execute the user site customize -- if available + - change raw_input() and input() to also remove any trailing \r + + Up to PyDev 3.4 it also was setting the default encoding, but it was removed because of differences when + running from a shell (i.e.: now we just set the PYTHONIOENCODING related to that -- which is properly + treated on Py 2.7 onwards). +''' +DEBUG = 0 #0 or 1 because of jython + +import sys +encoding = None + +IS_PYTHON_3_ONWARDS = 0 + +try: + IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3 +except: + #That's OK, not all versions of python have sys.version_info + if DEBUG: + import traceback;traceback.print_exc() #@Reimport + +#----------------------------------------------------------------------------------------------------------------------- +#Line buffering +if IS_PYTHON_3_ONWARDS: + #Python 3 has a bug (http://bugs.python.org/issue4705) in which -u doesn't properly make output/input unbuffered + #so, we need to enable that ourselves here. 
+ try: + sys.stdout._line_buffering = True + except: + pass + try: + sys.stderr._line_buffering = True + except: + pass + try: + sys.stdin._line_buffering = True + except: + pass + + +try: + import org.python.core.PyDictionary #@UnresolvedImport @UnusedImport -- just to check if it could be valid + def dict_contains(d, key): + return d.has_key(key) +except: + try: + #Py3k does not have has_key anymore, and older versions don't have __contains__ + dict_contains = dict.__contains__ + except: + try: + dict_contains = dict.has_key + except NameError: + def dict_contains(d, key): + return d.has_key(key) + + +#----------------------------------------------------------------------------------------------------------------------- +#now that we've finished the needed pydev sitecustomize, let's run the default one (if available) + +#Ok, some weirdness going on in Python 3k: when removing this module from the sys.module to import the 'real' +#sitecustomize, all the variables in this scope become None (as if it was garbage-collected), so, the the reference +#below is now being kept to create a cyclic reference so that it neven dies) +__pydev_sitecustomize_module__ = sys.modules.get('sitecustomize') #A ref to this module + + +#remove the pydev site customize (and the pythonpath for it) +paths_removed = [] +try: + for c in sys.path[:]: + #Pydev controls the whole classpath in Jython already, so, we don't want a a duplicate for + #what we've already added there (this is needed to support Jython 2.5b1 onwards -- otherwise, as + #we added the sitecustomize to the pythonpath and to the classpath, we'd have to remove it from the + #classpath too -- and I don't think there's a way to do that... or not?) + if c.find('pydev_sitecustomize') != -1 or c == '__classpath__' or c == '__pyclasspath__' or \ + c == '__classpath__/' or c == '__pyclasspath__/' or c == '__classpath__\\' or c == '__pyclasspath__\\': + sys.path.remove(c) + if c.find('pydev_sitecustomize') == -1: + #We'll re-add any paths removed but the pydev_sitecustomize we added from pydev. + paths_removed.append(c) + + if dict_contains(sys.modules, 'sitecustomize'): + del sys.modules['sitecustomize'] #this module +except: + #print the error... should never happen (so, always show, and not only on debug)! + import traceback;traceback.print_exc() #@Reimport +else: + #Now, execute the default sitecustomize + try: + import sitecustomize #@UnusedImport + sitecustomize.__pydev_sitecustomize_module__ = __pydev_sitecustomize_module__ + except: + pass + + if not dict_contains(sys.modules, 'sitecustomize'): + #If there was no sitecustomize, re-add the pydev sitecustomize (pypy gives a KeyError if it's not there) + sys.modules['sitecustomize'] = __pydev_sitecustomize_module__ + + try: + if paths_removed: + if sys is None: + import sys + if sys is not None: + #And after executing the default sitecustomize, restore the paths (if we didn't remove it before, + #the import sitecustomize would recurse). + sys.path.extend(paths_removed) + except: + #print the error... should never happen (so, always show, and not only on debug)! 
+ import traceback;traceback.print_exc() #@Reimport + + + + +if sys.version_info[0] < 3: + try: + #Redefine input and raw_input only after the original sitecustomize was executed + #(because otherwise, the original raw_input and input would still not be defined) + import __builtin__ + original_raw_input = __builtin__.raw_input + original_input = __builtin__.input + + + def raw_input(prompt=''): + #the original raw_input would only remove a trailing \n, so, at + #this point if we had a \r\n the \r would remain (which is valid for eclipse) + #so, let's remove the remaining \r which python didn't expect. + ret = original_raw_input(prompt) + + if ret.endswith('\r'): + return ret[:-1] + + return ret + raw_input.__doc__ = original_raw_input.__doc__ + + def input(prompt=''): + #input must also be rebinded for using the new raw_input defined + return eval(raw_input(prompt)) + input.__doc__ = original_input.__doc__ + + + __builtin__.raw_input = raw_input + __builtin__.input = input + + except: + #Don't report errors at this stage + if DEBUG: + import traceback;traceback.print_exc() #@Reimport + +else: + try: + import builtins #Python 3.0 does not have the __builtin__ module @UnresolvedImport + original_input = builtins.input + def input(prompt=''): + #the original input would only remove a trailing \n, so, at + #this point if we had a \r\n the \r would remain (which is valid for eclipse) + #so, let's remove the remaining \r which python didn't expect. + ret = original_input(prompt) + + if ret.endswith('\r'): + return ret[:-1] + + return ret + input.__doc__ = original_input.__doc__ + builtins.input = input + except: + #Don't report errors at this stage + if DEBUG: + import traceback;traceback.print_exc() #@Reimport + + + +try: + #The original getpass doesn't work from the eclipse console, so, let's put a replacement + #here (note that it'll not go into echo mode in the console, so, what' the user writes + #will actually be seen) + #Note: same thing from the fix_getpass module -- but we don't want to import it in this + #custom sitecustomize. + def fix_get_pass(): + try: + import getpass + except ImportError: + return #If we can't import it, we can't fix it + import warnings + fallback = getattr(getpass, 'fallback_getpass', None) # >= 2.6 + if not fallback: + fallback = getpass.default_getpass # <= 2.5 + getpass.getpass = fallback + if hasattr(getpass, 'GetPassWarning'): + warnings.simplefilter("ignore", category=getpass.GetPassWarning) + fix_get_pass() + +except: + #Don't report errors at this stage + if DEBUG: + import traceback;traceback.print_exc() #@Reimport diff --git a/ptvsd/pydevd/pydevconsole.py b/ptvsd/pydevd/pydevconsole.py new file mode 100644 index 00000000..e8c58733 --- /dev/null +++ b/ptvsd/pydevd/pydevconsole.py @@ -0,0 +1,516 @@ +''' +Entry point module to start the interactive console. 
+''' +from _pydev_imps._pydev_saved_modules import thread +from _pydevd_bundle.pydevd_constants import IS_JYTHON, dict_iter_items +start_new_thread = thread.start_new_thread + +try: + from code import InteractiveConsole +except ImportError: + from _pydevd_bundle.pydevconsole_code_for_ironpython import InteractiveConsole + +from code import compile_command +from code import InteractiveInterpreter + +import os +import sys + +from _pydev_imps._pydev_saved_modules import threading +from _pydevd_bundle.pydevd_constants import INTERACTIVE_MODE_AVAILABLE + +import traceback +from _pydev_bundle import fix_getpass +fix_getpass.fix_getpass() + +from _pydevd_bundle import pydevd_vars, pydevd_save_locals + +from _pydev_bundle.pydev_imports import Exec, _queue + +try: + import __builtin__ +except: + import builtins as __builtin__ # @UnresolvedImport + +from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn +from _pydev_bundle.pydev_console_utils import CodeFragment + +IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3 +IS_PY24 = sys.version_info[0] == 2 and sys.version_info[1] == 4 + +class Command: + def __init__(self, interpreter, code_fragment): + """ + :type code_fragment: CodeFragment + :type interpreter: InteractiveConsole + """ + self.interpreter = interpreter + self.code_fragment = code_fragment + self.more = None + + + def symbol_for_fragment(code_fragment): + if code_fragment.is_single_line: + symbol = 'single' + else: + if IS_JYTHON: + symbol = 'single' # Jython doesn't support exec + else: + symbol = 'exec' + return symbol + symbol_for_fragment = staticmethod(symbol_for_fragment) + + def run(self): + text = self.code_fragment.text + symbol = self.symbol_for_fragment(self.code_fragment) + + self.more = self.interpreter.runsource(text, '', symbol) + +try: + try: + execfile #Not in Py3k + except NameError: + from _pydev_bundle.pydev_imports import execfile + + __builtin__.execfile = execfile +except: + pass + +# Pull in runfile, the interface to UMD that wraps execfile +from _pydev_bundle.pydev_umd import runfile, _set_globals_function +if sys.version_info[0] >= 3: + import builtins # @UnresolvedImport + builtins.runfile = runfile +else: + import __builtin__ + __builtin__.runfile = runfile + +#======================================================================================================================= +# InterpreterInterface +#======================================================================================================================= +class InterpreterInterface(BaseInterpreterInterface): + ''' + The methods in this class should be registered in the xml-rpc server. 
+ ''' + + def __init__(self, host, client_port, mainThread, show_banner=True): + BaseInterpreterInterface.__init__(self, mainThread) + self.client_port = client_port + self.host = host + self.namespace = {} + self.interpreter = InteractiveConsole(self.namespace) + self._input_error_printed = False + + + def do_add_exec(self, codeFragment): + command = Command(self.interpreter, codeFragment) + command.run() + return command.more + + + def get_namespace(self): + return self.namespace + + + def getCompletions(self, text, act_tok): + try: + from _pydev_bundle._pydev_completer import Completer + + completer = Completer(self.namespace, None) + return completer.complete(act_tok) + except: + import traceback + + traceback.print_exc() + return [] + + def close(self): + sys.exit(0) + + def get_greeting_msg(self): + return 'PyDev console: starting.\n' + + +class _ProcessExecQueueHelper: + _debug_hook = None + _return_control_osc = False + +def set_debug_hook(debug_hook): + _ProcessExecQueueHelper._debug_hook = debug_hook + + +def init_mpl_in_console(interpreter): + from pydev_ipython.inputhook import set_return_control_callback + + def return_control(): + ''' A function that the inputhooks can call (via inputhook.stdin_ready()) to find + out if they should cede control and return ''' + if _ProcessExecQueueHelper._debug_hook: + # Some of the input hooks check return control without doing + # a single operation, so we don't return True on every + # call when the debug hook is in place to allow the GUI to run + # XXX: Eventually the inputhook code will have diverged enough + # from the IPython source that it will be worthwhile rewriting + # it rather than pretending to maintain the old API + _ProcessExecQueueHelper._return_control_osc = not _ProcessExecQueueHelper._return_control_osc + if _ProcessExecQueueHelper._return_control_osc: + return True + + if not interpreter.exec_queue.empty(): + return True + return False + + set_return_control_callback(return_control) + + if not INTERACTIVE_MODE_AVAILABLE: + return + + from _pydev_bundle.pydev_import_hook import import_hook_manager + from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot + import_hook_manager.add_module_name("matplotlib", lambda: activate_matplotlib(interpreter.enableGui)) + # enable_gui_function in activate_matplotlib should be called in main thread. That's why we call + # interpreter.enableGui which put it into the interpreter's exec_queue and executes it in the main thread. + import_hook_manager.add_module_name("pylab", activate_pylab) + import_hook_manager.add_module_name("pyplot", activate_pyplot) + + +def process_exec_queue(interpreter): + init_mpl_in_console(interpreter) + from pydev_ipython.inputhook import get_inputhook + + while 1: + # Running the request may have changed the inputhook in use + inputhook = get_inputhook() + + if _ProcessExecQueueHelper._debug_hook: + _ProcessExecQueueHelper._debug_hook() + + if inputhook: + try: + # Note: it'll block here until return_control returns True. + inputhook() + except: + import traceback;traceback.print_exc() + try: + try: + code_fragment = interpreter.exec_queue.get(block=True, timeout=1/20.) # 20 calls/second + except _queue.Empty: + continue + + if callable(code_fragment): + # It can be a callable (i.e.: something that must run in the main + # thread can be put in the queue for later execution). 
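(As a rough, standalone illustration of the queue-draining pattern used in process_exec_queue above -- queue items are either code fragments or callables that must run on the main thread -- here is a minimal sketch; the names exec_queue and drain_exec_queue are invented for the example and are not part of pydevd:)

    import queue

    exec_queue = queue.Queue()

    def drain_exec_queue():
        # Run everything currently queued, on the calling (main) thread.
        while True:
            try:
                item = exec_queue.get_nowait()
            except queue.Empty:
                return
            if callable(item):
                item()                 # e.g. GUI work scheduled from another thread
            else:
                exec(item, globals())  # treat anything else as a source fragment

    exec_queue.put("x = 40 + 2")
    exec_queue.put(lambda: print("x is", x))
    drain_exec_queue()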
+ code_fragment() + else: + more = interpreter.add_exec(code_fragment) + except KeyboardInterrupt: + interpreter.buffer = None + continue + except SystemExit: + raise + except: + type, value, tb = sys.exc_info() + traceback.print_exception(type, value, tb, file=sys.__stderr__) + exit() + + +if 'IPYTHONENABLE' in os.environ: + IPYTHON = os.environ['IPYTHONENABLE'] == 'True' +else: + IPYTHON = True + +try: + try: + exitfunc = sys.exitfunc + except AttributeError: + exitfunc = None + + if IPYTHON: + from _pydev_bundle.pydev_ipython_console import InterpreterInterface + if exitfunc is not None: + sys.exitfunc = exitfunc + else: + try: + delattr(sys, 'exitfunc') + except: + pass +except: + IPYTHON = False + pass + +#======================================================================================================================= +# _DoExit +#======================================================================================================================= +def do_exit(*args): + ''' + We have to override the exit because calling sys.exit will only actually exit the main thread, + and as we're in a Xml-rpc server, that won't work. + ''' + + try: + import java.lang.System + + java.lang.System.exit(1) + except ImportError: + if len(args) == 1: + os._exit(args[0]) + else: + os._exit(0) + + +def handshake(): + return "PyCharm" + + +#======================================================================================================================= +# start_console_server +#======================================================================================================================= +def start_console_server(host, port, interpreter): + if port == 0: + host = '' + + #I.e.: supporting the internal Jython version in PyDev to create a Jython interactive console inside Eclipse. 
+ from _pydev_bundle.pydev_imports import SimpleXMLRPCServer as XMLRPCServer #@Reimport + + try: + if IS_PY24: + server = XMLRPCServer((host, port), logRequests=False) + else: + server = XMLRPCServer((host, port), logRequests=False, allow_none=True) + + except: + sys.stderr.write('Error starting server with host: "%s", port: "%s", client_port: "%s"\n' % (host, port, interpreter.client_port)) + sys.stderr.flush() + raise + + # Tell UMD the proper default namespace + _set_globals_function(interpreter.get_namespace) + + server.register_function(interpreter.execLine) + server.register_function(interpreter.execMultipleLines) + server.register_function(interpreter.getCompletions) + server.register_function(interpreter.getFrame) + server.register_function(interpreter.getVariable) + server.register_function(interpreter.changeVariable) + server.register_function(interpreter.getDescription) + server.register_function(interpreter.close) + server.register_function(interpreter.interrupt) + server.register_function(handshake) + server.register_function(interpreter.connectToDebugger) + server.register_function(interpreter.hello) + server.register_function(interpreter.getArray) + server.register_function(interpreter.evaluate) + server.register_function(interpreter.ShowConsole) + + # Functions for GUI main loop integration + server.register_function(interpreter.enableGui) + + if port == 0: + (h, port) = server.socket.getsockname() + + print(port) + print(interpreter.client_port) + + + sys.stderr.write(interpreter.get_greeting_msg()) + sys.stderr.flush() + + while True: + try: + server.serve_forever() + except: + # Ugly code to be py2/3 compatible + # https://sw-brainwy.rhcloud.com/tracker/PyDev/534: + # Unhandled "interrupted system call" error in the pydevconsol.py + e = sys.exc_info()[1] + retry = False + try: + retry = e.args[0] == 4 #errno.EINTR + except: + pass + if not retry: + raise + # Otherwise, keep on going + return server + + +def start_server(host, port, client_port): + #replace exit (see comments on method) + #note that this does not work in jython!!! (sys method can't be replaced). 
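(For orientation, the register_function calls above boil down to the stdlib XML-RPC server. A minimal self-contained sketch using the Python 3 module names follows; the port, the handshake string and the upper-casing "executor" are placeholders, not pydevd's real protocol:)

    from xmlrpc.server import SimpleXMLRPCServer
    import threading
    import xmlrpc.client

    server = SimpleXMLRPCServer(("127.0.0.1", 0), logRequests=False, allow_none=True)
    server.register_function(lambda: "PyCharm", "handshake")
    server.register_function(lambda text: text.upper(), "execLine")
    port = server.socket.getsockname()[1]
    threading.Thread(target=server.serve_forever, daemon=True).start()

    proxy = xmlrpc.client.ServerProxy("http://127.0.0.1:%d" % port, allow_none=True)
    print(proxy.handshake(), proxy.execLine("print(1)"))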
+ sys.exit = do_exit + + interpreter = InterpreterInterface(host, client_port, threading.currentThread()) + + start_new_thread(start_console_server,(host, port, interpreter)) + + process_exec_queue(interpreter) + + +def get_ipython_hidden_vars(): + if IPYTHON and hasattr(__builtin__, 'interpreter'): + interpreter = get_interpreter() + return interpreter.get_ipython_hidden_vars_dict() + + +def get_interpreter(): + try: + interpreterInterface = getattr(__builtin__, 'interpreter') + except AttributeError: + interpreterInterface = InterpreterInterface(None, None, threading.currentThread()) + __builtin__.interpreter = interpreterInterface + sys.stderr.write(interpreterInterface.get_greeting_msg()) + sys.stderr.flush() + + return interpreterInterface + + +def get_completions(text, token, globals, locals): + interpreterInterface = get_interpreter() + + interpreterInterface.interpreter.update(globals, locals) + + return interpreterInterface.getCompletions(text, token) + +#=============================================================================== +# Debugger integration +#=============================================================================== + +def exec_code(code, globals, locals, debugger): + interpreterInterface = get_interpreter() + interpreterInterface.interpreter.update(globals, locals) + + res = interpreterInterface.need_more(code) + + if res: + return True + + interpreterInterface.add_exec(code, debugger) + + return False + + + +class ConsoleWriter(InteractiveInterpreter): + skip = 0 + + def __init__(self, locals=None): + InteractiveInterpreter.__init__(self, locals) + + def write(self, data): + #if (data.find("global_vars") == -1 and data.find("pydevd") == -1): + if self.skip > 0: + self.skip -= 1 + else: + if data == "Traceback (most recent call last):\n": + self.skip = 1 + sys.stderr.write(data) + + def showsyntaxerror(self, filename=None): + """Display the syntax error that just occurred.""" + #Override for avoid using sys.excepthook PY-12600 + type, value, tb = sys.exc_info() + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + if filename and type is SyntaxError: + # Work hard to stuff the correct filename in the exception + try: + msg, (dummy_filename, lineno, offset, line) = value.args + except ValueError: + # Not the format we expect; leave it alone + pass + else: + # Stuff in the right filename + value = SyntaxError(msg, (filename, lineno, offset, line)) + sys.last_value = value + list = traceback.format_exception_only(type, value) + sys.stderr.write(''.join(list)) + + def showtraceback(self): + """Display the exception that just occurred.""" + #Override for avoid using sys.excepthook PY-12600 + try: + type, value, tb = sys.exc_info() + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + tblist = traceback.extract_tb(tb) + del tblist[:1] + lines = traceback.format_list(tblist) + if lines: + lines.insert(0, "Traceback (most recent call last):\n") + lines.extend(traceback.format_exception_only(type, value)) + finally: + tblist = tb = None + sys.stderr.write(''.join(lines)) + +def console_exec(thread_id, frame_id, expression, dbg): + """returns 'False' in case expression is partially correct + """ + frame = pydevd_vars.find_frame(thread_id, frame_id) + + is_multiline = expression.count('@LINE@') > 1 + expression = str(expression.replace('@LINE@', '\n')) + + #Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329 + #(Names not resolved in generator expression in 
method) + #See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html + updated_globals = {} + updated_globals.update(frame.f_globals) + updated_globals.update(frame.f_locals) #locals later because it has precedence over the actual globals + + if IPYTHON: + need_more = exec_code(CodeFragment(expression), updated_globals, frame.f_locals, dbg) + if not need_more: + pydevd_save_locals.save_locals(frame) + return need_more + + + interpreter = ConsoleWriter() + + if not is_multiline: + try: + code = compile_command(expression) + except (OverflowError, SyntaxError, ValueError): + # Case 1 + interpreter.showsyntaxerror() + return False + if code is None: + # Case 2 + return True + else: + code = expression + + #Case 3 + + try: + Exec(code, updated_globals, frame.f_locals) + + except SystemExit: + raise + except: + interpreter.showtraceback() + else: + pydevd_save_locals.save_locals(frame) + return False + +#======================================================================================================================= +# main +#======================================================================================================================= +if __name__ == '__main__': + #Important: don't use this module directly as the __main__ module, rather, import itself as pydevconsole + #so that we don't get multiple pydevconsole modules if it's executed directly (otherwise we'd have multiple + #representations of its classes). + #See: https://sw-brainwy.rhcloud.com/tracker/PyDev/446: + #'Variables' and 'Expressions' views stopped working when debugging interactive console + import pydevconsole + sys.stdin = pydevconsole.BaseStdIn(sys.stdin) + port, client_port = sys.argv[1:3] + from _pydev_bundle import pydev_localhost + + if int(port) == 0 and int(client_port) == 0: + (h, p) = pydev_localhost.get_socket_name() + + client_port = p + + pydevconsole.start_server(pydev_localhost.get_localhost(), int(port), int(client_port)) diff --git a/ptvsd/pydevd/pydevd.py b/ptvsd/pydevd/pydevd.py new file mode 100644 index 00000000..b9221dd8 --- /dev/null +++ b/ptvsd/pydevd/pydevd.py @@ -0,0 +1,1621 @@ +''' +Entry point module (keep at root): + +This module starts the debugger. +''' +import sys + +if sys.version_info[:2] < (2, 6): + raise RuntimeError('The PyDev.Debugger requires Python 2.6 onwards to be run. 
If you need to use an older Python version, use an older version of the debugger.') + +import atexit +import os +import traceback + +from _pydevd_bundle.pydevd_constants import IS_JYTH_LESS25, IS_PY3K, IS_PY34_OLDER, get_thread_id, dict_keys, \ + dict_iter_items, DebugInfoHolder, PYTHON_SUSPEND, STATE_SUSPEND, STATE_RUN, get_frame, xrange, \ + clear_cached_thread_id, INTERACTIVE_MODE_AVAILABLE +from _pydev_bundle import fix_getpass +from _pydev_bundle import pydev_imports, pydev_log +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +from _pydev_bundle.pydev_is_thread_alive import is_thread_alive +from _pydev_imps._pydev_saved_modules import threading +from _pydev_imps._pydev_saved_modules import time +from _pydev_imps._pydev_saved_modules import thread +from _pydevd_bundle import pydevd_io, pydevd_vm_type +import pydevd_tracing +from _pydevd_bundle import pydevd_utils +from _pydevd_bundle import pydevd_vars +from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo +from _pydevd_bundle.pydevd_breakpoints import ExceptionBreakpoint, update_exception_hook +from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO, CMD_STEP_OVER, \ + CMD_STEP_RETURN, CMD_STEP_INTO_MY_CODE, CMD_THREAD_SUSPEND, CMD_RUN_TO_LINE, \ + CMD_ADD_EXCEPTION_BREAK, CMD_SMART_STEP_INTO, InternalConsoleExec, NetCommandFactory, \ + PyDBDaemonThread, _queue, ReaderThread, GetGlobalDebugger, get_global_debugger, \ + set_global_debugger, WriterThread, pydevd_find_thread_by_id, pydevd_log, \ + start_client, start_server, InternalGetBreakpointException, InternalSendCurrExceptionTrace, \ + InternalSendCurrExceptionTraceProceeded +from _pydevd_bundle.pydevd_custom_frames import CustomFramesContainer, custom_frames_container_init +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame +from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads +from _pydevd_bundle.pydevd_trace_dispatch import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips +from _pydevd_frame_eval.pydevd_frame_eval_main import frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, dummy_trace_dispatch +from _pydevd_bundle.pydevd_utils import save_main_module +from pydevd_concurrency_analyser.pydevd_concurrency_logger import ThreadingLogger, AsyncioLogger, send_message, cur_time +from pydevd_concurrency_analyser.pydevd_thread_wrappers import wrap_threads + + +__version_info__ = (1, 1, 1) +__version_info_str__ = [] +for v in __version_info__: + __version_info_str__.append(str(v)) + +__version__ = '.'.join(__version_info_str__) + +#IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe + + + + + + + +SUPPORT_PLUGINS = not IS_JYTH_LESS25 +PluginManager = None +if SUPPORT_PLUGINS: + from _pydevd_bundle.pydevd_plugin_utils import PluginManager + + +threadingEnumerate = threading.enumerate +threadingCurrentThread = threading.currentThread + +try: + 'dummy'.encode('utf-8') # Added because otherwise Jython 2.2.1 wasn't finding the encoding (if it wasn't loaded in the main thread). 
+except: + pass + + +connected = False +bufferStdOutToServer = False +bufferStdErrToServer = False +remote = False +forked = False + +file_system_encoding = getfilesystemencoding() + + +#======================================================================================================================= +# PyDBCommandThread +#======================================================================================================================= +class PyDBCommandThread(PyDBDaemonThread): + + def __init__(self, py_db): + PyDBDaemonThread.__init__(self) + self._py_db_command_thread_event = py_db._py_db_command_thread_event + self.py_db = py_db + self.setName('pydevd.CommandThread') + + def _on_run(self): + for i in xrange(1, 10): + time.sleep(0.5) #this one will only start later on (because otherwise we may not have any non-daemon threads + if self.killReceived: + return + + if self.pydev_do_not_trace: + self.py_db.SetTrace(None) # no debugging on this thread + + try: + while not self.killReceived: + try: + self.py_db.process_internal_commands() + except: + pydevd_log(0, 'Finishing debug communication...(2)') + self._py_db_command_thread_event.clear() + self._py_db_command_thread_event.wait(0.5) + except: + pydev_log.debug(sys.exc_info()[0]) + + #only got this error in interpreter shutdown + #pydevd_log(0, 'Finishing debug communication...(3)') + + + +#======================================================================================================================= +# CheckOutputThread +# Non-daemonic thread guaranties that all data is written even if program is finished +#======================================================================================================================= +class CheckOutputThread(PyDBDaemonThread): + + def __init__(self, py_db): + PyDBDaemonThread.__init__(self) + self.py_db = py_db + self.setName('pydevd.CheckAliveThread') + self.daemon = False + py_db.output_checker = self + + def _on_run(self): + if self.pydev_do_not_trace: + + disable_tracing = True + + if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0: + # don't run untraced threads if we're in jython 2.2.1 or lower + # jython bug: if we start a thread and another thread changes the tracing facility + # it affects other threads (it's not set only for the thread but globally) + # Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867 + disable_tracing = False + + if disable_tracing: + pydevd_tracing.SetTrace(None) # no debugging on this thread + + while not self.killReceived: + time.sleep(0.3) + if not self.py_db.has_threads_alive() and self.py_db.writer.empty() \ + and not has_data_to_redirect(): + try: + pydev_log.debug("No alive threads, finishing debug session") + self.py_db.finish_debugging_session() + kill_all_pydev_threads() + except: + traceback.print_exc() + + self.killReceived = True + + self.py_db.check_output_redirect() + + + def do_kill_pydev_thread(self): + self.killReceived = True + + + +#======================================================================================================================= +# PyDB +#======================================================================================================================= +class PyDB: + """ Main debugging class + Lots of stuff going on here: + + PyDB starts two threads on startup that connect to remote debugger (RDB) + The threads continuously read & write commands to RDB. 
+ PyDB communicates with these threads through command queues. + Every RDB command is processed by calling process_net_command. + Every PyDB net command is sent to the net by posting NetCommand to WriterThread queue + + Some commands need to be executed on the right thread (suspend/resume & friends) + These are placed on the internal command queue. + """ + + + def __init__(self): + set_global_debugger(self) + pydevd_tracing.replace_sys_set_trace_func() + self.reader = None + self.writer = None + self.output_checker = None + self.quitting = None + self.cmd_factory = NetCommandFactory() + self._cmd_queue = {} # the hash of Queues. Key is thread id, value is thread + + self.breakpoints = {} + + self.file_to_id_to_line_breakpoint = {} + self.file_to_id_to_plugin_breakpoint = {} + + # Note: breakpoints dict should not be mutated: a copy should be created + # and later it should be assigned back (to prevent concurrency issues). + self.break_on_uncaught_exceptions = {} + self.break_on_caught_exceptions = {} + + self.ready_to_run = False + self._main_lock = thread.allocate_lock() + self._lock_running_thread_ids = thread.allocate_lock() + self._py_db_command_thread_event = threading.Event() + CustomFramesContainer._py_db_command_thread_event = self._py_db_command_thread_event + self._finish_debugging_session = False + self._termination_event_set = False + self.signature_factory = None + self.SetTrace = pydevd_tracing.SetTrace + self.break_on_exceptions_thrown_in_same_context = False + self.ignore_exceptions_thrown_in_lines_with_ignore_exception = True + + # Suspend debugger even if breakpoint condition raises an exception + SUSPEND_ON_BREAKPOINT_EXCEPTION = True + self.suspend_on_breakpoint_exception = SUSPEND_ON_BREAKPOINT_EXCEPTION + + # By default user can step into properties getter/setter/deleter methods + self.disable_property_trace = False + self.disable_property_getter_trace = False + self.disable_property_setter_trace = False + self.disable_property_deleter_trace = False + + #this is a dict of thread ids pointing to thread ids. Whenever a command is passed to the java end that + #acknowledges that a thread was created, the thread id should be passed here -- and if at some time we do not + #find that thread alive anymore, we must remove it from this list and make the java side know that the thread + #was killed. + self._running_thread_ids = {} + self._set_breakpoints_with_id = False + + # This attribute holds the file-> lines which have an @IgnoreException. 
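(The "copy, then assign back" note above is the usual copy-and-swap idiom: readers on other threads keep iterating over the old dict object while the writer builds a new one and swaps it in with a single attribute rebinding. A tiny sketch with invented names:)

    class Breakpoints(object):
        def __init__(self):
            self.break_on_caught = {}

        def add(self, exc_name, breakpoint):
            cp = self.break_on_caught.copy()   # never mutate the shared dict in place
            cp[exc_name] = breakpoint
            self.break_on_caught = cp          # atomic rebinding visible to readers

    bps = Breakpoints()
    bps.add("ValueError", object())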
+ self.filename_to_lines_where_exceptions_are_ignored = {} + + #working with plugins (lazily initialized) + self.plugin = None + self.has_plugin_line_breaks = False + self.has_plugin_exception_breaks = False + self.thread_analyser = None + self.asyncio_analyser = None + + # matplotlib support in debugger and debug console + self.mpl_in_use = False + self.mpl_hooks_in_debug_console = False + self.mpl_modules_for_patching = {} + + self._filename_to_not_in_scope = {} + self.first_breakpoint_reached = False + self.is_filter_enabled = pydevd_utils.is_filter_enabled() + self.is_filter_libraries = pydevd_utils.is_filter_libraries() + self.show_return_values = False + self.remove_return_values_flag = False + + # this flag disables frame evaluation even if it's available + self.do_not_use_frame_eval = False + + def get_plugin_lazy_init(self): + if self.plugin is None and SUPPORT_PLUGINS: + self.plugin = PluginManager(self) + return self.plugin + + def not_in_scope(self, filename): + return pydevd_utils.not_in_project_roots(filename) + + def is_ignored_by_filters(self, filename): + return pydevd_utils.is_ignored_by_filter(filename) + + def first_appearance_in_scope(self, trace): + if trace is None or self.not_in_scope(trace.tb_frame.f_code.co_filename): + return False + else: + trace = trace.tb_next + while trace is not None: + frame = trace.tb_frame + if not self.not_in_scope(frame.f_code.co_filename): + return False + trace = trace.tb_next + return True + + def has_threads_alive(self): + for t in threadingEnumerate(): + if getattr(t, 'is_pydev_daemon_thread', False): + #Important: Jython 2.5rc4 has a bug where a thread created with thread.start_new_thread won't be + #set as a daemon thread, so, we also have to check for the 'is_pydev_daemon_thread' flag. + #See: https://github.com/fabioz/PyDev.Debugger/issues/11 + continue + + if isinstance(t, PyDBDaemonThread): + pydev_log.error_once( + 'Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.\n') + + if is_thread_alive(t): + if not t.isDaemon() or hasattr(t, "__pydevd_main_thread"): + return True + + return False + + def finish_debugging_session(self): + self._finish_debugging_session = True + + + def initialize_network(self, sock): + try: + sock.settimeout(None) # infinite, no timeouts from now on - jython does not have it + except: + pass + self.writer = WriterThread(sock) + self.reader = ReaderThread(sock) + self.writer.start() + self.reader.start() + + time.sleep(0.1) # give threads time to start + + def connect(self, host, port): + if host: + s = start_client(host, port) + else: + s = start_server(port) + + self.initialize_network(s) + + + def get_internal_queue(self, thread_id): + """ returns internal command queue for a given thread. 
+ if new queue is created, notify the RDB about it """ + if thread_id.startswith('__frame__'): + thread_id = thread_id[thread_id.rfind('|') + 1:] + try: + return self._cmd_queue[thread_id] + except KeyError: + return self._cmd_queue.setdefault(thread_id, _queue.Queue()) #@UndefinedVariable + + + def post_internal_command(self, int_cmd, thread_id): + """ if thread_id is *, post to all """ + if thread_id == "*": + threads = threadingEnumerate() + for t in threads: + thread_id = get_thread_id(t) + queue = self.get_internal_queue(thread_id) + queue.put(int_cmd) + + else: + queue = self.get_internal_queue(thread_id) + queue.put(int_cmd) + + def check_output_redirect(self): + global bufferStdOutToServer + global bufferStdErrToServer + + if bufferStdOutToServer: + init_stdout_redirect() + self.check_output(sys.stdoutBuf, 1) #@UndefinedVariable + + if bufferStdErrToServer: + init_stderr_redirect() + self.check_output(sys.stderrBuf, 2) #@UndefinedVariable + + def check_output(self, out, outCtx): + '''Checks the output to see if we have to send some buffered output to the debug server + + @param out: sys.stdout or sys.stderr + @param outCtx: the context indicating: 1=stdout and 2=stderr (to know the colors to write it) + ''' + + try: + v = out.getvalue() + + if v: + self.cmd_factory.make_io_message(v, outCtx, self) + except: + traceback.print_exc() + + + def init_matplotlib_in_debug_console(self): + # import hook and patches for matplotlib support in debug console + from _pydev_bundle.pydev_import_hook import import_hook_manager + for module in dict_keys(self.mpl_modules_for_patching): + import_hook_manager.add_module_name(module, self.mpl_modules_for_patching.pop(module)) + + def init_matplotlib_support(self): + # prepare debugger for integration with matplotlib GUI event loop + from pydev_ipython.matplotlibtools import activate_matplotlib, activate_pylab, activate_pyplot, do_enable_gui + # enable_gui_function in activate_matplotlib should be called in main thread. Unlike integrated console, + # in the debug console we have no interpreter instance with exec_queue, but we run this code in the main + # thread and can call it directly. + class _MatplotlibHelper: + _return_control_osc = False + + def return_control(): + # Some of the input hooks (e.g. 
Qt4Agg) check return control without doing + # a single operation, so we don't return True on every + # call when the debug hook is in place to allow the GUI to run + _MatplotlibHelper._return_control_osc = not _MatplotlibHelper._return_control_osc + return _MatplotlibHelper._return_control_osc + + from pydev_ipython.inputhook import set_return_control_callback + set_return_control_callback(return_control) + + self.mpl_modules_for_patching = {"matplotlib": lambda: activate_matplotlib(do_enable_gui), + "matplotlib.pyplot": activate_pyplot, + "pylab": activate_pylab } + + def _activate_mpl_if_needed(self): + if len(self.mpl_modules_for_patching) > 0: + for module in dict_keys(self.mpl_modules_for_patching): + if module in sys.modules: + activate_function = self.mpl_modules_for_patching.pop(module) + activate_function() + self.mpl_in_use = True + + def _call_mpl_hook(self): + try: + from pydev_ipython.inputhook import get_inputhook + inputhook = get_inputhook() + if inputhook: + inputhook() + except: + pass + + def suspend_all_other_threads(self, thread_suspended_at_bp): + all_threads = threadingEnumerate() + for t in all_threads: + if getattr(t, 'is_pydev_daemon_thread', False): + pass # I.e.: skip the DummyThreads created from pydev daemon threads + elif hasattr(t, 'pydev_do_not_trace'): + pass # skip some other threads, i.e. ipython history saving thread from debug console + else: + if t is thread_suspended_at_bp: + continue + additional_info = None + try: + additional_info = t.additional_info + except AttributeError: + pass # that's ok, no info currently set + + if additional_info is not None: + for frame in additional_info.iter_frames(t): + self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True) + del frame + + self.set_suspend(t, CMD_THREAD_SUSPEND) + else: + sys.stderr.write("Can't suspend thread: %s\n" % (t,)) + + def process_internal_commands(self): + '''This function processes internal commands + ''' + self._main_lock.acquire() + try: + + self.check_output_redirect() + + curr_thread_id = get_thread_id(threadingCurrentThread()) + program_threads_alive = {} + all_threads = threadingEnumerate() + program_threads_dead = [] + self._lock_running_thread_ids.acquire() + try: + for t in all_threads: + if getattr(t, 'is_pydev_daemon_thread', False): + pass # I.e.: skip the DummyThreads created from pydev daemon threads + elif isinstance(t, PyDBDaemonThread): + pydev_log.error_once('Error in debugger: Found PyDBDaemonThread not marked with is_pydev_daemon_thread=True.\n') + + elif is_thread_alive(t): + if not self._running_thread_ids: + # Fix multiprocessing debug with breakpoints in both main and child processes + # (https://youtrack.jetbrains.com/issue/PY-17092) When the new process is created, the main + # thread in the new process already has the attribute 'pydevd_id', so the new thread doesn't + # get new id with its process number and the debugger loses access to both threads. + # Therefore we should update thread_id for every main thread in the new process. + + # TODO: Investigate: should we do this for all threads in threading.enumerate()? + # (i.e.: if a fork happens on Linux, this seems likely). + old_thread_id = get_thread_id(t) + if old_thread_id != 'console_main': + # The console_main is a special thread id used in the console and its id should never be reset + # (otherwise we may no longer be able to get its variables -- see: https://www.brainwy.com/tracker/PyDev/776). 
+ clear_cached_thread_id(t) + clear_cached_thread_id(threadingCurrentThread()) + + thread_id = get_thread_id(t) + curr_thread_id = get_thread_id(threadingCurrentThread()) + if pydevd_vars.has_additional_frames_by_id(old_thread_id): + frames_by_id = pydevd_vars.get_additional_frames_by_id(old_thread_id) + pydevd_vars.add_additional_frame_by_id(thread_id, frames_by_id) + else: + thread_id = get_thread_id(t) + program_threads_alive[thread_id] = t + + if thread_id not in self._running_thread_ids: + if not hasattr(t, 'additional_info'): + # see http://sourceforge.net/tracker/index.php?func=detail&aid=1955428&group_id=85796&atid=577329 + # Let's create the additional info right away! + t.additional_info = PyDBAdditionalThreadInfo() + self._running_thread_ids[thread_id] = t + self.writer.add_command(self.cmd_factory.make_thread_created_message(t)) + + + queue = self.get_internal_queue(thread_id) + cmdsToReadd = [] # some commands must be processed by the thread itself... if that's the case, + # we will re-add the commands to the queue after executing. + try: + while True: + int_cmd = queue.get(False) + + if not self.mpl_hooks_in_debug_console and isinstance(int_cmd, InternalConsoleExec): + # add import hooks for matplotlib patches if only debug console was started + try: + self.init_matplotlib_in_debug_console() + self.mpl_in_use = True + except: + pydevd_log(2, "Matplotlib support in debug console failed", traceback.format_exc()) + self.mpl_hooks_in_debug_console = True + + if int_cmd.can_be_executed_by(curr_thread_id): + pydevd_log(2, "processing internal command ", str(int_cmd)) + int_cmd.do_it(self) + else: + pydevd_log(2, "NOT processing internal command ", str(int_cmd)) + cmdsToReadd.append(int_cmd) + + + except _queue.Empty: #@UndefinedVariable + for int_cmd in cmdsToReadd: + queue.put(int_cmd) + # this is how we exit + + + thread_ids = list(self._running_thread_ids.keys()) + for tId in thread_ids: + if tId not in program_threads_alive: + program_threads_dead.append(tId) + finally: + self._lock_running_thread_ids.release() + + for tId in program_threads_dead: + try: + self._process_thread_not_alive(tId) + except: + sys.stderr.write('Error iterating through %s (%s) - %s\n' % ( + program_threads_alive, program_threads_alive.__class__, dir(program_threads_alive))) + raise + + + if len(program_threads_alive) == 0: + self.finish_debugging_session() + for t in all_threads: + if hasattr(t, 'do_kill_pydev_thread'): + t.do_kill_pydev_thread() + + finally: + self._main_lock.release() + + def disable_tracing_while_running_if_frame_eval(self): + pydevd_tracing.settrace_while_running_if_frame_eval(self, self.dummy_trace_dispatch) + + def enable_tracing_in_frames_while_running_if_frame_eval(self): + pydevd_tracing.settrace_while_running_if_frame_eval(self, self.trace_dispatch) + + def set_tracing_for_untraced_contexts_if_not_frame_eval(self, ignore_frame=None, overwrite_prev_trace=False): + if self.frame_eval_func is not None: + return + self.set_tracing_for_untraced_contexts(ignore_frame, overwrite_prev_trace) + + def set_tracing_for_untraced_contexts(self, ignore_frame=None, overwrite_prev_trace=False): + # Enable the tracing for existing threads (because there may be frames being executed that + # are currently untraced). + if self.frame_eval_func is not None: + return + threads = threadingEnumerate() + try: + for t in threads: + if getattr(t, 'is_pydev_daemon_thread', False): + continue + + # TODO: optimize so that we only actually add that tracing if it's in + # the new breakpoint context. 
+ additional_info = None + try: + additional_info = t.additional_info + except AttributeError: + pass # that's ok, no info currently set + + if additional_info is not None: + for frame in additional_info.iter_frames(t): + if frame is not ignore_frame: + self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=overwrite_prev_trace) + finally: + frame = None + t = None + threads = None + additional_info = None + + + def consolidate_breakpoints(self, file, id_to_breakpoint, breakpoints): + break_dict = {} + for breakpoint_id, pybreakpoint in dict_iter_items(id_to_breakpoint): + break_dict[pybreakpoint.line] = pybreakpoint + + breakpoints[file] = break_dict + global_cache_skips.clear() + global_cache_frame_skips.clear() + + def add_break_on_exception( + self, + exception, + notify_always, + notify_on_terminate, + notify_on_first_raise_only, + ignore_libraries=False + ): + try: + eb = ExceptionBreakpoint( + exception, + notify_always, + notify_on_terminate, + notify_on_first_raise_only, + ignore_libraries + ) + except ImportError: + pydev_log.error("Error unable to add break on exception for: %s (exception could not be imported)\n" % (exception,)) + return None + + if eb.notify_on_terminate: + cp = self.break_on_uncaught_exceptions.copy() + cp[exception] = eb + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + pydev_log.error("Exceptions to hook on terminate: %s\n" % (cp,)) + self.break_on_uncaught_exceptions = cp + + if eb.notify_always: + cp = self.break_on_caught_exceptions.copy() + cp[exception] = eb + if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0: + pydev_log.error("Exceptions to hook always: %s\n" % (cp,)) + self.break_on_caught_exceptions = cp + + return eb + + def update_after_exceptions_added(self, added): + updated_on_caught = False + updated_on_uncaught = False + + for eb in added: + if not updated_on_uncaught and eb.notify_on_terminate: + updated_on_uncaught = True + update_exception_hook(self) + + if not updated_on_caught and eb.notify_always: + updated_on_caught = True + self.set_tracing_for_untraced_contexts_if_not_frame_eval() + + def _process_thread_not_alive(self, threadId): + """ if thread is not alive, cancel trace_dispatch processing """ + self._lock_running_thread_ids.acquire() + try: + thread = self._running_thread_ids.pop(threadId, None) + if thread is None: + return + + wasNotified = thread.additional_info.pydev_notify_kill + if not wasNotified: + thread.additional_info.pydev_notify_kill = True + + finally: + self._lock_running_thread_ids.release() + + cmd = self.cmd_factory.make_thread_killed_message(threadId) + self.writer.add_command(cmd) + + + def set_suspend(self, thread, stop_reason): + thread.additional_info.suspend_type = PYTHON_SUSPEND + thread.additional_info.pydev_state = STATE_SUSPEND + thread.stop_reason = stop_reason + + # If conditional breakpoint raises any exception during evaluation send details to Java + if stop_reason == CMD_SET_BREAK and self.suspend_on_breakpoint_exception: + self._send_breakpoint_condition_exception(thread) + + + def _send_breakpoint_condition_exception(self, thread): + """If conditional breakpoint raises an exception during evaluation + send exception details to java + """ + thread_id = get_thread_id(thread) + conditional_breakpoint_exception_tuple = thread.additional_info.conditional_breakpoint_exception + # conditional_breakpoint_exception_tuple - should contain 2 values (exception_type, stacktrace) + if conditional_breakpoint_exception_tuple and len(conditional_breakpoint_exception_tuple) == 2: + exc_type, stacktrace 
= conditional_breakpoint_exception_tuple + int_cmd = InternalGetBreakpointException(thread_id, exc_type, stacktrace) + # Reset the conditional_breakpoint_exception details to None + thread.additional_info.conditional_breakpoint_exception = None + self.post_internal_command(int_cmd, thread_id) + + + def send_caught_exception_stack(self, thread, arg, curr_frame_id): + """Sends details on the exception which was caught (and where we stopped) to the java side. + + arg is: exception type, description, traceback object + """ + thread_id = get_thread_id(thread) + int_cmd = InternalSendCurrExceptionTrace(thread_id, arg, curr_frame_id) + self.post_internal_command(int_cmd, thread_id) + + + def send_caught_exception_stack_proceeded(self, thread): + """Sends that some thread was resumed and is no longer showing an exception trace. + """ + thread_id = get_thread_id(thread) + int_cmd = InternalSendCurrExceptionTraceProceeded(thread_id) + self.post_internal_command(int_cmd, thread_id) + self.process_internal_commands() + + + def send_process_created_message(self): + """Sends a message that a new process has been created. + """ + cmd = self.cmd_factory.make_process_created_message() + self.writer.add_command(cmd) + + + def do_wait_suspend(self, thread, frame, event, arg, suspend_type="trace"): #@UnusedVariable + """ busy waits until the thread state changes to RUN + it expects thread's state as attributes of the thread. + Upon running, processes any outstanding Stepping commands. + """ + self.process_internal_commands() + + message = thread.additional_info.pydev_message + + cmd = self.cmd_factory.make_thread_suspend_message(get_thread_id(thread), frame, thread.stop_reason, message, suspend_type) + self.writer.add_command(cmd) + + CustomFramesContainer.custom_frames_lock.acquire() # @UndefinedVariable + try: + from_this_thread = [] + + for frame_id, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames): + if custom_frame.thread_id == thread.ident: + # print >> sys.stderr, 'Frame created: ', frame_id + self.writer.add_command(self.cmd_factory.make_custom_frame_created_message(frame_id, custom_frame.name)) + self.writer.add_command(self.cmd_factory.make_thread_suspend_message(frame_id, custom_frame.frame, CMD_THREAD_SUSPEND, "", suspend_type)) + + from_this_thread.append(frame_id) + + finally: + CustomFramesContainer.custom_frames_lock.release() # @UndefinedVariable + + imported = False + info = thread.additional_info + + if info.pydev_state == STATE_SUSPEND and not self._finish_debugging_session: + # before every stop check if matplotlib modules were imported inside script code + self._activate_mpl_if_needed() + + while info.pydev_state == STATE_SUSPEND and not self._finish_debugging_session: + if self.mpl_in_use: + # call input hooks if only matplotlib is in use + self._call_mpl_hook() + + self.process_internal_commands() + time.sleep(0.01) + + # process any stepping instructions + if info.pydev_step_cmd == CMD_STEP_INTO or info.pydev_step_cmd == CMD_STEP_INTO_MY_CODE: + info.pydev_step_stop = None + info.pydev_smart_step_stop = None + + elif info.pydev_step_cmd == CMD_STEP_OVER: + info.pydev_step_stop = frame + info.pydev_smart_step_stop = None + self.set_trace_for_frame_and_parents(frame) + + elif info.pydev_step_cmd == CMD_SMART_STEP_INTO: + self.set_trace_for_frame_and_parents(frame) + info.pydev_step_stop = None + info.pydev_smart_step_stop = frame + + elif info.pydev_step_cmd == CMD_RUN_TO_LINE or info.pydev_step_cmd == CMD_SET_NEXT_STATEMENT : + 
self.set_trace_for_frame_and_parents(frame) + + if event == 'line' or event == 'exception': + #If we're already in the correct context, we have to stop it now, because we can act only on + #line events -- if a return was the next statement it wouldn't work (so, we have this code + #repeated at pydevd_frame). + stop = False + curr_func_name = frame.f_code.co_name + + #global context is set with an empty name + if curr_func_name in ('?', ''): + curr_func_name = '' + + if curr_func_name == info.pydev_func_name: + line = info.pydev_next_line + if frame.f_lineno == line: + stop = True + else : + if frame.f_trace is None: + frame.f_trace = self.trace_dispatch + frame.f_lineno = line + frame.f_trace = None + stop = True + if stop: + info.pydev_state = STATE_SUSPEND + self.do_wait_suspend(thread, frame, event, arg, "trace") + return + + + elif info.pydev_step_cmd == CMD_STEP_RETURN: + back_frame = frame.f_back + if back_frame is not None: + # steps back to the same frame (in a return call it will stop in the 'back frame' for the user) + info.pydev_step_stop = frame + self.set_trace_for_frame_and_parents(frame) + else: + # No back frame?!? -- this happens in jython when we have some frame created from an awt event + # (the previous frame would be the awt event, but this doesn't make part of 'jython', only 'java') + # so, if we're doing a step return in this situation, it's the same as just making it run + info.pydev_step_stop = None + info.pydev_step_cmd = -1 + info.pydev_state = STATE_RUN + + if self.frame_eval_func is not None and info.pydev_state == STATE_RUN: + if info.pydev_step_cmd == -1: + if not self.do_not_use_frame_eval: + self.SetTrace(self.dummy_trace_dispatch) + self.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True, dispatch_func=dummy_trace_dispatch) + else: + if info.pydev_step_cmd == CMD_STEP_INTO or info.pydev_step_cmd == CMD_STEP_INTO_MY_CODE: + self.set_trace_for_frame_and_parents(frame) + # enable old tracing function for stepping + self.SetTrace(self.trace_dispatch) + + del frame + cmd = self.cmd_factory.make_thread_run_message(get_thread_id(thread), info.pydev_step_cmd) + self.writer.add_command(cmd) + + CustomFramesContainer.custom_frames_lock.acquire() # @UndefinedVariable + try: + # The ones that remained on last_running must now be removed. 
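(The do_wait_suspend loop above reduces to a polling pattern like the following sketch; STATE_SUSPEND/STATE_RUN, the info object and fake_commands are stand-ins for pydevd's internals, not its actual API:)

    import time

    STATE_RUN, STATE_SUSPEND = 1, 2

    class ThreadInfo(object):
        pydev_state = STATE_SUSPEND

    def wait_until_resumed(info, process_commands, poll=0.01):
        # Busy-wait until a resume/step command flips the state to RUN.
        while info.pydev_state == STATE_SUSPEND:
            process_commands()
            time.sleep(poll)

    info = ThreadInfo()
    polls = []

    def fake_commands():
        polls.append(1)
        if len(polls) >= 3:            # pretend the IDE sent a resume after 3 polls
            info.pydev_state = STATE_RUN

    wait_until_resumed(info, fake_commands)
    print("resumed after", len(polls), "polls")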
+ for frame_id in from_this_thread: + # print >> sys.stderr, 'Removing created frame: ', frame_id + self.writer.add_command(self.cmd_factory.make_thread_killed_message(frame_id)) + + finally: + CustomFramesContainer.custom_frames_lock.release() # @UndefinedVariable + + def handle_post_mortem_stop(self, thread, frame, frames_byid, exception): + pydev_log.debug("We are stopping in post-mortem\n") + thread_id = get_thread_id(thread) + pydevd_vars.add_additional_frame_by_id(thread_id, frames_byid) + try: + try: + add_exception_to_frame(frame, exception) + self.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK) + self.do_wait_suspend(thread, frame, 'exception', None, "trace") + except: + pydev_log.error("We've got an error while stopping in post-mortem: %s\n"%sys.exc_info()[0]) + finally: + pydevd_vars.remove_additional_frame_by_id(thread_id) + + + def set_trace_for_frame_and_parents(self, frame, also_add_to_passed_frame=True, overwrite_prev_trace=False, dispatch_func=None): + if dispatch_func is None: + dispatch_func = self.trace_dispatch + + if also_add_to_passed_frame: + self.update_trace(frame, dispatch_func, overwrite_prev_trace) + + frame = frame.f_back + while frame: + self.update_trace(frame, dispatch_func, overwrite_prev_trace) + + frame = frame.f_back + del frame + + def update_trace(self, frame, dispatch_func, overwrite_prev): + if frame.f_trace is None: + frame.f_trace = dispatch_func + else: + if overwrite_prev: + frame.f_trace = dispatch_func + else: + try: + #If it's the trace_exception, go back to the frame trace dispatch! + if frame.f_trace.im_func.__name__ == 'trace_exception': + frame.f_trace = frame.f_trace.im_self.trace_dispatch + except AttributeError: + pass + frame = frame.f_back + del frame + + def prepare_to_run(self): + ''' Shared code to prepare debugging by installing traces and registering threads ''' + self.patch_threads() + pydevd_tracing.SetTrace(self.trace_dispatch, self.frame_eval_func, self.dummy_trace_dispatch) + # There is no need to set tracing function if frame evaluation is available. Moreover, there is no need to patch thread + # functions, because frame evaluation function is set to all threads by default. + + PyDBCommandThread(self).start() + if self.signature_factory is not None or self.thread_analyser is not None: + # we need all data to be sent to IDE even after program finishes + CheckOutputThread(self).start() + + + def patch_threads(self): + try: + # not available in jython! 
+ import threading + threading.settrace(self.trace_dispatch) # for all future threads + except: + pass + + from _pydev_bundle.pydev_monkey import patch_thread_modules + patch_thread_modules() + + def get_fullname(self, mod_name): + if IS_PY3K: + import pkgutil + else: + from _pydev_imps import _pydev_pkgutil_old as pkgutil + try: + loader = pkgutil.get_loader(mod_name) + except: + return None + if loader is not None: + for attr in ("get_filename", "_get_filename"): + meth = getattr(loader, attr, None) + if meth is not None: + return meth(mod_name) + return None + + def run(self, file, globals=None, locals=None, is_module=False, set_trace=True): + module_name = None + if is_module: + file, _, entry_point_fn = file.partition(':') + module_name = file + filename = self.get_fullname(file) + if filename is None: + sys.stderr.write("No module named %s\n" % file) + return + else: + file = filename + + if os.path.isdir(file): + new_target = os.path.join(file, '__main__.py') + if os.path.isfile(new_target): + file = new_target + + if globals is None: + m = save_main_module(file, 'pydevd') + globals = m.__dict__ + try: + globals['__builtins__'] = __builtins__ + except NameError: + pass # Not there on Jython... + + if locals is None: + locals = globals + + if set_trace: + # Predefined (writable) attributes: __name__ is the module's name; + # __doc__ is the module's documentation string, or None if unavailable; + # __file__ is the pathname of the file from which the module was loaded, + # if it was loaded from a file. The __file__ attribute is not present for + # C modules that are statically linked into the interpreter; for extension modules + # loaded dynamically from a shared library, it is the pathname of the shared library file. + + + # I think this is an ugly hack, bug it works (seems to) for the bug that says that sys.path should be the same in + # debug and run. 
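(get_fullname above resolves a "-m package.module" target to a filename through pkgutil loaders; a modern Python 3 equivalent, shown only for comparison and not what the vendored code does, is importlib.util.find_spec:)

    import importlib.util

    def module_filename(mod_name):
        spec = importlib.util.find_spec(mod_name)
        return spec.origin if spec is not None else None

    print(module_filename("json.tool"))   # e.g. /usr/lib/python3.x/json/tool.py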
+ if m.__file__.startswith(sys.path[0]): + # print >> sys.stderr, 'Deleting: ', sys.path[0] + del sys.path[0] + + if not is_module: + # now, the local directory has to be added to the pythonpath + # sys.path.insert(0, os.getcwd()) + # Changed: it's not the local directory, but the directory of the file launched + # The file being run must be in the pythonpath (even if it was not before) + sys.path.insert(0, os.path.split(file)[0]) + + while not self.ready_to_run: + time.sleep(0.1) # busy wait until we receive run command + + if self.break_on_caught_exceptions or (self.plugin and self.plugin.has_exception_breaks()) or self.signature_factory: + # disable frame evaluation if there are exception breakpoints with 'On raise' activation policy + # or if there are plugin exception breakpoints or if collecting run-time types is enabled + self.frame_eval_func = None + + # call prepare_to_run when we already have all information about breakpoints + self.prepare_to_run() + + if self.thread_analyser is not None: + wrap_threads() + t = threadingCurrentThread() + self.thread_analyser.set_start_time(cur_time()) + send_message("threading_event", 0, t.getName(), get_thread_id(t), "thread", "start", file, 1, None, parent=get_thread_id(t)) + + if self.asyncio_analyser is not None: + # we don't have main thread in asyncio graph, so we should add a fake event + send_message("asyncio_event", 0, "Task", "Task", "thread", "stop", file, 1, frame=None, parent=None) + + try: + if INTERACTIVE_MODE_AVAILABLE: + self.init_matplotlib_support() + except: + sys.stderr.write("Matplotlib support in debugger failed\n") + traceback.print_exc() + + if not is_module: + pydev_imports.execfile(file, globals, locals) # execute the script + else: + # treat ':' as a seperator between module and entry point function + # if there is no entry point we run we same as with -m switch. Otherwise we perform + # an import and execute the entry point + if entry_point_fn: + mod = __import__(module_name, level=0, fromlist=[entry_point_fn], globals=globals, locals=locals) + func = getattr(mod, entry_point_fn) + func() + else: + # Run with the -m switch + import runpy + if hasattr(runpy, '_run_module_as_main'): + # Newer versions of Python actually use this when the -m switch is used. 
+ if sys.version_info[:2] <= (2, 6): + runpy._run_module_as_main(module_name, set_argv0=False) + else: + runpy._run_module_as_main(module_name, alter_argv=False) + else: + runpy.run_module(module_name) + return globals + + def exiting(self): + sys.stdout.flush() + sys.stderr.flush() + self.check_output_redirect() + cmd = self.cmd_factory.make_exit_message() + self.writer.add_command(cmd) + + def wait_for_commands(self, globals): + self._activate_mpl_if_needed() + + thread = threading.currentThread() + from _pydevd_bundle import pydevd_frame_utils + frame = pydevd_frame_utils.Frame(None, -1, pydevd_frame_utils.FCode("Console", + os.path.abspath(os.path.dirname(__file__))), globals, globals) + thread_id = get_thread_id(thread) + pydevd_vars.add_additional_frame_by_id(thread_id, {id(frame): frame}) + + cmd = self.cmd_factory.make_show_console_message(thread_id, frame) + self.writer.add_command(cmd) + + while True: + if self.mpl_in_use: + # call input hooks if only matplotlib is in use + self._call_mpl_hook() + self.process_internal_commands() + time.sleep(0.01) + + trace_dispatch = _trace_dispatch + frame_eval_func = frame_eval_func + dummy_trace_dispatch = dummy_trace_dispatch + enable_cache_frames_without_breaks = enable_cache_frames_without_breaks + +def set_debug(setup): + setup['DEBUG_RECORD_SOCKET_READS'] = True + setup['DEBUG_TRACE_BREAKPOINTS'] = 1 + setup['DEBUG_TRACE_LEVEL'] = 3 + + +def enable_qt_support(qt_support_mode): + from _pydev_bundle import pydev_monkey_qt + pydev_monkey_qt.patch_qt(qt_support_mode) + + + +def usage(doExit=0): + sys.stdout.write('Usage:\n') + sys.stdout.write('pydevd.py --port N [(--client hostname) | --server] --file executable [file_options]\n') + if doExit: + sys.exit(0) + + +def init_stdout_redirect(): + if not getattr(sys, 'stdoutBuf', None): + sys.stdoutBuf = pydevd_io.IOBuf() + sys.stdout_original = sys.stdout + sys.stdout = pydevd_io.IORedirector(sys.stdout, sys.stdoutBuf) #@UndefinedVariable + +def init_stderr_redirect(): + if not getattr(sys, 'stderrBuf', None): + sys.stderrBuf = pydevd_io.IOBuf() + sys.stderr_original = sys.stderr + sys.stderr = pydevd_io.IORedirector(sys.stderr, sys.stderrBuf) #@UndefinedVariable + + +def has_data_to_redirect(): + if getattr(sys, 'stdoutBuf', None): + if not sys.stdoutBuf.empty(): + return True + if getattr(sys, 'stderrBuf', None): + if not sys.stderrBuf.empty(): + return True + + return False + +#======================================================================================================================= +# settrace +#======================================================================================================================= +def settrace( + host=None, + stdoutToServer=False, + stderrToServer=False, + port=5678, + suspend=True, + trace_only_current_thread=False, + overwrite_prev_trace=False, + patch_multiprocessing=False, + ): + '''Sets the tracing function with the pydev debug function and initializes needed facilities. + + @param host: the user may specify another host, if the debug server is not in the same machine (default is the local + host) + + @param stdoutToServer: when this is true, the stdout is passed to the debug server + + @param stderrToServer: when this is true, the stderr is passed to the debug server + so that they are printed in its console and not in this process console. + + @param port: specifies which port to use for communicating with the server (note that the server must be started + in the same port). 
@note: currently it's hard-coded at 5678 in the client + + @param suspend: whether a breakpoint should be emulated as soon as this function is called. + + @param trace_only_current_thread: determines if only the current thread will be traced or all current and future + threads will also have the tracing enabled. + + @param overwrite_prev_trace: if True we'll reset the frame.f_trace of frames which are already being traced + + @param patch_multiprocessing: if True we'll patch the functions which create new processes so that launched + processes are debugged. + ''' + _set_trace_lock.acquire() + try: + _locked_settrace( + host, + stdoutToServer, + stderrToServer, + port, + suspend, + trace_only_current_thread, + overwrite_prev_trace, + patch_multiprocessing, + ) + finally: + _set_trace_lock.release() + + + +_set_trace_lock = thread.allocate_lock() + +def _locked_settrace( + host, + stdoutToServer, + stderrToServer, + port, + suspend, + trace_only_current_thread, + overwrite_prev_trace, + patch_multiprocessing, + ): + if patch_multiprocessing: + try: + from _pydev_bundle import pydev_monkey + except: + pass + else: + pydev_monkey.patch_new_process_functions() + + if host is None: + from _pydev_bundle import pydev_localhost + host = pydev_localhost.get_localhost() + + global connected + global bufferStdOutToServer + global bufferStdErrToServer + + if not connected: + pydevd_vm_type.setup_type() + + if SetupHolder.setup is None: + setup = { + 'client': host, # dispatch expects client to be set to the host address when server is False + 'server': False, + 'port': int(port), + 'multiprocess': patch_multiprocessing, + } + SetupHolder.setup = setup + + debugger = PyDB() + debugger.connect(host, port) # Note: connect can raise error. + + # Mark connected only if it actually succeeded. + connected = True + bufferStdOutToServer = stdoutToServer + bufferStdErrToServer = stderrToServer + + if bufferStdOutToServer: + init_stdout_redirect() + + if bufferStdErrToServer: + init_stderr_redirect() + + patch_stdin(debugger) + debugger.set_trace_for_frame_and_parents(get_frame(), False, overwrite_prev_trace=overwrite_prev_trace) + + + CustomFramesContainer.custom_frames_lock.acquire() # @UndefinedVariable + try: + for _frameId, custom_frame in dict_iter_items(CustomFramesContainer.custom_frames): + debugger.set_trace_for_frame_and_parents(custom_frame.frame, False) + finally: + CustomFramesContainer.custom_frames_lock.release() # @UndefinedVariable + + + t = threadingCurrentThread() + try: + additional_info = t.additional_info + except AttributeError: + additional_info = PyDBAdditionalThreadInfo() + t.additional_info = additional_info + + while not debugger.ready_to_run: + time.sleep(0.1) # busy wait until we receive run command + + global forked + frame_eval_for_tracing = debugger.frame_eval_func + if frame_eval_func is not None and not forked: + # Disable frame evaluation for Remote Debug Server + frame_eval_for_tracing = None + + # note that we do that through pydevd_tracing.SetTrace so that the tracing + # is not warned to the user! + pydevd_tracing.SetTrace(debugger.trace_dispatch, frame_eval_for_tracing, debugger.dummy_trace_dispatch) + + if not trace_only_current_thread: + # Trace future threads? + debugger.patch_threads() + + # As this is the first connection, also set tracing for any untraced threads + debugger.set_tracing_for_untraced_contexts(ignore_frame=get_frame(), overwrite_prev_trace=overwrite_prev_trace) + + # Stop the tracing as the last thing before the actual shutdown for a clean exit. 
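+        # stoptrace() (defined below) restores sys.settrace/threading.settrace, undoes the
+        # thread-module patching and sends the exit message, so registering it with atexit
+        # gives the IDE a clean disconnect even if tracing is never stopped explicitly.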
+ atexit.register(stoptrace) + + PyDBCommandThread(debugger).start() + CheckOutputThread(debugger).start() + + #Suspend as the last thing after all tracing is in place. + if suspend: + debugger.set_suspend(t, CMD_THREAD_SUSPEND) + + + else: + # ok, we're already in debug mode, with all set, so, let's just set the break + debugger = get_global_debugger() + + debugger.set_trace_for_frame_and_parents(get_frame(), False) + + t = threadingCurrentThread() + try: + additional_info = t.additional_info + except AttributeError: + additional_info = PyDBAdditionalThreadInfo() + t.additional_info = additional_info + + pydevd_tracing.SetTrace(debugger.trace_dispatch, debugger.frame_eval_func, debugger.dummy_trace_dispatch) + + if not trace_only_current_thread: + # Trace future threads? + debugger.patch_threads() + + + if suspend: + debugger.set_suspend(t, CMD_THREAD_SUSPEND) + + +def stoptrace(): + global connected + if connected: + pydevd_tracing.restore_sys_set_trace_func() + sys.settrace(None) + try: + #not available in jython! + threading.settrace(None) # for all future threads + except: + pass + + from _pydev_bundle.pydev_monkey import undo_patch_thread_modules + undo_patch_thread_modules() + + debugger = get_global_debugger() + + if debugger: + + debugger.set_trace_for_frame_and_parents( + get_frame(), also_add_to_passed_frame=True, overwrite_prev_trace=True, dispatch_func=lambda *args:None) + debugger.exiting() + + kill_all_pydev_threads() + + connected = False + +class Dispatcher(object): + def __init__(self): + self.port = None + + def connect(self, host, port): + self.host = host + self.port = port + self.client = start_client(self.host, self.port) + self.reader = DispatchReader(self) + self.reader.pydev_do_not_trace = False #we run reader in the same thread so we don't want to loose tracing + self.reader.run() + + def close(self): + try: + self.reader.do_kill_pydev_thread() + except : + pass + +class DispatchReader(ReaderThread): + def __init__(self, dispatcher): + self.dispatcher = dispatcher + ReaderThread.__init__(self, self.dispatcher.client) + + def _on_run(self): + dummy_thread = threading.currentThread() + dummy_thread.is_pydev_daemon_thread = False + return ReaderThread._on_run(self) + + def handle_except(self): + ReaderThread.handle_except(self) + + def process_command(self, cmd_id, seq, text): + if cmd_id == 99: + self.dispatcher.port = int(text) + self.killReceived = True + + +DISPATCH_APPROACH_NEW_CONNECTION = 1 # Used by PyDev +DISPATCH_APPROACH_EXISTING_CONNECTION = 2 # Used by PyCharm +DISPATCH_APPROACH = DISPATCH_APPROACH_NEW_CONNECTION + +def dispatch(): + setup = SetupHolder.setup + host = setup['client'] + port = setup['port'] + if DISPATCH_APPROACH == DISPATCH_APPROACH_EXISTING_CONNECTION: + dispatcher = Dispatcher() + try: + dispatcher.connect(host, port) + port = dispatcher.port + finally: + dispatcher.close() + return host, port + + +def settrace_forked(): + ''' + When creating a fork from a process in the debugger, we need to reset the whole debugger environment! 
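+
+    Concretely (see the body below): ask the dispatcher for a fresh port, clear the
+    module-level 'connected' flag, mark 'forked', and call settrace() again with
+    patch_multiprocessing=True so processes launched from this one are debugged as well.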
+ ''' + host, port = dispatch() + + import pydevd_tracing + pydevd_tracing.restore_sys_set_trace_func() + + if port is not None: + global connected + connected = False + global forked + forked = True + + custom_frames_container_init() + + settrace( + host, + port=port, + suspend=False, + trace_only_current_thread=False, + overwrite_prev_trace=True, + patch_multiprocessing=True, + ) + +#======================================================================================================================= +# SetupHolder +#======================================================================================================================= +class SetupHolder: + + setup = None + + +def apply_debugger_options(setup_options): + """ + + :type setup_options: dict[str, bool] + """ + default_options = {'save-signatures': False, 'qt-support': ''} + default_options.update(setup_options) + setup_options = default_options + + debugger = GetGlobalDebugger() + if setup_options['save-signatures']: + if pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON: + sys.stderr.write("Collecting run-time type information is not supported for Jython\n") + else: + # Only import it if we're going to use it! + from _pydevd_bundle.pydevd_signature import SignatureFactory + debugger.signature_factory = SignatureFactory() + + if setup_options['qt-support']: + enable_qt_support(setup_options['qt-support']) + + +def patch_stdin(debugger): + from _pydev_bundle.pydev_console_utils import DebugConsoleStdIn + orig_stdin = sys.stdin + sys.stdin = DebugConsoleStdIn(debugger, orig_stdin) + +# Dispatch on_debugger_modules_loaded here, after all primary debugger modules are loaded +from _pydevd_bundle.pydevd_extension_api import DebuggerEventHandler +from _pydevd_bundle import pydevd_extension_utils + +for handler in pydevd_extension_utils.extensions_of_type(DebuggerEventHandler): + handler.on_debugger_modules_loaded(debugger_version=__version__) +#======================================================================================================================= +# main +#======================================================================================================================= +def main(): + + # parse the command line. 
--file is our last argument that is required + try: + from _pydevd_bundle.pydevd_command_line_handling import process_command_line + setup = process_command_line(sys.argv) + SetupHolder.setup = setup + except ValueError: + traceback.print_exc() + usage(1) + + if setup['print-in-debugger-startup']: + try: + pid = ' (pid: %s)' % os.getpid() + except: + pid = '' + sys.stderr.write("pydev debugger: starting%s\n" % pid) + + fix_getpass.fix_getpass() + + pydev_log.debug("Executing file %s" % setup['file']) + pydev_log.debug("arguments: %s"% str(sys.argv)) + + + pydevd_vm_type.setup_type(setup.get('vm_type', None)) + + if os.getenv('PYCHARM_DEBUG') == 'True' or os.getenv('PYDEV_DEBUG') == 'True': + set_debug(setup) + + DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = setup.get('DEBUG_RECORD_SOCKET_READS', DebugInfoHolder.DEBUG_RECORD_SOCKET_READS) + DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = setup.get('DEBUG_TRACE_BREAKPOINTS', DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS) + DebugInfoHolder.DEBUG_TRACE_LEVEL = setup.get('DEBUG_TRACE_LEVEL', DebugInfoHolder.DEBUG_TRACE_LEVEL) + + port = setup['port'] + host = setup['client'] + f = setup['file'] + fix_app_engine_debug = False + + debugger = PyDB() + + try: + from _pydev_bundle import pydev_monkey + except: + pass #Not usable on jython 2.1 + else: + if setup['multiprocess']: # PyDev + pydev_monkey.patch_new_process_functions() + + elif setup['multiproc']: # PyCharm + pydev_log.debug("Started in multiproc mode\n") + # Note: we're not inside method, so, no need for 'global' + DISPATCH_APPROACH = DISPATCH_APPROACH_EXISTING_CONNECTION + + dispatcher = Dispatcher() + try: + dispatcher.connect(host, port) + if dispatcher.port is not None: + port = dispatcher.port + pydev_log.debug("Received port %d\n" %port) + pydev_log.info("pydev debugger: process %d is connecting\n"% os.getpid()) + + try: + pydev_monkey.patch_new_process_functions() + except: + pydev_log.error("Error patching process functions\n") + traceback.print_exc() + else: + pydev_log.error("pydev debugger: couldn't get port for new debug process\n") + finally: + dispatcher.close() + else: + pydev_log.info("pydev debugger: starting\n") + + try: + pydev_monkey.patch_new_process_functions_with_warning() + except: + pydev_log.error("Error patching process functions\n") + traceback.print_exc() + + # Only do this patching if we're not running with multiprocess turned on. 
+ if f.find('dev_appserver.py') != -1: + if os.path.basename(f).startswith('dev_appserver.py'): + appserver_dir = os.path.dirname(f) + version_file = os.path.join(appserver_dir, 'VERSION') + if os.path.exists(version_file): + try: + stream = open(version_file, 'r') + try: + for line in stream.read().splitlines(): + line = line.strip() + if line.startswith('release:'): + line = line[8:].strip() + version = line.replace('"', '') + version = version.split('.') + if int(version[0]) > 1: + fix_app_engine_debug = True + + elif int(version[0]) == 1: + if int(version[1]) >= 7: + # Only fix from 1.7 onwards + fix_app_engine_debug = True + break + finally: + stream.close() + except: + traceback.print_exc() + + try: + # In the default run (i.e.: run directly on debug mode), we try to patch stackless as soon as possible + # on a run where we have a remote debug, we may have to be more careful because patching stackless means + # that if the user already had a stackless.set_schedule_callback installed, he'd loose it and would need + # to call it again (because stackless provides no way of getting the last function which was registered + # in set_schedule_callback). + # + # So, ideally, if there's an application using stackless and the application wants to use the remote debugger + # and benefit from stackless debugging, the application itself must call: + # + # import pydevd_stackless + # pydevd_stackless.patch_stackless() + # + # itself to be able to benefit from seeing the tasklets created before the remote debugger is attached. + from _pydevd_bundle import pydevd_stackless + pydevd_stackless.patch_stackless() + except: + # It's ok not having stackless there... + try: + sys.exc_clear() # the exception information should be cleaned in Python 2 + except: + pass + + is_module = setup['module'] + patch_stdin(debugger) + + if fix_app_engine_debug: + sys.stderr.write("pydev debugger: google app engine integration enabled\n") + curr_dir = os.path.dirname(__file__) + app_engine_startup_file = os.path.join(curr_dir, 'pydev_app_engine_debug_startup.py') + + sys.argv.insert(1, '--python_startup_script=' + app_engine_startup_file) + import json + setup['pydevd'] = __file__ + sys.argv.insert(2, '--python_startup_args=%s' % json.dumps(setup),) + sys.argv.insert(3, '--automatic_restart=no') + sys.argv.insert(4, '--max_module_instances=1') + + # Run the dev_appserver + debugger.run(setup['file'], None, None, is_module, set_trace=False) + else: + if setup['save-threading']: + debugger.thread_analyser = ThreadingLogger() + if setup['save-asyncio']: + if IS_PY34_OLDER: + debugger.asyncio_analyser = AsyncioLogger() + + apply_debugger_options(setup) + + try: + debugger.connect(host, port) + except: + sys.stderr.write("Could not connect to %s: %s\n" % (host, port)) + traceback.print_exc() + sys.exit(1) + + connected = True # Mark that we're connected when started from inside ide. + + globals = debugger.run(setup['file'], None, None, is_module) + + if setup['cmd-line']: + debugger.wait_for_commands(globals) + +if __name__ == '__main__': + main() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/README.txt b/ptvsd/pydevd/pydevd_attach_to_process/README.txt new file mode 100644 index 00000000..138c1039 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/README.txt @@ -0,0 +1,11 @@ +This folder contains the utilities to attach a target process to the pydev debugger. 
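+
+Besides the command line shown below, the attach can be driven from Python, mirroring what
+the bundled _test_attach_to_process.py does (the pid is illustrative):
+
+    import attach_pydevd
+    attach_pydevd.main(attach_pydevd.process_command_line(['--pid', '1234']))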
+ +The main module to be called for the attach is: + +attach_pydevd.py + +it should be called as; + +python attach_pydevd.py --port 5678 --pid 1234 + +Note that the client is responsible for having a remote debugger alive in the given port for the attach to work. \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/_always_live_program.py b/ptvsd/pydevd/pydevd_attach_to_process/_always_live_program.py new file mode 100644 index 00000000..6369508e --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/_always_live_program.py @@ -0,0 +1,32 @@ +import sys +import struct +print('Executable: %s' % sys.executable) +import os +def loop_in_thread(): + while True: + import time + time.sleep(.5) + sys.stdout.write('#') + sys.stdout.flush() + +import threading +threading.Thread(target=loop_in_thread).start() + + +def is_python_64bit(): + return (struct.calcsize('P') == 8) + +print('Is 64: %s' % is_python_64bit()) + +if __name__ == '__main__': + print('pid:%s' % (os.getpid())) + i = 0 + while True: + i += 1 + import time + time.sleep(.5) + sys.stdout.write('.') + sys.stdout.flush() + if i % 40 == 0: + sys.stdout.write('\n') + sys.stdout.flush() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/_check.py b/ptvsd/pydevd/pydevd_attach_to_process/_check.py new file mode 100644 index 00000000..82f8e122 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/_check.py @@ -0,0 +1,2 @@ +import add_code_to_python_process +print add_code_to_python_process.run_python_code(3736, "print(20)", connect_debugger_tracing=False) \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/_test_attach_to_process.py b/ptvsd/pydevd/pydevd_attach_to_process/_test_attach_to_process.py new file mode 100644 index 00000000..8000aae6 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/_test_attach_to_process.py @@ -0,0 +1,9 @@ +import subprocess +import sys +print(sys.executable) + +if __name__ == '__main__': + p = subprocess.Popen([sys.executable, '-u', '_always_live_program.py']) + import attach_pydevd + attach_pydevd.main(attach_pydevd.process_command_line(['--pid', str(p.pid)])) + p.wait() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py b/ptvsd/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py new file mode 100644 index 00000000..8bc3d38b --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/_test_attach_to_process_linux.py @@ -0,0 +1,78 @@ +''' +This module is just for testing concepts. It should be erased later on. 
+ +Experiments: + +// gdb -p 4957 +// call dlopen("/home/fabioz/Desktop/dev/PyDev.Debugger/pydevd_attach_to_process/linux/attach_linux.so", 2) +// call dlsym($1, "hello") +// call hello() + + +// call open("/home/fabioz/Desktop/dev/PyDev.Debugger/pydevd_attach_to_process/linux/attach_linux.so", 2) +// call mmap(0, 6672, 1 | 2 | 4, 1, 3 , 0) +// add-symbol-file +// cat /proc/pid/maps + +// call dlopen("/home/fabioz/Desktop/dev/PyDev.Debugger/pydevd_attach_to_process/linux/attach_linux.so", 1|8) +// call dlsym($1, "hello") +// call hello() +''' + +import subprocess +import sys +import os +import time + +if __name__ == '__main__': + + linux_dir = os.path.join(os.path.dirname(__file__), 'linux') + os.chdir(linux_dir) + so_location = os.path.join(linux_dir, 'attach_linux.so') + try: + os.remove(so_location) + except: + pass + subprocess.call('g++ -shared -o attach_linux.so -fPIC -nostartfiles attach_linux.c'.split()) + print('Finished compiling') + assert os.path.exists('/home/fabioz/Desktop/dev/PyDev.Debugger/pydevd_attach_to_process/linux/attach_linux.so') + os.chdir(os.path.dirname(linux_dir)) +# import attach_pydevd +# attach_pydevd.main(attach_pydevd.process_command_line(['--pid', str(p.pid)])) + p = subprocess.Popen([sys.executable, '-u', '_always_live_program.py']) + print('Size of file: %s' % (os.stat(so_location).st_size)) + + #(gdb) set architecture + # Requires an argument. Valid arguments are i386, i386:x86-64, i386:x64-32, i8086, i386:intel, i386:x86-64:intel, i386:x64-32:intel, i386:nacl, i386:x86-64:nacl, i386:x64-32:nacl, auto. + + cmd = [ + 'gdb', + '--pid', + str(p.pid), + '--batch', + ] + + arch = 'i386:x86-64' + if arch: + cmd.extend(["--eval-command='set architecture %s'" % arch]) + + cmd.extend([ + "--eval-command='call dlopen(\"/home/fabioz/Desktop/dev/PyDev.Debugger/pydevd_attach_to_process/linux/attach_linux.so\", 2)'", + "--eval-command='call DoAttach(1, \"print(\\\"check11111check\\\")\", 0)'", + #"--eval-command='call SetSysTraceFunc(1, 0)'", -- never call this way, always use "--command='...gdb_threads_settrace.py'", + #So that threads are all stopped! + "--command='/home/fabioz/Desktop/dev/PyDev.Debugger/pydevd_attach_to_process/linux/gdb_threads_settrace.py'", + ]) + + cmd.extend(['--command=/home/fabioz/Desktop/dev/PyDev.Debugger/pydevd_attach_to_process/linux/gdb_threads_settrace.py']) + + + print(' '.join(cmd)) + time.sleep(.5) + env = os.environ.copy() + env.pop('PYTHONIOENCODING', None) + env.pop('PYTHONPATH', None) + p2 = subprocess.call(' '.join(cmd), env=env, shell=True) + + time.sleep(1) + p.kill() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/add_code_to_python_process.py b/ptvsd/pydevd/pydevd_attach_to_process/add_code_to_python_process.py new file mode 100644 index 00000000..2b5c2929 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/add_code_to_python_process.py @@ -0,0 +1,620 @@ +r''' +Copyright: Brainwy Software Ltda. + +License: EPL. +============= + +Works for Windows relying on a fork of winappdbg which works in py2/3 (at least for the part we're interested in). + +See: https://github.com/fabioz/winappdbg (py3 branch). +Note that the official branch for winappdbg is: https://github.com/MarioVilas/winappdbg, which should be used when it works in Py3. +A private copy is added here to make deployment easier, but changes should always be done upstream first. + +Works for Linux relying on gdb. + +Limitations: +============ + + Linux: + ------ + + 1. 
It possible that ptrace is disabled: /etc/sysctl.d/10-ptrace.conf + + Note that even enabling it in /etc/sysctl.d/10-ptrace.conf (i.e.: making the + ptrace_scope=0), it's possible that we need to run the application that'll use ptrace (or + gdb in this case) as root (so, we must sudo the python which'll run this module). + + 2. It currently doesn't work in debug builds (i.e.: python_d) + + +Other implementations: +- pyrasite.com: + GPL + Windows/linux (in Linux it also uses gdb to connect -- although specifics are different as we use a dll to execute + code with other threads stopped). It's Windows approach is more limited because it doesn't seem to deal properly with + Python 3 if threading is disabled. + +- https://github.com/google/pyringe: + Apache v2. + Only linux/Python 2. + +- http://pytools.codeplex.com: + Apache V2 + Windows Only (but supports mixed mode debugging) + Our own code relies heavily on a part of it: http://pytools.codeplex.com/SourceControl/latest#Python/Product/PyDebugAttach/PyDebugAttach.cpp + to overcome some limitations of attaching and running code in the target python executable on Python 3. + See: attach.cpp + +Linux: References if we wanted to use a pure-python debugger: + https://bitbucket.org/haypo/python-ptrace/ + http://stackoverflow.com/questions/7841573/how-to-get-an-error-message-for-errno-value-in-python + Jugaad: + https://www.defcon.org/images/defcon-19/dc-19-presentations/Jakhar/DEFCON-19-Jakhar-Jugaad-Linux-Thread-Injection.pdf + https://github.com/aseemjakhar/jugaad + +Something else (general and not Python related): +- http://www.codeproject.com/Articles/4610/Three-Ways-to-Inject-Your-Code-into-Another-Proces + +Other references: +- https://github.com/haypo/faulthandler +- http://nedbatchelder.com/text/trace-function.html +- https://github.com/python-git/python/blob/master/Python/sysmodule.c (sys_settrace) +- https://github.com/python-git/python/blob/master/Python/ceval.c (PyEval_SetTrace) +- https://github.com/python-git/python/blob/master/Python/thread.c (PyThread_get_key_value) + + +To build the dlls needed on windows, visual studio express 13 was used (see compile_dll.bat) + +See: attach_pydevd.py to attach the pydev debugger to a running python process. 
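+
+Minimal usage sketch (mirrors _check.py in this directory; the pid is illustrative and the
+code string is executed inside the target process):
+
+    import add_code_to_python_process
+    add_code_to_python_process.run_python_code(1234, "print('injected')", connect_debugger_tracing=False)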
+''' + +# Note: to work with nasm compiling asm to code and decompiling to see asm with shellcode: +# x:\nasm\nasm-2.07-win32\nasm-2.07\nasm.exe +# nasm.asm&x:\nasm\nasm-2.07-win32\nasm-2.07\ndisasm.exe -b arch nasm +import ctypes +import os +import struct +import subprocess +import sys +import time + +class AutoExit(object): + + def __init__(self, on_exit): + self.on_exit = on_exit + + def __enter__(self): + pass + + def __exit__(self, *args): + self.on_exit() + + +class GenShellCodeHelper(object): + + def __init__(self, is_64): + from winappdbg import compat + self.is_64 = is_64 + self._code = [] + if not is_64: + self._translations = { + 'push esi': compat.b('\x56'), + 'push eax': compat.b('\x50'), + 'push ebp': compat.b('\x55'), + 'push ebx': compat.b('\x53'), + + 'pop esi': compat.b('\x5E'), + 'pop eax': compat.b('\x58'), + 'pop ebp': compat.b('\x5D'), + 'pop ebx': compat.b('\x5B'), + + 'mov esi': compat.b('\xBE'), + 'mov eax': compat.b('\xB8'), + 'mov ebp': compat.b('\xBD'), + 'mov ebx': compat.b('\xBB'), + + 'call ebp': compat.b('\xFF\xD5'), + 'call eax': compat.b('\xFF\xD0'), + 'call ebx': compat.b('\xFF\xD3'), + + 'mov ebx,eax': compat.b('\x89\xC3'), + 'mov eax,ebx': compat.b('\x89\xD8'), + 'mov ebp,esp': compat.b('\x89\xE5'), + 'mov esp,ebp': compat.b('\x89\xEC'), + 'push dword': compat.b('\x68'), + + 'mov ebp,eax': compat.b('\x89\xC5'), + 'mov eax,ebp': compat.b('\x89\xE8'), + + 'ret': compat.b('\xc3'), + } + else: + # Translate 64 bits + self._translations = { + 'push rsi': compat.b('\x56'), + 'push rax': compat.b('\x50'), + 'push rbp': compat.b('\x55'), + 'push rbx': compat.b('\x53'), + 'push rsp': compat.b('\x54'), + 'push rdi': compat.b('\x57'), + + 'pop rsi': compat.b('\x5E'), + 'pop rax': compat.b('\x58'), + 'pop rbp': compat.b('\x5D'), + 'pop rbx': compat.b('\x5B'), + 'pop rsp': compat.b('\x5C'), + 'pop rdi': compat.b('\x5F'), + + 'mov rsi': compat.b('\x48\xBE'), + 'mov rax': compat.b('\x48\xB8'), + 'mov rbp': compat.b('\x48\xBD'), + 'mov rbx': compat.b('\x48\xBB'), + 'mov rdi': compat.b('\x48\xBF'), + 'mov rcx': compat.b('\x48\xB9'), + 'mov rdx': compat.b('\x48\xBA'), + + 'call rbp': compat.b('\xFF\xD5'), + 'call rax': compat.b('\xFF\xD0'), + 'call rbx': compat.b('\xFF\xD3'), + + 'mov rbx,rax': compat.b('\x48\x89\xC3'), + 'mov rax,rbx': compat.b('\x48\x89\xD8'), + 'mov rbp,rsp': compat.b('\x48\x89\xE5'), + 'mov rsp,rbp': compat.b('\x48\x89\xEC'), + 'mov rcx,rbp': compat.b('\x48\x89\xE9'), + + 'mov rbp,rax': compat.b('\x48\x89\xC5'), + 'mov rax,rbp': compat.b('\x48\x89\xE8'), + + 'mov rdi,rbp': compat.b('\x48\x89\xEF'), + + 'ret': compat.b('\xc3'), + } + + def push_addr(self, addr): + self._code.append(self.translate('push dword')) + self._code.append(addr) + + def push(self, register): + self._code.append(self.translate('push %s' % register)) + return AutoExit(lambda: self.pop(register)) + + def pop(self, register): + self._code.append(self.translate('pop %s' % register)) + + def mov_to_register_addr(self, register, addr): + self._code.append(self.translate('mov %s' % register)) + self._code.append(addr) + + def mov_register_to_from(self, register_to, register_from): + self._code.append(self.translate('mov %s,%s' % (register_to, register_from))) + + def call(self, register): + self._code.append(self.translate('call %s' % register)) + + def preserve_stack(self): + self.mov_register_to_from('ebp', 'esp') + return AutoExit(lambda: self.restore_stack()) + + def restore_stack(self): + self.mov_register_to_from('esp', 'ebp') + + def ret(self): + 
self._code.append(self.translate('ret')) + + def get_code(self): + from winappdbg import compat + return compat.b('').join(self._code) + + def translate(self, code): + return self._translations[code] + + def pack_address(self, address): + if self.is_64: + return struct.pack(' + +// DECLDIR will perform an export for us +#define DLL_EXPORT + +#include "attach.h" +#include "stdafx.h" +#include "python.h" + +#pragma comment(lib, "kernel32.lib") +#pragma comment(lib, "user32.lib") +#pragma comment(lib, "advapi32.lib") +#pragma comment(lib, "psapi.lib") + +// _Always_ is not defined for all versions, so make it a no-op if missing. +#ifndef _Always_ +#define _Always_(x) x +#endif + +using namespace std; + +typedef int (Py_IsInitialized)(); +typedef void (PyEval_Lock)(); // Acquire/Release lock +typedef void (PyThreadState_API)(PyThreadState *); // Acquire/Release lock +typedef PyInterpreterState* (PyInterpreterState_Head)(); +typedef PyThreadState* (PyInterpreterState_ThreadHead)(PyInterpreterState* interp); +typedef PyThreadState* (PyThreadState_Next)(PyThreadState *tstate); +typedef PyThreadState* (PyThreadState_Swap)(PyThreadState *tstate); +typedef int (PyRun_SimpleString)(const char *command); +typedef PyObject* (PyDict_New)(); +typedef PyObject* (PyModule_New)(const char *name); +typedef PyObject* (PyModule_GetDict)(PyObject *module); +typedef PyObject* (Py_CompileString)(const char *str, const char *filename, int start); +typedef PyObject* (PyEval_EvalCode)(PyObject *co, PyObject *globals, PyObject *locals); +typedef PyObject* (PyDict_GetItemString)(PyObject *p, const char *key); +typedef PyObject* (PyObject_CallFunctionObjArgs)(PyObject *callable, ...); // call w/ varargs, last arg should be NULL +typedef void (PyErr_Fetch)(PyObject **, PyObject **, PyObject **); +typedef PyObject* (PyEval_GetBuiltins)(); +typedef int (PyDict_SetItemString)(PyObject *dp, const char *key, PyObject *item); +typedef int (PyEval_ThreadsInitialized)(); +typedef void (Py_AddPendingCall)(int (*func)(void *), void*); +typedef PyObject* (PyInt_FromLong)(long); +typedef PyObject* (PyString_FromString)(const char* s); +typedef void PyEval_SetTrace(Py_tracefunc func, PyObject *obj); +typedef void (PyErr_Restore)(PyObject *type, PyObject *value, PyObject *traceback); +typedef void (PyErr_Fetch)(PyObject **ptype, PyObject **pvalue, PyObject **ptraceback); +typedef PyObject* (PyErr_Occurred)(); +typedef PyObject* (PyErr_Print)(); +typedef PyObject* (PyImport_ImportModule) (const char *name); +typedef PyObject* (PyObject_GetAttrString)(PyObject *o, const char *attr_name); +typedef PyObject* (PyObject_HasAttrString)(PyObject *o, const char *attr_name); +typedef PyObject* (PyObject_SetAttrString)(PyObject *o, const char *attr_name, PyObject* value); +typedef PyObject* (PyBool_FromLong)(long v); +typedef enum { PyGILState_LOCKED, PyGILState_UNLOCKED } PyGILState_STATE; +typedef PyGILState_STATE(PyGILState_Ensure)(); +typedef void (PyGILState_Release)(PyGILState_STATE); +typedef unsigned long (_PyEval_GetSwitchInterval)(void); +typedef void (_PyEval_SetSwitchInterval)(unsigned long microseconds); +typedef void* (PyThread_get_key_value)(int); +typedef int (PyThread_set_key_value)(int, void*); +typedef void (PyThread_delete_key_value)(int); +typedef PyGILState_STATE PyGILState_EnsureFunc(void); +typedef void PyGILState_ReleaseFunc(PyGILState_STATE); +typedef PyObject* PyInt_FromSize_t(size_t ival); +typedef PyThreadState *PyThreadState_NewFunc(PyInterpreterState *interp); + +class PyObjectHolder; +PyObject* 
GetPyObjectPointerNoDebugInfo(bool isDebug, PyObject* object); +void DecRef(PyObject* object, bool isDebug); +void IncRef(PyObject* object, bool isDebug); + +#define MAX_INTERPRETERS 10 + +// Helper class so we can use RAII for freeing python objects when they go out of scope +class PyObjectHolder { +private: + PyObject* _object; +public: + bool _isDebug; + + PyObjectHolder(bool isDebug) { + _object = nullptr; + _isDebug = isDebug; + } + + PyObjectHolder(bool isDebug, PyObject *object) { + _object = object; + _isDebug = isDebug; + }; + + PyObjectHolder(bool isDebug, PyObject *object, bool addRef) { + _object = object; + _isDebug = isDebug; + if (_object != nullptr && addRef) { + GetPyObjectPointerNoDebugInfo(_isDebug, _object)->ob_refcnt++; + } + }; + + PyObject* ToPython() { + return _object; + } + + ~PyObjectHolder() { + DecRef(_object, _isDebug); + } + + PyObject* operator* () { + return GetPyObjectPointerNoDebugInfo(_isDebug, _object); + } +}; + +class InterpreterInfo { +public: + InterpreterInfo(HMODULE module, bool debug) : + Interpreter(module), + CurrentThread(nullptr), + NewThreadFunction(nullptr), + PyGILState_Ensure(nullptr), + Version(PythonVersion_Unknown), + Call(nullptr), + IsDebug(debug), + SetTrace(nullptr), + PyThreadState_New(nullptr), + ThreadState_Swap(nullptr) { + } + + ~InterpreterInfo() { + if (NewThreadFunction != nullptr) { + delete NewThreadFunction; + } + } + + PyObjectHolder* NewThreadFunction; + PyThreadState** CurrentThread; + + HMODULE Interpreter; + PyGILState_EnsureFunc* PyGILState_Ensure; + PyEval_SetTrace* SetTrace; + PyThreadState_NewFunc* PyThreadState_New; + PyThreadState_Swap* ThreadState_Swap; + + PythonVersion GetVersion() { + if (Version == PythonVersion_Unknown) { + Version = ::GetPythonVersion(Interpreter); + } + return Version; + } + + PyObject_CallFunctionObjArgs* GetCall() { + if (Call == nullptr) { + Call = (PyObject_CallFunctionObjArgs*)GetProcAddress(Interpreter, "PyObject_CallFunctionObjArgs"); + } + + return Call; + } + + bool EnsureSetTrace() { + if (SetTrace == nullptr) { + auto setTrace = (PyEval_SetTrace*)(void*)GetProcAddress(Interpreter, "PyEval_SetTrace"); + SetTrace = setTrace; + } + return SetTrace != nullptr; + } + + bool EnsureThreadStateSwap() { + if (ThreadState_Swap == nullptr) { + auto swap = (PyThreadState_Swap*)(void*)GetProcAddress(Interpreter, "PyThreadState_Swap"); + ThreadState_Swap = swap; + } + return ThreadState_Swap != nullptr; + } + + bool EnsureCurrentThread() { + if (CurrentThread == nullptr) { + auto curPythonThread = (PyThreadState**)(void*)GetProcAddress( + Interpreter, "_PyThreadState_Current"); + CurrentThread = curPythonThread; + } + + return CurrentThread != nullptr; + } + +private: + PythonVersion Version; + PyObject_CallFunctionObjArgs* Call; + bool IsDebug; +}; + +DWORD _interpreterCount = 0; +InterpreterInfo* _interpreterInfo[MAX_INTERPRETERS]; + +void PatchIAT(PIMAGE_DOS_HEADER dosHeader, PVOID replacingFunc, LPSTR exportingDll, LPVOID newFunction) { + if (dosHeader->e_magic != IMAGE_DOS_SIGNATURE) { + return; + } + + auto ntHeader = (IMAGE_NT_HEADERS*)(((BYTE*)dosHeader) + dosHeader->e_lfanew); + if (ntHeader->Signature != IMAGE_NT_SIGNATURE) { + return; + } + + auto importAddr = ntHeader->OptionalHeader.DataDirectory[IMAGE_DIRECTORY_ENTRY_IMPORT].VirtualAddress; + if (importAddr == 0) { + return; + } + + auto import = (PIMAGE_IMPORT_DESCRIPTOR)(importAddr + ((BYTE*)dosHeader)); + + while (import->Name) { + char* name = (char*)(import->Name + ((BYTE*)dosHeader)); + if (_stricmp(name, exportingDll) 
== 0) { + auto thunkData = (PIMAGE_THUNK_DATA)((import->FirstThunk) + ((BYTE*)dosHeader)); + + while (thunkData->u1.Function) { + PVOID funcAddr = (char*)(thunkData->u1.Function); + + if (funcAddr == replacingFunc) { + DWORD flOldProtect; + if (VirtualProtect(&thunkData->u1, sizeof(SIZE_T), PAGE_READWRITE, &flOldProtect)) { + thunkData->u1.Function = (SIZE_T)newFunction; + VirtualProtect(&thunkData->u1, sizeof(SIZE_T), flOldProtect, &flOldProtect); + } + } + thunkData++; + } + } + + import++; + } +} + +typedef BOOL WINAPI EnumProcessModulesFunc( + __in HANDLE hProcess, + __out HMODULE *lphModule, + __in DWORD cb, + __out LPDWORD lpcbNeeded + ); + +typedef __kernel_entry NTSTATUS NTAPI + NtQueryInformationProcessFunc( + IN HANDLE ProcessHandle, + IN PROCESSINFOCLASS ProcessInformationClass, + OUT PVOID ProcessInformation, + IN ULONG ProcessInformationLength, + OUT PULONG ReturnLength OPTIONAL + ); + + +// A helper version of EnumProcessModules. On Win7 uses the real EnumProcessModules which +// lives in kernel32, and so is safe to use in DLLMain. Pre-Win7 we use NtQueryInformationProcess +// (http://msdn.microsoft.com/en-us/library/windows/desktop/ms684280(v=vs.85).aspx) and walk the +// LDR_DATA_TABLE_ENTRY data structures http://msdn.microsoft.com/en-us/library/windows/desktop/aa813708(v=vs.85).aspx +// which have changed in Windows 7, and may change more in the future, so we can't use them there. +__success(return) BOOL EnumProcessModulesHelper( + __in HANDLE hProcess, + __out HMODULE *lphModule, + __in DWORD cb, + _Always_(__out) LPDWORD lpcbNeeded + ) { + if (lpcbNeeded == nullptr) { + return FALSE; + } + *lpcbNeeded = 0; + + auto kernel32 = GetModuleHandle(L"kernel32.dll"); + if (kernel32 == nullptr) { + return FALSE; + } + + auto enumProc = (EnumProcessModulesFunc*)GetProcAddress(kernel32, "K32EnumProcessModules"); + if (enumProc == nullptr) { + // Fallback to pre-Win7 method + PROCESS_BASIC_INFORMATION basicInfo; + auto ntdll = GetModuleHandle(L"ntdll.dll"); + if (ntdll == nullptr) { + return FALSE; + } + + // http://msdn.microsoft.com/en-us/library/windows/desktop/ms684280(v=vs.85).aspx + NtQueryInformationProcessFunc* queryInfo = (NtQueryInformationProcessFunc*)GetProcAddress(ntdll, "NtQueryInformationProcess"); + if (queryInfo == nullptr) { + return FALSE; + } + + auto result = queryInfo( + GetCurrentProcess(), + ProcessBasicInformation, + &basicInfo, + sizeof(PROCESS_BASIC_INFORMATION), + NULL + ); + + if (FAILED(result)) { + return FALSE; + } + + // http://msdn.microsoft.com/en-us/library/windows/desktop/aa813708(v=vs.85).aspx + PEB* peb = basicInfo.PebBaseAddress; + auto start = (LDR_DATA_TABLE_ENTRY*)(peb->Ldr->InMemoryOrderModuleList.Flink); + + auto cur = start; + *lpcbNeeded = 0; + + do { + if ((*lpcbNeeded + sizeof(SIZE_T)) <= cb) { + PVOID *curLink = (PVOID*)cur; + curLink -= 2; + LDR_DATA_TABLE_ENTRY* curTable = (LDR_DATA_TABLE_ENTRY*)curLink; + if (curTable->DllBase == nullptr) { + break; + } + lphModule[(*lpcbNeeded) / sizeof(SIZE_T)] = (HMODULE)curTable->DllBase; + } + + (*lpcbNeeded) += sizeof(SIZE_T); + cur = (LDR_DATA_TABLE_ENTRY*)((LIST_ENTRY*)cur)->Flink; + } while (cur != start && cur != 0); + + return *lpcbNeeded <= cb; + } + + return enumProc(hProcess, lphModule, cb, lpcbNeeded); +} + +// This function will work with Win7 and later versions of the OS and is safe to call under +// the loader lock (all APIs used are in kernel32). 
+BOOL PatchFunction(LPSTR exportingDll, PVOID replacingFunc, LPVOID newFunction) { + HANDLE hProcess = GetCurrentProcess(); + DWORD modSize = sizeof(HMODULE) * 1024; + HMODULE* hMods = (HMODULE*)_malloca(modSize); + DWORD modsNeeded = 0; + if (hMods == nullptr) { + modsNeeded = 0; + return FALSE; + } + + while (!EnumProcessModulesHelper(hProcess, hMods, modSize, &modsNeeded)) { + // try again w/ more space... + _freea(hMods); + hMods = (HMODULE*)_malloca(modsNeeded); + if (hMods == nullptr) { + modsNeeded = 0; + break; + } + modSize = modsNeeded; + } + + for (DWORD tmp = 0; tmp < modsNeeded / sizeof(HMODULE); tmp++) { + PIMAGE_DOS_HEADER dosHeader = (PIMAGE_DOS_HEADER)hMods[tmp]; + + PatchIAT(dosHeader, replacingFunc, exportingDll, newFunction); + } + + return TRUE; +} + +wstring GetCurrentModuleFilename() { + HMODULE hModule = NULL; + if (GetModuleHandleEx(GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT, (LPCTSTR)GetCurrentModuleFilename, &hModule) != 0) { + wchar_t filename[MAX_PATH]; + GetModuleFileName(hModule, filename, MAX_PATH); + return filename; + } + return wstring(); +} + +struct AttachInfo { + PyEval_Lock* InitThreads; + HANDLE Event; +}; + +HANDLE g_initedEvent; +int AttachCallback(void *initThreads) { + // initialize us for threading, this will acquire the GIL if not already created, and is a nop if the GIL is created. + // This leaves us in the proper state when we return back to the runtime whether the GIL was created or not before + // we were called. + ((PyEval_Lock*)initThreads)(); + SetEvent(g_initedEvent); + return 0; +} + +char* ReadCodeFromFile(wchar_t* filePath) { + ifstream filestr; + filestr.open(filePath, ios::binary); + if (filestr.fail()) { + return nullptr; + } + + // get length of file: + filestr.seekg(0, ios::end); + auto length = filestr.tellg(); + filestr.seekg(0, ios::beg); + + int len = (int)length; + char* buffer = new char[len + 1]; + filestr.read(buffer, len); + buffer[len] = 0; + + // remove carriage returns, copy zero byte + for (int read = 0, write = 0; read <= len; read++) { + if (buffer[read] == '\r') { + continue; + } else if (write != read) { + buffer[write] = buffer[read]; + } + write++; + } + + return buffer; +} + +// create a custom heap for our unordered map. This is necessary because if we suspend a thread while in a heap function +// then we could deadlock here. We need to be VERY careful about what we do while the threads are suspended. 
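+// The PrivateHeapAllocator below routes the suspended-thread map's allocations through this
+// heap, so the bookkeeping done while other threads are frozen never touches the (possibly
+// locked) default process heap.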
+static HANDLE g_heap = 0; + +template +class PrivateHeapAllocator { +public: + typedef size_t size_type; + typedef ptrdiff_t difference_type; + typedef T* pointer; + typedef const T* const_pointer; + typedef T& reference; + typedef const T& const_reference; + typedef T value_type; + + template + struct rebind { + typedef PrivateHeapAllocator other; + }; + + explicit PrivateHeapAllocator() {} + + PrivateHeapAllocator(PrivateHeapAllocator const&) {} + + ~PrivateHeapAllocator() {} + + template + PrivateHeapAllocator(PrivateHeapAllocator const&) {} + + pointer allocate(size_type size, allocator::const_pointer hint = 0) { + if (g_heap == nullptr) { + g_heap = HeapCreate(0, 0, 0); + } + auto mem = HeapAlloc(g_heap, 0, size * sizeof(T)); + return static_cast(mem); + } + + void deallocate(pointer p, size_type n) { + HeapFree(g_heap, 0, p); + } + + size_type max_size() const { + return (std::numeric_limits::max)() / sizeof(T); + } + + void construct(pointer p, const T& t) { + new(p) T(t); + } + + void destroy(pointer p) { + p->~T(); + } +}; + +typedef unordered_map, std::equal_to, PrivateHeapAllocator>> ThreadMap; + +void ResumeThreads(ThreadMap &suspendedThreads) { + for (auto start = suspendedThreads.begin(); start != suspendedThreads.end(); start++) { + ResumeThread((*start).second); + CloseHandle((*start).second); + } + suspendedThreads.clear(); +} + +// Suspends all threads ensuring that they are not currently in a call to Py_AddPendingCall. +void SuspendThreads(ThreadMap &suspendedThreads, Py_AddPendingCall* addPendingCall, PyEval_ThreadsInitialized* threadsInited) { + DWORD curThreadId = GetCurrentThreadId(); + DWORD curProcess = GetCurrentProcessId(); + // suspend all the threads in the process so we can do things safely... + bool suspended; + + do { + suspended = false; + HANDLE h = CreateToolhelp32Snapshot(TH32CS_SNAPTHREAD, 0); + if (h != INVALID_HANDLE_VALUE) { + + THREADENTRY32 te; + te.dwSize = sizeof(te); + if (Thread32First(h, &te)) { + do { + if (te.dwSize >= FIELD_OFFSET(THREADENTRY32, th32OwnerProcessID) + sizeof(te.th32OwnerProcessID) && te.th32OwnerProcessID == curProcess) { + + + if (te.th32ThreadID != curThreadId && suspendedThreads.find(te.th32ThreadID) == suspendedThreads.end()) { + auto hThread = OpenThread(THREAD_ALL_ACCESS, FALSE, te.th32ThreadID); + if (hThread != nullptr) { + SuspendThread(hThread); + + bool addingPendingCall = false; + + CONTEXT context; + memset(&context, 0x00, sizeof(CONTEXT)); + context.ContextFlags = CONTEXT_ALL; + GetThreadContext(hThread, &context); + +#if defined(_X86_) + if(context.Eip >= *((DWORD*)addPendingCall) && context.Eip <= (*((DWORD*)addPendingCall)) + 0x100) { + addingPendingCall = true; + } +#elif defined(_AMD64_) + if (context.Rip >= *((DWORD64*)addPendingCall) && context.Rip <= *((DWORD64*)addPendingCall + 0x100)) { + addingPendingCall = true; + } +#endif + + if (addingPendingCall) { + // we appear to be adding a pending call via this thread - wait for this to finish so we can add our own pending call... + ResumeThread(hThread); + SwitchToThread(); // yield to the resumed thread if it's on our CPU... 
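+                                    // The handle is closed and the thread is not recorded in suspendedThreads,
+                                    // so the outer do/while retakes the snapshot and tries this thread again
+                                    // once it has left Py_AddPendingCall.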
+ CloseHandle(hThread); + } else { + suspendedThreads[te.th32ThreadID] = hThread; + } + suspended = true; + } + } + } + + te.dwSize = sizeof(te); + } while (Thread32Next(h, &te) && !threadsInited()); + } + CloseHandle(h); + } + } while (suspended && !threadsInited()); +} + +PyObject* GetPyObjectPointerNoDebugInfo(bool isDebug, PyObject* object) { + if (object != nullptr && isDebug) { + // debug builds have 2 extra pointers at the front that we don't care about + return (PyObject*)((size_t*)object + 2); + } + return object; +} + +void DecRef(PyObject* object, bool isDebug) { + auto noDebug = GetPyObjectPointerNoDebugInfo(isDebug, object); + + if (noDebug != nullptr && --noDebug->ob_refcnt == 0) { + ((PyTypeObject*)GetPyObjectPointerNoDebugInfo(isDebug, noDebug->ob_type))->tp_dealloc(object); + } +} + +void IncRef(PyObject* object) { + object->ob_refcnt++; +} + + +// Ensures handles are closed when they go out of scope +class HandleHolder { + HANDLE _handle; +public: + HandleHolder(HANDLE handle) : _handle(handle) { + } + + ~HandleHolder() { + CloseHandle(_handle); + } +}; + +long GetPythonThreadId(PythonVersion version, PyThreadState* curThread) { + long threadId = 0; + if (PyThreadState_25_27::IsFor(version)) { + threadId = ((PyThreadState_25_27*)curThread)->thread_id; + } else if (PyThreadState_30_33::IsFor(version)) { + threadId = ((PyThreadState_30_33*)curThread)->thread_id; + } else if (PyThreadState_34_36::IsFor(version)) { + threadId = ((PyThreadState_34_36*)curThread)->thread_id; + } + return threadId; +} + +// holder to ensure we release the GIL even in error conditions +class GilHolder { + PyGILState_STATE _gilState; + PyGILState_Release* _release; +public: + GilHolder(PyGILState_Ensure* acquire, PyGILState_Release* release) { + _gilState = acquire(); + _release = release; + } + + ~GilHolder() { + _release(_gilState); + } +}; + +bool LoadAndEvaluateCode( + wchar_t* filePath, const char* fileName, bool isDebug, PyObject* globalsDict, + Py_CompileString* pyCompileString, PyDict_SetItemString* dictSetItem, + PyEval_EvalCode* pyEvalCode, PyString_FromString* strFromString, PyEval_GetBuiltins* getBuiltins, + PyErr_Print pyErrPrint + ) { + auto debuggerCode = ReadCodeFromFile(filePath); + if (debuggerCode == nullptr) { + return false; + } + + auto code = PyObjectHolder(isDebug, pyCompileString(debuggerCode, fileName, 257 /*Py_file_input*/)); + delete[] debuggerCode; + + if (*code == nullptr) { + return false; + } + + dictSetItem(globalsDict, "__builtins__", getBuiltins()); + auto size = WideCharToMultiByte(CP_UTF8, 0, filePath, (DWORD)wcslen(filePath), NULL, 0, NULL, NULL); + char* filenameBuffer = new char[size]; + if (WideCharToMultiByte(CP_UTF8, 0, filePath, (DWORD)wcslen(filePath), filenameBuffer, size, NULL, NULL) != 0) { + filenameBuffer[size] = 0; + dictSetItem(globalsDict, "__file__", strFromString(filenameBuffer)); + } + + auto evalResult = PyObjectHolder(isDebug, pyEvalCode(code.ToPython(), globalsDict, globalsDict)); +#if !NDEBUG + if (*evalResult == nullptr) { + pyErrPrint(); + } +#endif + + return true; +} + +// Checks to see if the specified module is likely a Python interpreter. 
+bool IsPythonModule(HMODULE module, bool &isDebug) { + wchar_t mod_name[MAX_PATH]; + isDebug = false; + if (GetModuleBaseName(GetCurrentProcess(), module, mod_name, MAX_PATH)) { + if (_wcsnicmp(mod_name, L"python", 6) == 0) { + if (wcslen(mod_name) >= 10 && _wcsnicmp(mod_name + 8, L"_d", 2) == 0) { + isDebug = true; + } + return true; + } + } + return false; +} + +extern "C" +{ + + /** + * The returned value signals the error that happened! + * + * Return codes: + * 0 = all OK. + * 1 = Py_IsInitialized not found + * 2 = Py_IsInitialized returned false + * 3 = Missing Python API + * 4 = Interpreter not initialized + * 5 = Python version unknown + * 6 = Connect timeout + **/ + int DoAttach(HMODULE module, bool isDebug, const char *command, bool showDebugInfo ) + { + auto isInit = (Py_IsInitialized*)GetProcAddress(module, "Py_IsInitialized"); + + if (isInit == nullptr) { + if(showDebugInfo){ + std::cout << "Py_IsInitialized not found. " << std::endl << std::flush; + } + return 1; + } + if (!isInit()) { + if(showDebugInfo){ + std::cout << "Py_IsInitialized returned false. " << std::endl << std::flush; + } + return 2; + } + + auto version = GetPythonVersion(module); + + // found initialized Python runtime, gather and check the APIs we need for a successful attach... + auto addPendingCall = (Py_AddPendingCall*)GetProcAddress(module, "Py_AddPendingCall"); + auto curPythonThread = (PyThreadState**)(void*)GetProcAddress(module, "_PyThreadState_Current"); + auto interpHead = (PyInterpreterState_Head*)GetProcAddress(module, "PyInterpreterState_Head"); + auto gilEnsure = (PyGILState_Ensure*)GetProcAddress(module, "PyGILState_Ensure"); + auto gilRelease = (PyGILState_Release*)GetProcAddress(module, "PyGILState_Release"); + auto threadHead = (PyInterpreterState_ThreadHead*)GetProcAddress(module, "PyInterpreterState_ThreadHead"); + auto initThreads = (PyEval_Lock*)GetProcAddress(module, "PyEval_InitThreads"); + auto acquireLock = (PyEval_Lock*)GetProcAddress(module, "PyEval_AcquireLock"); + auto releaseLock = (PyEval_Lock*)GetProcAddress(module, "PyEval_ReleaseLock"); + auto threadsInited = (PyEval_ThreadsInitialized*)GetProcAddress(module, "PyEval_ThreadsInitialized"); + auto threadNext = (PyThreadState_Next*)GetProcAddress(module, "PyThreadState_Next"); + auto threadSwap = (PyThreadState_Swap*)GetProcAddress(module, "PyThreadState_Swap"); + auto pyDictNew = (PyDict_New*)GetProcAddress(module, "PyDict_New"); + auto pyModuleNew = (PyModule_New*)GetProcAddress(module, "PyModule_New"); + auto pyModuleGetDict = (PyModule_GetDict*)GetProcAddress(module, "PyModule_GetDict"); + auto pyCompileString = (Py_CompileString*)GetProcAddress(module, "Py_CompileString"); + auto pyEvalCode = (PyEval_EvalCode*)GetProcAddress(module, "PyEval_EvalCode"); + auto getDictItem = (PyDict_GetItemString*)GetProcAddress(module, "PyDict_GetItemString"); + auto call = (PyObject_CallFunctionObjArgs*)GetProcAddress(module, "PyObject_CallFunctionObjArgs"); + auto getBuiltins = (PyEval_GetBuiltins*)GetProcAddress(module, "PyEval_GetBuiltins"); + auto dictSetItem = (PyDict_SetItemString*)GetProcAddress(module, "PyDict_SetItemString"); + PyInt_FromLong* intFromLong; + PyString_FromString* strFromString; + PyInt_FromSize_t* intFromSizeT; + if (version >= PythonVersion_30) { + intFromLong = (PyInt_FromLong*)GetProcAddress(module, "PyLong_FromLong"); + intFromSizeT = (PyInt_FromSize_t*)GetProcAddress(module, "PyLong_FromSize_t"); + if (version >= PythonVersion_33) { + strFromString = (PyString_FromString*)GetProcAddress(module, 
"PyUnicode_FromString"); + } else { + strFromString = (PyString_FromString*)GetProcAddress(module, "PyUnicodeUCS2_FromString"); + } + } else { + intFromLong = (PyInt_FromLong*)GetProcAddress(module, "PyInt_FromLong"); + strFromString = (PyString_FromString*)GetProcAddress(module, "PyString_FromString"); + intFromSizeT = (PyInt_FromSize_t*)GetProcAddress(module, "PyInt_FromSize_t"); + } + auto intervalCheck = (int*)GetProcAddress(module, "_Py_CheckInterval"); + auto errOccurred = (PyErr_Occurred*)GetProcAddress(module, "PyErr_Occurred"); + auto pyErrFetch = (PyErr_Fetch*)GetProcAddress(module, "PyErr_Fetch"); + auto pyErrRestore = (PyErr_Restore*)GetProcAddress(module, "PyErr_Restore"); + auto pyErrPrint = (PyErr_Print*)GetProcAddress(module, "PyErr_Print"); + auto pyImportMod = (PyImport_ImportModule*) GetProcAddress(module, "PyImport_ImportModule"); + auto pyGetAttr = (PyObject_GetAttrString*)GetProcAddress(module, "PyObject_GetAttrString"); + auto pySetAttr = (PyObject_SetAttrString*)GetProcAddress(module, "PyObject_SetAttrString"); + auto pyNone = (PyObject*)GetProcAddress(module, "_Py_NoneStruct"); + auto getSwitchInterval = (_PyEval_GetSwitchInterval*)GetProcAddress(module, "_PyEval_GetSwitchInterval"); + auto setSwitchInterval = (_PyEval_SetSwitchInterval*)GetProcAddress(module, "_PyEval_SetSwitchInterval"); + auto boolFromLong = (PyBool_FromLong*)GetProcAddress(module, "PyBool_FromLong"); + auto getThreadTls = (PyThread_get_key_value*)GetProcAddress(module, "PyThread_get_key_value"); + auto setThreadTls = (PyThread_set_key_value*)GetProcAddress(module, "PyThread_set_key_value"); + auto delThreadTls = (PyThread_delete_key_value*)GetProcAddress(module, "PyThread_delete_key_value"); + auto pyGilStateEnsure = (PyGILState_EnsureFunc*)GetProcAddress(module, "PyGILState_Ensure"); + auto pyGilStateRelease = (PyGILState_ReleaseFunc*)GetProcAddress(module, "PyGILState_Release"); + auto PyCFrame_Type = (PyTypeObject*)GetProcAddress(module, "PyCFrame_Type"); + auto pyRun_SimpleString = (PyRun_SimpleString*)GetProcAddress(module, "PyRun_SimpleString"); + + if (addPendingCall == nullptr || curPythonThread == nullptr || interpHead == nullptr || gilEnsure == nullptr || gilRelease == nullptr || threadHead == nullptr || + initThreads == nullptr || releaseLock == nullptr || threadsInited == nullptr || threadNext == nullptr || threadSwap == nullptr || + pyDictNew == nullptr || pyCompileString == nullptr || pyEvalCode == nullptr || getDictItem == nullptr || call == nullptr || + getBuiltins == nullptr || dictSetItem == nullptr || intFromLong == nullptr || pyErrRestore == nullptr || pyErrFetch == nullptr || + errOccurred == nullptr || pyImportMod == nullptr || pyGetAttr == nullptr || pyNone == nullptr || pySetAttr == nullptr || boolFromLong == nullptr || + getThreadTls == nullptr || setThreadTls == nullptr || delThreadTls == nullptr || releaseLock == nullptr || + pyGilStateEnsure == nullptr || pyGilStateRelease == nullptr || pyRun_SimpleString == nullptr) { + // we're missing some APIs, we cannot attach. + if(showDebugInfo){ + std::cout << "Error, missing Python API!! " << std::endl << std::flush; + } + return 3; + } + + auto head = interpHead(); + if (head == nullptr) { + // this interpreter is loaded but not initialized. + if(showDebugInfo){ + std::cout << "Interpreter not initialized! 
" << std::endl << std::flush; + } + return 4; + } + + bool threadSafeAddPendingCall = false; + + // check that we're a supported version + if (version == PythonVersion_Unknown) { + if(showDebugInfo){ + std::cout << "Python version unknown! " << std::endl << std::flush; + } + return 5; + } else if (version >= PythonVersion_27 && version != PythonVersion_30) { + threadSafeAddPendingCall = true; + } + + + + + + + if (!threadsInited()) { + int saveIntervalCheck; + unsigned long saveLongIntervalCheck; + if (intervalCheck != nullptr) { + // not available on 3.2 + saveIntervalCheck = *intervalCheck; + *intervalCheck = -1; // lower the interval check so pending calls are processed faster + } else if (getSwitchInterval != nullptr && setSwitchInterval != nullptr) { + saveLongIntervalCheck = getSwitchInterval(); + setSwitchInterval(0); + } + + // + // Multiple thread support has not been initialized in the interpreter. We need multi threading support + // to block any actively running threads and setup the debugger attach state. + // + // We need to initialize multiple threading support but we need to do so safely. One option is to call + // Py_AddPendingCall and have our callback then initialize multi threading. This is completely safe on 2.7 + // and up. Unfortunately that doesn't work if we're not actively running code on the main thread (blocked on a lock + // or reading input). It's also not thread safe pre-2.7 so we need to make sure it's safe to call on down-level + // interpreters. + // + // Another option is to make sure no code is running - if there is no active thread then we can safely call + // PyEval_InitThreads and we're in business. But to know this is safe we need to first suspend all the other + // threads in the process and then inspect if any code is running. + // + // Finally if code is running after we've suspended the threads then we can go ahead and do Py_AddPendingCall + // on down-level interpreters as long as we're sure no one else is making a call to Py_AddPendingCall at the same + // time. + // + // Therefore our strategy becomes: Make the Py_AddPendingCall on interpreters where it's thread safe. Then suspend + // all threads - if a threads IP is in Py_AddPendingCall resume and try again. Once we've got all of the threads + // stopped and not in Py_AddPendingCall (which calls no functions its self, you can see this and it's size in the + // debugger) then see if we have a current thread. If not go ahead and initialize multiple threading (it's now safe, + // no Python code is running). Otherwise add the pending call and repeat. If at any point during this process + // threading becomes initialized (due to our pending call or the Python code creating a new thread) then we're done + // and we just resume all of the presently suspended threads. + + ThreadMap suspendedThreads; + + g_initedEvent = CreateEvent(NULL, TRUE, FALSE, NULL); + HandleHolder holder(g_initedEvent); + + bool addedPendingCall = false; + if (addPendingCall != nullptr && threadSafeAddPendingCall) { + // we're on a thread safe Python version, go ahead and pend our call to initialize threading. + addPendingCall(&AttachCallback, initThreads); + addedPendingCall = true; + } + + #define TICKS_DIFF(prev, cur) ((cur) >= (prev)) ? 
((cur)-(prev)) : ((0xFFFFFFFF-(prev))+(cur)) + const DWORD ticksPerSecond = 1000; + + DWORD startTickCount = GetTickCount(); + do { + SuspendThreads(suspendedThreads, addPendingCall, threadsInited); + + if (!threadsInited()) { + if (*curPythonThread == nullptr) { + // no threads are currently running, it is safe to initialize multi threading. + PyGILState_STATE gilState; + if (version >= PythonVersion_34) { + // in 3.4 due to http://bugs.python.org/issue20891, + // we need to create our thread state manually + // before we can call PyGILState_Ensure() before we + // can call PyEval_InitThreads(). + + // Don't require this function unless we need it. + auto threadNew = (PyThreadState_NewFunc*)GetProcAddress(module, "PyThreadState_New"); + if (threadNew != nullptr) { + threadNew(head); + } + } + + if (version >= PythonVersion_32) { + // in 3.2 due to the new GIL and later we can't call Py_InitThreads + // without a thread being initialized. + // So we use PyGilState_Ensure here to first + // initialize the current thread, and then we use + // Py_InitThreads to bring up multi-threading. + // Some context here: http://bugs.python.org/issue11329 + // http://pytools.codeplex.com/workitem/834 + gilState = pyGilStateEnsure(); + } + initThreads(); + + if (version >= PythonVersion_32) { + // we will release the GIL here + pyGilStateRelease(gilState); + } else { + releaseLock(); + } + } else if (!addedPendingCall) { + // someone holds the GIL but no one is actively adding any pending calls. We can pend our call + // and initialize threads. + addPendingCall(&AttachCallback, initThreads); + addedPendingCall = true; + } + } + ResumeThreads(suspendedThreads); + } while (!threadsInited() && + (TICKS_DIFF(startTickCount, GetTickCount())) < (ticksPerSecond * 20) && + !addedPendingCall); + + if (!threadsInited()) { + if (addedPendingCall) { + // we've added our call to initialize multi-threading, we can now wait + // until Python code actually starts running. + if(showDebugInfo){ + std::cout << "Waiting for threads to be initialized! " << std::endl << std::flush; + } + + ::WaitForSingleObject(g_initedEvent, INFINITE); + } else { + if(showDebugInfo){ + std::cout << "Connect timeout! " << std::endl << std::flush; + } + return 6; + } + } else { + if(showDebugInfo){ + std::cout << "Threads initialized! " << std::endl << std::flush; + } + } + + if (intervalCheck != nullptr) { + *intervalCheck = saveIntervalCheck; + } else if (setSwitchInterval != nullptr) { + setSwitchInterval(saveLongIntervalCheck); + } + } else { + if(showDebugInfo){ + std::cout << "Threads already initialized! " << std::endl << std::flush; + } + } + + if (g_heap != nullptr) { + HeapDestroy(g_heap); + g_heap = nullptr; + } + + + GilHolder gilLock(gilEnsure, gilRelease); // acquire and hold the GIL until done... + + pyRun_SimpleString(command); + return 0; + + } + + + + + int SetSysTraceFunc(HMODULE module, bool isDebug, bool showDebugInfo) + { + + if(showDebugInfo){ + std::cout << "SetSysTraceFunc started. " << std::endl << std::flush; + } + auto isInit = (Py_IsInitialized*)GetProcAddress(module, "Py_IsInitialized"); + + if (isInit == nullptr) { + if(showDebugInfo){ + std::cout << "Py_IsInitialized not found. " << std::endl << std::flush; + } + return 1; + } + if (!isInit()) { + if(showDebugInfo){ + std::cout << "Py_IsInitialized returned false. " << std::endl << std::flush; + } + return 2; + } + + auto version = GetPythonVersion(module); + + // found initialized Python runtime, gather and check the APIs we need for a successful attach... 
+ auto addPendingCall = (Py_AddPendingCall*)GetProcAddress(module, "Py_AddPendingCall"); + auto curPythonThread = (PyThreadState**)(void*)GetProcAddress(module, "_PyThreadState_Current"); + auto interpHead = (PyInterpreterState_Head*)GetProcAddress(module, "PyInterpreterState_Head"); + auto gilEnsure = (PyGILState_Ensure*)GetProcAddress(module, "PyGILState_Ensure"); + auto gilRelease = (PyGILState_Release*)GetProcAddress(module, "PyGILState_Release"); + auto threadHead = (PyInterpreterState_ThreadHead*)GetProcAddress(module, "PyInterpreterState_ThreadHead"); + auto initThreads = (PyEval_Lock*)GetProcAddress(module, "PyEval_InitThreads"); + auto acquireLock = (PyEval_Lock*)GetProcAddress(module, "PyEval_AcquireLock"); + auto releaseLock = (PyEval_Lock*)GetProcAddress(module, "PyEval_ReleaseLock"); + auto threadsInited = (PyEval_ThreadsInitialized*)GetProcAddress(module, "PyEval_ThreadsInitialized"); + auto threadNext = (PyThreadState_Next*)GetProcAddress(module, "PyThreadState_Next"); + auto threadSwap = (PyThreadState_Swap*)GetProcAddress(module, "PyThreadState_Swap"); + auto pyDictNew = (PyDict_New*)GetProcAddress(module, "PyDict_New"); + auto pyModuleNew = (PyModule_New*)GetProcAddress(module, "PyModule_New"); + auto pyModuleGetDict = (PyModule_GetDict*)GetProcAddress(module, "PyModule_GetDict"); + auto pyCompileString = (Py_CompileString*)GetProcAddress(module, "Py_CompileString"); + auto pyEvalCode = (PyEval_EvalCode*)GetProcAddress(module, "PyEval_EvalCode"); + auto getDictItem = (PyDict_GetItemString*)GetProcAddress(module, "PyDict_GetItemString"); + auto call = (PyObject_CallFunctionObjArgs*)GetProcAddress(module, "PyObject_CallFunctionObjArgs"); + auto getBuiltins = (PyEval_GetBuiltins*)GetProcAddress(module, "PyEval_GetBuiltins"); + auto dictSetItem = (PyDict_SetItemString*)GetProcAddress(module, "PyDict_SetItemString"); + PyInt_FromLong* intFromLong; + PyString_FromString* strFromString; + PyInt_FromSize_t* intFromSizeT; + if (version >= PythonVersion_30) { + intFromLong = (PyInt_FromLong*)GetProcAddress(module, "PyLong_FromLong"); + intFromSizeT = (PyInt_FromSize_t*)GetProcAddress(module, "PyLong_FromSize_t"); + if (version >= PythonVersion_33) { + strFromString = (PyString_FromString*)GetProcAddress(module, "PyUnicode_FromString"); + } else { + strFromString = (PyString_FromString*)GetProcAddress(module, "PyUnicodeUCS2_FromString"); + } + } else { + intFromLong = (PyInt_FromLong*)GetProcAddress(module, "PyInt_FromLong"); + strFromString = (PyString_FromString*)GetProcAddress(module, "PyString_FromString"); + intFromSizeT = (PyInt_FromSize_t*)GetProcAddress(module, "PyInt_FromSize_t"); + } + auto intervalCheck = (int*)GetProcAddress(module, "_Py_CheckInterval"); + auto errOccurred = (PyErr_Occurred*)GetProcAddress(module, "PyErr_Occurred"); + auto pyErrFetch = (PyErr_Fetch*)GetProcAddress(module, "PyErr_Fetch"); + auto pyErrRestore = (PyErr_Restore*)GetProcAddress(module, "PyErr_Restore"); + auto pyErrPrint = (PyErr_Print*)GetProcAddress(module, "PyErr_Print"); + auto pyImportMod = (PyImport_ImportModule*) GetProcAddress(module, "PyImport_ImportModule"); + auto pyGetAttr = (PyObject_GetAttrString*)GetProcAddress(module, "PyObject_GetAttrString"); + auto pySetAttr = (PyObject_SetAttrString*)GetProcAddress(module, "PyObject_SetAttrString"); + auto pyHasAttr = (PyObject_HasAttrString*)GetProcAddress(module, "PyObject_HasAttrString"); + auto pyNone = (PyObject*)GetProcAddress(module, "_Py_NoneStruct"); + auto getSwitchInterval = (_PyEval_GetSwitchInterval*)GetProcAddress(module, 
"_PyEval_GetSwitchInterval"); + auto setSwitchInterval = (_PyEval_SetSwitchInterval*)GetProcAddress(module, "_PyEval_SetSwitchInterval"); + auto boolFromLong = (PyBool_FromLong*)GetProcAddress(module, "PyBool_FromLong"); + auto getThreadTls = (PyThread_get_key_value*)GetProcAddress(module, "PyThread_get_key_value"); + auto setThreadTls = (PyThread_set_key_value*)GetProcAddress(module, "PyThread_set_key_value"); + auto delThreadTls = (PyThread_delete_key_value*)GetProcAddress(module, "PyThread_delete_key_value"); + auto pyGilStateEnsure = (PyGILState_EnsureFunc*)GetProcAddress(module, "PyGILState_Ensure"); + auto pyGilStateRelease = (PyGILState_ReleaseFunc*)GetProcAddress(module, "PyGILState_Release"); + auto PyCFrame_Type = (PyTypeObject*)GetProcAddress(module, "PyCFrame_Type"); + auto pyRun_SimpleString = (PyRun_SimpleString*)GetProcAddress(module, "PyRun_SimpleString"); + + if (addPendingCall == nullptr || curPythonThread == nullptr || interpHead == nullptr || gilEnsure == nullptr || gilRelease == nullptr || threadHead == nullptr || + initThreads == nullptr || releaseLock == nullptr || threadsInited == nullptr || threadNext == nullptr || threadSwap == nullptr || + pyDictNew == nullptr || pyCompileString == nullptr || pyEvalCode == nullptr || getDictItem == nullptr || call == nullptr || + getBuiltins == nullptr || dictSetItem == nullptr || intFromLong == nullptr || pyErrRestore == nullptr || pyErrFetch == nullptr || + errOccurred == nullptr || pyImportMod == nullptr || pyGetAttr == nullptr || pyNone == nullptr || pySetAttr == nullptr || boolFromLong == nullptr || + getThreadTls == nullptr || setThreadTls == nullptr || delThreadTls == nullptr || releaseLock == nullptr || + pyGilStateEnsure == nullptr || pyGilStateRelease == nullptr || pyRun_SimpleString == nullptr) { + // we're missing some APIs, we cannot attach. + if(showDebugInfo){ + std::cout << "Error, missing Python API!! " << std::endl << std::flush; + } + return 3; + } + + auto head = interpHead(); + if (head == nullptr) { + // this interpreter is loaded but not initialized. + if(showDebugInfo){ + std::cout << "Interpreter not initialized! " << std::endl << std::flush; + } + return 4; + } + + GilHolder gilLock(gilEnsure, gilRelease); // acquire and hold the GIL until done... + + auto pyTrue = boolFromLong(1); + auto pyFalse = boolFromLong(0); + + + auto pydevdTracingMod = PyObjectHolder(isDebug, pyImportMod("pydevd_tracing")); + if (*pydevdTracingMod == nullptr) { + if(showDebugInfo){ + std::cout << "pydevd_tracing module null! " << std::endl << std::flush; + } + return 7; + } + + if(!pyHasAttr(pydevdTracingMod.ToPython(), "_original_settrace")){ + if(showDebugInfo){ + std::cout << "pydevd_tracing module has no _original_settrace! " << std::endl << std::flush; + } + return 8; + } + + auto settrace = PyObjectHolder(isDebug, pyGetAttr(pydevdTracingMod.ToPython(), "_original_settrace")); + if (*settrace == nullptr) { + if(showDebugInfo){ + std::cout << "pydevd_tracing._original_settrace null! " << std::endl << std::flush; + } + return 9; + } + + auto pydevdMod = PyObjectHolder(isDebug, pyImportMod("pydevd")); + if (*pydevdMod == nullptr) { + if(showDebugInfo){ + std::cout << "pydevd module null! " << std::endl << std::flush; + } + return 10; + } + + auto getGlobalDebugger = PyObjectHolder(isDebug, pyGetAttr(pydevdMod.ToPython(), "GetGlobalDebugger")); + if (*getGlobalDebugger == nullptr) { + if(showDebugInfo){ + std::cout << "pydevd.GetGlobalDebugger null! 
" << std::endl << std::flush; + } + return 11; + } + + auto globalDbg = PyObjectHolder(isDebug, call(getGlobalDebugger.ToPython(), NULL)); + if (*globalDbg == nullptr) { + if(showDebugInfo){ + std::cout << "pydevd.GetGlobalDebugger() returned null! " << std::endl << std::flush; + } + return 12; + } + + if(!pyHasAttr(globalDbg.ToPython(), "trace_dispatch")){ + if(showDebugInfo){ + std::cout << "pydevd.GetGlobalDebugger() has no attribute trace_dispatch! " << std::endl << std::flush; + } + return 13; + } + + auto traceFunc = PyObjectHolder(isDebug, pyGetAttr(globalDbg.ToPython(), "trace_dispatch")); + if (*traceFunc == nullptr) { + if(showDebugInfo){ + std::cout << "pydevd.GetGlobalDebugger().trace_dispatch returned null! " << std::endl << std::flush; + } + return 14; + } + + + + // we need to walk the thread list each time after we've initialized a thread so that we are always + // dealing w/ a valid thread list (threads can exit when we run code and therefore the current thread + // could be corrupt). We also don't care about newly created threads as our start_new_thread wrapper + // will handle those. So we collect the initial set of threads first here so that we don't keep iterating + // if the program is spawning large numbers of threads. + unordered_set initialThreads; + for (auto curThread = threadHead(head); curThread != nullptr; curThread = threadNext(curThread)) { + initialThreads.insert(curThread); + } + + int retVal = 0; + unordered_set seenThreads; + { + // find what index is holding onto the thread state... + auto curPyThread = *curPythonThread; + int threadStateIndex = -1; + for (int i = 0; i < 100000; i++) { + void* value = getThreadTls(i); + if (value == curPyThread) { + threadStateIndex = i; + break; + } + } + + bool foundThread; + int processedThreads = 0; + do { + foundThread = false; + for (auto curThread = threadHead(head); curThread != nullptr; curThread = threadNext(curThread)) { + if (initialThreads.find(curThread) == initialThreads.end() || + seenThreads.insert(curThread).second == false) { + continue; + } + foundThread = true; + processedThreads++; + + long threadId = GetPythonThreadId(version, curThread); + // skip this thread - it doesn't really have any Python code on it... + if (threadId != GetCurrentThreadId()) { + // create new debugger Thread object on our injected thread + auto pyThreadId = PyObjectHolder(isDebug, intFromLong(threadId)); + PyFrameObject* frame; + // update all of the frames so they have our trace func + if (PyThreadState_25_27::IsFor(version)) { + frame = ((PyThreadState_25_27*)curThread)->frame; + } else if (PyThreadState_30_33::IsFor(version)) { + frame = ((PyThreadState_30_33*)curThread)->frame; + } else if (PyThreadState_34_36::IsFor(version)) { + frame = ((PyThreadState_34_36*)curThread)->frame; + }else{ + if(showDebugInfo){ + std::cout << "Python version not handled! " << version << std::endl << std::flush; + } + retVal = 15; + break; + } + + // switch to our new thread so we can call sys.settrace on it... + // all of the work here needs to be minimal - in particular we shouldn't + // ever evaluate user defined code as we could end up switching to this + // thread on the main thread and corrupting state. + auto prevThreadState = getThreadTls(threadStateIndex); + delThreadTls(threadStateIndex); + setThreadTls(threadStateIndex, curThread); + auto prevThread = threadSwap(curThread); + + // save and restore the error in case something funky happens... 
+ auto errOccured = errOccurred(); + PyObject *type, *value, *traceback; + if (errOccured) { + pyErrFetch(&type, &value, &traceback); + } + + if(showDebugInfo){ + std::cout << "setting trace for thread: " << threadId << std::endl << std::flush; + } + + DecRef(call(settrace.ToPython(), traceFunc.ToPython(), NULL), isDebug); + + if (errOccured) { + pyErrRestore(type, value, traceback); + } + + // update all of the frames so they have our trace func + auto curFrame = (PyFrameObject*)GetPyObjectPointerNoDebugInfo(isDebug, frame); + while (curFrame != nullptr) { + // Special case for CFrame objects + // Stackless CFrame does not have a trace function + // This will just prevent a crash on attach. + if (((PyObject*)curFrame)->ob_type != PyCFrame_Type) { + DecRef(curFrame->f_trace, isDebug); + IncRef(*traceFunc); + curFrame->f_trace = traceFunc.ToPython(); + } + curFrame = (PyFrameObject*)GetPyObjectPointerNoDebugInfo(isDebug, curFrame->f_back); + } + + delThreadTls(threadStateIndex); + setThreadTls(threadStateIndex, prevThread); + threadSwap(prevThread); + } + break; + } + } while (foundThread); + } + + + + return retVal; + + } + + + + /** + * Return codes: + * + * -2 = could not allocate memory + * -3 = could not allocate memory to enumerate processes + * + * 0 = all OK. + * 1 = Py_IsInitialized not found + * 2 = Py_IsInitialized returned false + * 3 = Missing Python API + * 4 = Interpreter not initialized + * 5 = Python version unknown + * 6 = Connect timeout + * + * result[0] should have the same result from the return function + * result[0] is also used to set the startup info (on whether to show debug info + * and if the debugger tracing should be set). + **/ + DECLDIR int AttachAndRunPythonCode(const char *command, int *result ) + { + + int SHOW_DEBUG_INFO = 1; + int CONNECT_DEBUGGER = 2; + + bool showDebugInfo = (result[0] & SHOW_DEBUG_INFO) != 0; + + if(showDebugInfo){ + std::cout << "AttachAndRunPythonCode started (showing debug info). " << std::endl << std::flush; + } + + bool connectDebuggerTracing = (result[0] & CONNECT_DEBUGGER) != 0; + if(showDebugInfo){ + std::cout << "connectDebuggerTracing: " << connectDebuggerTracing << std::endl << std::flush; + } + + HANDLE hProcess = GetCurrentProcess(); + DWORD modSize = sizeof(HMODULE) * 1024; + HMODULE* hMods = (HMODULE*)_malloca(modSize); + if (hMods == nullptr) { + result[0] = -2; + return result[0]; + } + + DWORD modsNeeded; + while (!EnumProcessModules(hProcess, hMods, modSize, &modsNeeded)) { + // try again w/ more space... + _freea(hMods); + hMods = (HMODULE*)_malloca(modsNeeded); + if (hMods == nullptr) { + result[0] = -3; + return result[0]; + } + modSize = modsNeeded; + } + int attached = -1; + { + bool pythonFound = false; + for (size_t i = 0; i < modsNeeded / sizeof(HMODULE); i++) { + bool isDebug; + if (IsPythonModule(hMods[i], isDebug)) { + pythonFound = true; + int temp = DoAttach(hMods[i], isDebug, command, showDebugInfo); + if (temp == 0) { + // we've successfully attached the debugger + attached = 0; + if(connectDebuggerTracing){ + if(showDebugInfo){ + std::cout << "SetSysTraceFunc " << std::endl << std::flush; + } + attached = SetSysTraceFunc(hMods[i], isDebug, showDebugInfo); + } + break; + }else{ + if(temp > attached){ + //I.e.: the higher the value the more significant it is. 
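+                        //(if more than one python module is loaded into the process, the most
+                        // informative failure code seen so far is kept and reported back to the
+                        // caller through result[0].)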
+ attached = temp; + } + } + } + } + } + + if(showDebugInfo){ + std::cout << "Result: " << attached << std::endl << std::flush; + } + result[0] = attached; + return result[0]; + } + + + + + + /** + * + * + * + * + * + **/ + DECLDIR int AttachDebuggerTracing(bool showDebugInfo) + { + HANDLE hProcess = GetCurrentProcess(); + DWORD modSize = sizeof(HMODULE) * 1024; + HMODULE* hMods = (HMODULE*)_malloca(modSize); + if (hMods == nullptr) { + if(showDebugInfo){ + std::cout << "hmods not allocated! " << std::endl << std::flush; + } + return -2; + } + + DWORD modsNeeded; + while (!EnumProcessModules(hProcess, hMods, modSize, &modsNeeded)) { + // try again w/ more space... + _freea(hMods); + hMods = (HMODULE*)_malloca(modsNeeded); + if (hMods == nullptr) { + if(showDebugInfo){ + std::cout << "hmods not allocated (2)! " << std::endl << std::flush; + } + return -3; + } + modSize = modsNeeded; + } + int attached = -1; + { + bool pythonFound = false; + for (size_t i = 0; i < modsNeeded / sizeof(HMODULE); i++) { + bool isDebug; + if (IsPythonModule(hMods[i], isDebug)) { + pythonFound = true; + if(showDebugInfo){ + std::cout << "setting sys trace! " << std::endl << std::flush; + } + int temp = SetSysTraceFunc(hMods[i], isDebug, showDebugInfo); + if (temp == 0) { + // we've successfully attached the debugger + attached = 0; + break; + }else{ + if(temp > attached){ + //I.e.: the higher the value the more significant it is. + attached = temp; + } + } + } + } + } + + + return attached; + } + +} \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/dll/attach.h b/ptvsd/pydevd/pydevd_attach_to_process/dll/attach.h new file mode 100644 index 00000000..24ff0df9 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/dll/attach.h @@ -0,0 +1,46 @@ +/* **************************************************************************** + * + * Copyright (c) Brainwy software Ltda. + * + * This source code is subject to terms and conditions of the Apache License, Version 2.0. A + * copy of the license can be found in the License.html file at the root of this distribution. If + * you cannot locate the Apache License, Version 2.0, please send an email to + * vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound + * by the terms of the Apache License, Version 2.0. + * + * You must not remove this notice, or any other, from this software. 
+ * + * ***************************************************************************/ + +#ifndef _ATTACH_DLL_H_ +#define _ATTACH_DLL_H_ + +#if defined DLL_EXPORT +#define DECLDIR __declspec(dllexport) +#else +#define DECLDIR __declspec(dllimport) +#endif + +extern "C" +{ + DECLDIR int AttachAndRunPythonCode(const char *command, int *result ); + + + /* + Could be used with ctypes (note that the threading should be initialized, so, + doing it in a thread as below is recommended): + + def check(): + + import ctypes + lib = ctypes.cdll.LoadLibrary(r'C:\...\attach_x86.dll') + print 'result', lib.AttachDebuggerTracing(0) + + t = threading.Thread(target=check) + t.start() + t.join() + */ + DECLDIR int AttachDebuggerTracing(bool showDebugInfo); +} + +#endif \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/dll/compile_dll.bat b/ptvsd/pydevd/pydevd_attach_to_process/dll/compile_dll.bat new file mode 100644 index 00000000..27737f31 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/dll/compile_dll.bat @@ -0,0 +1,9 @@ +call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86 +cl -DUNICODE -D_UNICODE /EHsc /LD attach.cpp /link /out:attach_x86.dll +copy attach_x86.dll ..\attach_x86.dll /Y + + + +call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86_amd64 +cl -DUNICODE -D_UNICODE /EHsc /LD attach.cpp /link /out:attach_amd64.dll +copy attach_amd64.dll ..\attach_amd64.dll /Y \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/dll/python.h b/ptvsd/pydevd/pydevd_attach_to_process/dll/python.h new file mode 100644 index 00000000..b4cd3f86 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/dll/python.h @@ -0,0 +1,611 @@ +/* **************************************************************************** + * + * Copyright (c) Microsoft Corporation. + * + * This source code is subject to terms and conditions of the Apache License, Version 2.0. A + * copy of the license can be found in the License.html file at the root of this distribution. If + * you cannot locate the Apache License, Version 2.0, please send an email to + * vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound + * by the terms of the Apache License, Version 2.0. + * + * You must not remove this notice, or any other, from this software. + * + * ***************************************************************************/ + +#ifndef __PYTHON_H__ +#define __PYTHON_H__ + +// must be kept in sync with PythonLanguageVersion.cs +enum PythonVersion { + PythonVersion_Unknown, + PythonVersion_25 = 0x0205, + PythonVersion_26 = 0x0206, + PythonVersion_27 = 0x0207, + PythonVersion_30 = 0x0300, + PythonVersion_31 = 0x0301, + PythonVersion_32 = 0x0302, + PythonVersion_33 = 0x0303, + PythonVersion_34 = 0x0304, + PythonVersion_35 = 0x0305, + PythonVersion_36 = 0x0306 +}; + + +// defines limited header of Python API for compatible access across a number of Pythons. 
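+// The classes below deliberately mirror the in-memory layout of the corresponding CPython structs
+// for each supported version range (the IsFor() helpers select the right variant at runtime), so
+// attach.cpp can read fields of a foreign interpreter directly, e.g.:
+//
+//     if (PyThreadState_34_36::IsFor(version)) {
+//         frame = ((PyThreadState_34_36*)curThread)->frame;
+//     }
+//
+// They only work while they stay layout-compatible with the real CPython headers.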
+ +class PyTypeObject; +class PyThreadState; + +#define PyObject_HEAD \ + size_t ob_refcnt; \ + PyTypeObject *ob_type; + +#define PyObject_VAR_HEAD \ + PyObject_HEAD \ + size_t ob_size; /* Number of items in variable part */ + +class PyObject { +public: + PyObject_HEAD +}; + +class PyVarObject : public PyObject { +public: + size_t ob_size; /* Number of items in variable part */ +}; + +// 2.4 - 2.7 compatible +class PyCodeObject25_27 : public PyObject { +public: + int co_argcount; /* #arguments, except *args */ + int co_nlocals; /* #local variables */ + int co_stacksize; /* #entries needed for evaluation stack */ + int co_flags; /* CO_..., see below */ + PyObject *co_code; /* instruction opcodes */ + PyObject *co_consts; /* list (constants used) */ + PyObject *co_names; /* list of strings (names used) */ + PyObject *co_varnames; /* tuple of strings (local variable names) */ + PyObject *co_freevars; /* tuple of strings (free variable names) */ + PyObject *co_cellvars; /* tuple of strings (cell variable names) */ + /* The rest doesn't count for hash/cmp */ + PyObject *co_filename; /* string (where it was loaded from) */ + PyObject *co_name; /* string (name, for reference) */ + int co_firstlineno; /* first source line number */ + PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 2 && (minorVersion >= 5 && minorVersion <= 7); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_25 && version <= PythonVersion_27; + } +}; + +// 3.0-3.2 +class PyCodeObject30_32 : public PyObject { +public: + int co_argcount; /* #arguments, except *args */ + int co_kwonlyargcount; /* #keyword only arguments */ + int co_nlocals; /* #local variables */ + int co_stacksize; /* #entries needed for evaluation stack */ + int co_flags; /* CO_..., see below */ + PyObject *co_code; /* instruction opcodes */ + PyObject *co_consts; /* list (constants used) */ + PyObject *co_names; /* list of strings (names used) */ + PyObject *co_varnames; /* tuple of strings (local variable names) */ + PyObject *co_freevars; /* tuple of strings (free variable names) */ + PyObject *co_cellvars; /* tuple of strings (cell variable names) */ + /* The rest doesn't count for hash or comparisons */ + PyObject *co_filename; /* unicode (where it was loaded from) */ + PyObject *co_name; /* unicode (name, for reference) */ + int co_firstlineno; /* first source line number */ + PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + void *co_zombieframe; /* for optimization only (see frameobject.c) */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion >= 0 && minorVersion <= 2); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_30 && version <= PythonVersion_32; + } +}; + +// 3.3-3.5 +class PyCodeObject33_35 : public PyObject { +public: + int co_argcount; /* #arguments, except *args */ + int co_kwonlyargcount; /* #keyword only arguments */ + int co_nlocals; /* #local variables */ + int co_stacksize; /* #entries needed for evaluation stack */ + int co_flags; /* CO_..., see below */ + PyObject *co_code; /* instruction opcodes */ + PyObject *co_consts; /* list (constants used) */ + PyObject *co_names; /* list of strings (names used) */ + PyObject *co_varnames; /* tuple of strings (local variable names) */ + PyObject *co_freevars; /* tuple of strings (free variable names) */ + PyObject *co_cellvars; /* tuple of strings (cell 
variable names) */ + /* The rest doesn't count for hash or comparisons */ + unsigned char *co_cell2arg; /* Maps cell vars which are arguments. */ + PyObject *co_filename; /* unicode (where it was loaded from) */ + PyObject *co_name; /* unicode (name, for reference) */ + int co_firstlineno; /* first source line number */ + PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + void *co_zombieframe; /* for optimization only (see frameobject.c) */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion >= 3 && minorVersion <= 5); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_33 && version <= PythonVersion_35; + } +}; + +// 3.6 +class PyCodeObject36 : public PyObject { +public: + int co_argcount; /* #arguments, except *args */ + int co_kwonlyargcount; /* #keyword only arguments */ + int co_nlocals; /* #local variables */ + int co_stacksize; /* #entries needed for evaluation stack */ + int co_flags; /* CO_..., see below */ + int co_firstlineno; /* first source line number */ + PyObject *co_code; /* instruction opcodes */ + PyObject *co_consts; /* list (constants used) */ + PyObject *co_names; /* list of strings (names used) */ + PyObject *co_varnames; /* tuple of strings (local variable names) */ + PyObject *co_freevars; /* tuple of strings (free variable names) */ + PyObject *co_cellvars; /* tuple of strings (cell variable names) */ + /* The rest doesn't count for hash or comparisons */ + unsigned char *co_cell2arg; /* Maps cell vars which are arguments. */ + PyObject *co_filename; /* unicode (where it was loaded from) */ + PyObject *co_name; /* unicode (name, for reference) */ + PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + void *co_zombieframe; /* for optimization only (see frameobject.c) */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion >= 6; + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_36; + } +}; + +// 2.5 - 3.6 +class PyFunctionObject : public PyObject { +public: + PyObject *func_code; /* A code object */ +}; + +// 2.5 - 2.7 compatible +class PyStringObject : public PyVarObject { +public: + long ob_shash; + int ob_sstate; + char ob_sval[1]; + + /* Invariants: + * ob_sval contains space for 'ob_size+1' elements. + * ob_sval[ob_size] == 0. + * ob_shash is the hash of the string or -1 if not computed yet. + * ob_sstate != 0 iff the string object is in stringobject.c's + * 'interned' dictionary; in this case the two references + * from 'interned' to this object are *not counted* in ob_refcnt. + */ +}; + +// 2.4 - 3.2 compatible +typedef struct { + PyObject_HEAD + size_t length; /* Length of raw Unicode data in buffer */ + wchar_t *str; /* Raw Unicode buffer */ + long hash; /* Hash value; -1 if not set */ +} PyUnicodeObject; + +// 2.4 - 3.6 compatible +class PyFrameObject : public PyVarObject { +public: + PyFrameObject *f_back; /* previous frame, or NULL */ + PyObject *f_code; /* code segment */ + PyObject *f_builtins; /* builtin symbol table (PyDictObject) */ + PyObject *f_globals; /* global symbol table (PyDictObject) */ + PyObject *f_locals; /* local symbol table (any mapping) */ + PyObject **f_valuestack; /* points after the last local */ + /* Next free slot in f_valuestack. Frame creation sets to f_valuestack. + Frame evaluation usually NULLs it, but a frame that yields sets it + to the current stack top. 
*/ + PyObject **f_stacktop; + PyObject *f_trace; /* Trace function */ + PyObject *f_exc_type, *f_exc_value, *f_exc_traceback; +}; + +#define CO_MAXBLOCKS 20 +typedef struct { + int b_type; /* what kind of block this is */ + int b_handler; /* where to jump to find handler */ + int b_level; /* value stack level to pop to */ +} PyTryBlock; + +class PyFrameObject25_33 : public PyFrameObject { +public: + PyThreadState* f_tstate; + int f_lasti; /* Last instruction if called */ + /* As of 2.3 f_lineno is only valid when tracing is active (i.e. when + f_trace is set) -- at other times use PyCode_Addr2Line instead. */ + int f_lineno; /* Current line number */ + int f_iblock; /* index in f_blockstack */ + PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */ + PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 2 && (minorVersion >= 5 && minorVersion <= 7) || + majorVersion == 3 && (minorVersion >= 0 && minorVersion <= 3); + } +}; + +class PyFrameObject34_36 : public PyFrameObject { +public: + /* Borrowed reference to a generator, or NULL */ + PyObject *f_gen; + + int f_lasti; /* Last instruction if called */ + /* As of 2.3 f_lineno is only valid when tracing is active (i.e. when + f_trace is set) -- at other times use PyCode_Addr2Line instead. */ + int f_lineno; /* Current line number */ + int f_iblock; /* index in f_blockstack */ + char f_executing; /* whether the frame is still executing */ + PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */ + PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion >= 4 && minorVersion <= 6; + } +}; + + +typedef void (*destructor)(PyObject *); + +// 2.4 - 3.6 +class PyMethodDef { +public: + char *ml_name; /* The name of the built-in function/method */ +}; + + +// +// 2.4 - 3.5, 2.4 has different compat in 64-bit but we don't support any of the released 64-bit platforms (which includes only IA-64) +// While these are compatible there are fields only available on later versions. +class PyTypeObject : public PyVarObject { +public: + const char *tp_name; /* For printing, in format "." 
*/ + size_t tp_basicsize, tp_itemsize; /* For allocation */ + + /* Methods to implement standard operations */ + + destructor tp_dealloc; + void* tp_print; + void* tp_getattr; + void* tp_setattr; + union { + void* tp_compare; /* 2.4 - 3.4 */ + void* tp_as_async; /* 3.5 - 3.6 */ + }; + void* tp_repr; + + /* Method suites for standard classes */ + + void *tp_as_number; + void*tp_as_sequence; + void*tp_as_mapping; + + /* More standard operations (here for binary compatibility) */ + + void* tp_hash; + void* tp_call; + void* tp_str; + void* tp_getattro; + void* tp_setattro; + + /* Functions to access object as input/output buffer */ + void*tp_as_buffer; + + /* Flags to define presence of optional/expanded features */ + long tp_flags; + + const char *tp_doc; /* Documentation string */ + + /* Assigned meaning in release 2.0 */ + /* call function for all accessible objects */ + void* tp_traverse; + + /* delete references to contained objects */ + void* tp_clear; + + /* Assigned meaning in release 2.1 */ + /* rich comparisons */ + void* tp_richcompare; + + /* weak reference enabler */ + size_t tp_weaklistoffset; + + /* Added in release 2.2 */ + /* Iterators */ + void* tp_iter; + void* tp_iternext; + + /* Attribute descriptor and subclassing stuff */ + PyMethodDef *tp_methods; + struct PyMemberDef *tp_members; + struct PyGetSetDef *tp_getset; + struct _typeobject *tp_base; + PyObject *tp_dict; + void* tp_descr_get; + void* tp_descr_set; + size_t tp_dictoffset; + void* tp_init; + void* tp_alloc; + void* tp_new; + void* tp_free; /* Low-level free-memory routine */ + void* tp_is_gc; /* For PyObject_IS_GC */ + PyObject *tp_bases; + PyObject *tp_mro; /* method resolution order */ + PyObject *tp_cache; + PyObject *tp_subclasses; + PyObject *tp_weaklist; + void* tp_del; + + /* Type attribute cache version tag. Added in version 2.6 */ + unsigned int tp_version_tag; +}; + +// 2.4 - 3.6 +class PyTupleObject : public PyVarObject { +public: + PyObject *ob_item[1]; + + /* ob_item contains space for 'ob_size' elements. + * Items must normally not be NULL, except during construction when + * the tuple is not yet visible outside the function that builds it. + */ +}; + +// 2.4 - 3.6 +class PyCFunctionObject : public PyObject { +public: + PyMethodDef *m_ml; /* Description of the C function to call */ + PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ + PyObject *m_module; /* The __module__ attribute, can be anything */ +}; + +typedef int (*Py_tracefunc)(PyObject *, PyFrameObject *, int, PyObject *); + +#define PyTrace_CALL 0 +#define PyTrace_EXCEPTION 1 +#define PyTrace_LINE 2 +#define PyTrace_RETURN 3 +#define PyTrace_C_CALL 4 +#define PyTrace_C_EXCEPTION 5 +#define PyTrace_C_RETURN 6 + +class PyInterpreterState { +}; + +class PyThreadState { }; + +class PyThreadState_25_27 : public PyThreadState { +public: + /* See Python/ceval.c for comments explaining most fields */ + + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObject *frame; + int recursion_depth; + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. 
*/ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + /* tick_counter is incremented whenever the check_interval ticker + * reaches zero. The purpose is to give a useful measure of the number + * of interpreted bytecode instructions in a given thread. This + * extremely lightweight statistic collector may be of interest to + * profilers (like psyco.jit()), although nothing in the core uses it. + */ + int tick_counter; + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 2 && (minorVersion >= 5 && minorVersion <= 7); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_25 && version <= PythonVersion_27; + } +}; + +class PyThreadState_30_33 : public PyThreadState { +public: + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObject *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + char recursion_critical; /* The current calls must not cause + a stack overflow. */ + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + /* tick_counter is incremented whenever the check_interval ticker + * reaches zero. The purpose is to give a useful measure of the number + * of interpreted bytecode instructions in a given thread. This + * extremely lightweight statistic collector may be of interest to + * profilers (like psyco.jit()), although nothing in the core uses it. + */ + int tick_counter; + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion >= 0 && minorVersion <= 3); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_30 && version <= PythonVersion_33; + } +}; + +class PyThreadState_34_36 : public PyThreadState { +public: + PyThreadState *prev; + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObject *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + char recursion_critical; /* The current calls must not cause + a stack overflow. */ + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. 
*/ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion >= 4 && minorVersion <= 6; + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_34 && version <= PythonVersion_36; + } +}; + +class PyIntObject : public PyObject { +public: + long ob_ival; +}; + +class Py3kLongObject : public PyVarObject { +public: + DWORD ob_digit[1]; +}; + +class PyOldStyleClassObject : public PyObject { +public: + PyObject *cl_bases; /* A tuple of class objects */ + PyObject *cl_dict; /* A dictionary */ + PyObject *cl_name; /* A string */ + /* The following three are functions or NULL */ + PyObject *cl_getattr; + PyObject *cl_setattr; + PyObject *cl_delattr; +}; + +class PyInstanceObject : public PyObject { +public: + PyOldStyleClassObject *in_class; /* The class object */ + PyObject *in_dict; /* A dictionary */ + PyObject *in_weakreflist; /* List of weak references */ +}; + +typedef const char* (GetVersionFunc) (); + +static PythonVersion GetPythonVersion(HMODULE hMod) { + auto versionFunc = (GetVersionFunc*)GetProcAddress(hMod, "Py_GetVersion"); + if(versionFunc != nullptr) { + auto version = versionFunc(); + if(version != nullptr && strlen(version) >= 3 && version[1] == '.') { + if(version[0] == '2') { + switch(version[2]) { + case '5': return PythonVersion_25; + case '6': return PythonVersion_26; + case '7': return PythonVersion_27; + } + } else if(version[0] == '3') { + switch(version[2]) { + case '0': return PythonVersion_30; + case '1': return PythonVersion_31; + case '2': return PythonVersion_32; + case '3': return PythonVersion_33; + case '4': return PythonVersion_34; + case '5': return PythonVersion_35; + case '6': return PythonVersion_36; + } + } + } + } + return PythonVersion_Unknown; +} + +#endif \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/dll/stdafx.cpp b/ptvsd/pydevd/pydevd_attach_to_process/dll/stdafx.cpp new file mode 100644 index 00000000..4b80b546 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/dll/stdafx.cpp @@ -0,0 +1,22 @@ +/* **************************************************************************** + * + * Copyright (c) Microsoft Corporation. + * + * This source code is subject to terms and conditions of the Apache License, Version 2.0. A + * copy of the license can be found in the License.html file at the root of this distribution. If + * you cannot locate the Apache License, Version 2.0, please send an email to + * vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound + * by the terms of the Apache License, Version 2.0. + * + * You must not remove this notice, or any other, from this software. 
+ * + * ***************************************************************************/ + +// stdafx.cpp : source file that includes just the standard includes +// PyDebugAttach.pch will be the pre-compiled header +// stdafx.obj will contain the pre-compiled type information + +#include "stdafx.h" + +// TODO: reference any additional headers you need in STDAFX.H +// and not in this file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/dll/stdafx.h b/ptvsd/pydevd/pydevd_attach_to_process/dll/stdafx.h new file mode 100644 index 00000000..8b75af5d --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/dll/stdafx.h @@ -0,0 +1,36 @@ +/* **************************************************************************** + * + * Copyright (c) Microsoft Corporation. + * + * This source code is subject to terms and conditions of the Apache License, Version 2.0. A + * copy of the license can be found in the License.html file at the root of this distribution. If + * you cannot locate the Apache License, Version 2.0, please send an email to + * vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound + * by the terms of the Apache License, Version 2.0. + * + * You must not remove this notice, or any other, from this software. + * + * ***************************************************************************/ + +// stdafx.h : include file for standard system include files, +// or project specific include files that are used frequently, but +// are changed infrequently +// + +#pragma once + +#include "targetver.h" + +#include +#include +#include +#include +#include + +#define WIN32_LEAN_AND_MEAN +#include +#include +#include +#include +#include +#include diff --git a/ptvsd/pydevd/pydevd_attach_to_process/dll/targetver.h b/ptvsd/pydevd/pydevd_attach_to_process/dll/targetver.h new file mode 100644 index 00000000..acff5416 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/dll/targetver.h @@ -0,0 +1,22 @@ +/* **************************************************************************** + * + * Copyright (c) Microsoft Corporation. + * + * This source code is subject to terms and conditions of the Apache License, Version 2.0. A + * copy of the license can be found in the License.html file at the root of this distribution. If + * you cannot locate the Apache License, Version 2.0, please send an email to + * vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound + * by the terms of the Apache License, Version 2.0. + * + * You must not remove this notice, or any other, from this software. + * + * ***************************************************************************/ + +#pragma once + +// Including SDKDDKVer.h defines the highest available Windows platform. + +// If you wish to build your application for a previous Windows platform, include WinSDKVer.h and +// set the _WIN32_WINNT macro to the platform you wish to support before including SDKDDKVer.h. + +#include diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/Makefile b/ptvsd/pydevd/pydevd_attach_to_process/linux/Makefile new file mode 100644 index 00000000..aedfe16e --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/Makefile @@ -0,0 +1,64 @@ +# Defaults which can be overridden. +OS = macosx +ARCH_X86 = x86 +ARCH_X86_64 = x86_64 + +CC=g++ +LD=libtool +CPPFLAGS = -I. 
+CFLAGS +=-fPIC -D_REENTRANT -nostartfiles + +ARCH_FLAG_X86 = -arch i386 +ARCH_FLAG_X86_64 = -arch x86_64 + +INSTALL_DIR_X86 = ../os/$(OS)/$(ARCH_X86) +INSTALL_DIR_X86_64 = ../os/$(OS)/$(ARCH_X86_64) +INSTALL_DIR_LINUX_X86 = ../os/$(LINUX)/$(ARCH_X86) +INSTALL_DIR_LINUX_X86_64 = ../os/$(LINUX)/$(ARCH_X86_64) + +ATTACH = attach_mac.so +ATTACH_NAME_FULL_X86 = $(INSTALL_DIR_X86)/attach_x86.dylib +ATTACH_NAME_FULL_X86_64 = $(INSTALL_DIR_X86_64)/attach_x86_64.dylib + +OBJS_ATTACH_X86 = attach_linux_$(ARCH_X86).o +OBJS_ATTACH_X86_64 = attach_linux_$(ARCH_X86_64).o + +OBJS_X86 = $(OBJS_ATTACH_X86) +OBJS_X86_64 = $(OBJS_ATTACH_X86_64) + +all: x86 x86_64 + +x86: $(ATTACH_NAME_FULL_X86) + +x86_64: $(ATTACH_NAME_FULL_X86_64) + +linux_x86: $(ATTACH_NAME_FULL_LINUX_X86) +linux_x86_64: $(ATTACH_NAME_FULL_LINUX_X86_64) + +rebuild: clean all + +$(ATTACH_NAME_FULL_X86): $(OBJS_ATTACH_X86) + mkdir -p $(INSTALL_DIR_X86) + $(CC) -dynamiclib $(ARCH_FLAG_X86) -o $(ATTACH_NAME_FULL_X86) $(OBJS_ATTACH_X86) -lc + +$(ATTACH_NAME_FULL_X86_64): $(OBJS_ATTACH_X86_64) + mkdir -p $(INSTALL_DIR_X86_64) + $(CC) -dynamiclib $(ARCH_FLAG_X86_64) -o $(ATTACH_NAME_FULL_X86_64) $(OBJS_ATTACH_X86_64) -lc + +$(ATTACH_NAME_FULL_LINUX_X86): $(OBJS_ATTACH_X86) + mkdir -p $(INSTALL_DIR_LINUX_X86) + $(CC) -m32 -g -shared -Wl,-soname,$(ATTACH) $(LDFLAGS) -o $(ATTACH_NAME_FULL_LINUX_X86) $(OBJS_ATTACH_X86) + +$(ATTACH_NAME_FULL_LINUX_X86_64): $(OBJS_ATTACH_X86_64) + mkdir -p $(INSTALL_DIR_LINUX_X86_64) + $(CC) -g -shared -Wl,-soname,$(ATTACH) $(LDFLAGS) -o $(ATTACH_NAME_FULL_LINUX_X86_64) $(OBJS_ATTACH_X86_64) + +attach_linux_$(ARCH_X86).o: attach_linux.c + $(CC) $(CFLAGS) $(ARCH_FLAG_X86) $(CPPFLAGS) -c -o $@ attach_linux.c + +attach_linux_$(ARCH_X86_64).o: attach_linux.c + $(CC) $(CFLAGS) $(ARCH_FLAG_X86_64) $(CPPFLAGS) -c -o $@ attach_linux.c + +clean : + $(RM) $(OBJS_X86) $(ATTACH_NAME_FULL_X86) + $(RM) $(OBJS_X86_64) $(ATTACH_NAME_FULL_X86_64) diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/attach_linux.c b/ptvsd/pydevd/pydevd_attach_to_process/linux/attach_linux.c new file mode 100644 index 00000000..3687ba56 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/attach_linux.c @@ -0,0 +1,284 @@ +// This is much simpler than the windows version because we're using gdb and +// we assume that gdb will call things in the correct thread already. + +//compile with: g++ -shared -o attach_linux.so -fPIC -nostartfiles attach_linux.c + +#include +#include +#include +#include +#include "python.h" +//#include used for usleep + +// Exported function: hello(): Just to print something and check that we've been +// able to connect. +extern "C" int hello(void); + +int hello() +{ + printf("Hello world!\n"); + + void *main_hndl = dlopen(NULL, 0x2); + + void *hndl = dlsym (main_hndl, "PyGILState_Ensure"); + if(hndl == NULL){ + printf("NULL\n"); + + }else{ + printf("Worked (found PyGILState_Ensure)!\n"); + } + + printf("%d", GetPythonVersion()); + + + return 2; +} + + +// This is the function which enables us to set the sys.settrace for all the threads +// which are already running. +// isDebug is pretty important! Must be true on python debug builds (python_d) +// If this value is passed wrongly the program will crash. 
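+// These entry points are normally driven from a debugger that is already stopped inside the target
+// process (see gdb_threads_settrace.py / lldb_threads_settrace.py later in this patch); e.g. for
+// each stopped thread gdb runs something like:
+//
+//     gdb.execute("call SetSysTraceFunc(%s, %s)" % (show_debug_info, is_debug))
+//
+// so gdb/lldb already take care of calling these on the right thread.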
+extern "C" int SetSysTraceFunc(bool showDebugInfo, bool isDebug); +extern "C" int DoAttach(bool isDebug, const char *command, bool showDebugInfo); + +// Internal function to keep on the tracing +int _PYDEVD_ExecWithGILSetSysStrace(bool showDebugInfo, bool isDebug); + +// Implementation details below +typedef int (*Py_IsInitialized) (); +typedef PyInterpreterState* (*PyInterpreterState_Head)(); +typedef enum { PyGILState_LOCKED, PyGILState_UNLOCKED } PyGILState_STATE; +typedef PyGILState_STATE(*PyGILState_Ensure)(); +typedef void (*PyGILState_Release)(PyGILState_STATE); +typedef PyObject* (*PyBool_FromLong)(long v); +typedef PyObject* (*PyImport_ImportModuleNoBlock) (const char *name); +typedef PyObject* (*PyObject_HasAttrString)(PyObject *o, const char *attr_name); +typedef PyObject* (*PyObject_GetAttrString)(PyObject *o, const char *attr_name); +typedef PyObject* (*PyObject_CallFunctionObjArgs)(PyObject *callable, ...); // call w/ varargs, last arg should be NULL +typedef int (*PyEval_ThreadsInitialized)(); +typedef unsigned long (*_PyEval_GetSwitchInterval)(void); +typedef void (*_PyEval_SetSwitchInterval)(unsigned long microseconds); +typedef int (*PyRun_SimpleString)(const char *command); + +// Helper so that we get a PyObject where we can access its fields (in debug or release). +PyObject* GetPyObjectPointerNoDebugInfo(bool isDebug, PyObject* object) { + if (object != NULL && isDebug) { + // debug builds have 2 extra pointers at the front that we don't care about + return (PyObject*)((size_t*)object + 2); + } + return object; +} + +// Helper so that we get a PyObject where we can access its fields (in debug or release). +PyTypeObject * GetPyObjectPointerNoDebugInfo2(bool isDebug, PyTypeObject * object) { + if (object != NULL && isDebug) { + // debug builds have 2 extra pointers at the front that we don't care about + return (PyTypeObject *)((size_t*)object + 2); + } + return object; +} + +// Helper which will decrement the reference count of an object and dealloc it if +// it's not there. + void DecRef(PyObject* object, bool isDebug) { + PyObject* noDebug = GetPyObjectPointerNoDebugInfo(isDebug, object); + + if (noDebug != NULL && --noDebug->ob_refcnt == 0) { + PyTypeObject *temp = GetPyObjectPointerNoDebugInfo2(isDebug, noDebug->ob_type); + temp->tp_dealloc(object); + } + } + +// Helper to increment the reference count to some object. 
+void IncRef(PyObject* object, bool isDebug) { + PyObject* noDebug = GetPyObjectPointerNoDebugInfo(isDebug, object); + + if (noDebug != NULL){ + noDebug->ob_refcnt++; + } +} + +class PyObjectHolder { +private: + PyObject* _object; + bool _isDebug; +public: + PyObjectHolder(bool isDebug, PyObject *object) { + _object = object; + _isDebug = isDebug; + }; + + PyObject* ToPython() { + return _object; + } + + ~PyObjectHolder() { + if(_object != NULL){ + DecRef(_object, _isDebug); + } + } +}; + + +# define CHECK_NULL(ptr, msg, returnVal) if(ptr == NULL){if(showDebugInfo){printf(msg);} return returnVal;} + +int DoAttach(bool isDebug, const char *command, bool showDebugInfo) +{ + Py_IsInitialized isInitFunc; + void *main_hndl = dlopen(NULL, 0x2); + *(void**)(&isInitFunc) = dlsym(main_hndl, "Py_IsInitialized"); + CHECK_NULL(isInitFunc, "Py_IsInitialized not found.\n", 1); + + if(!isInitFunc()){ + if(showDebugInfo){ + printf("Py_IsInitialized returned false.\n"); + } + return 2; + } + + PythonVersion version = GetPythonVersion(); + + PyInterpreterState_Head interpHeadFunc; + *(void**)(&interpHeadFunc) = dlsym(main_hndl, "PyInterpreterState_Head"); + CHECK_NULL(interpHeadFunc, "PyInterpreterState_Head not found.\n", 3); + + PyInterpreterState* head = interpHeadFunc(); + CHECK_NULL(head, "Interpreter not initialized.\n", 4); + + // Note: unlike windows where we have to do many things to enable threading + // to work to get the gil, here we'll be executing in an existing thread, + // so, it's mostly a matter of getting the GIL and running it and we shouldn't + // have any more problems. + + PyGILState_Ensure pyGilStateEnsureFunc; + *(void**)(&pyGilStateEnsureFunc) = dlsym(main_hndl, "PyGILState_Ensure"); + CHECK_NULL(pyGilStateEnsureFunc, "PyGILState_Ensure not found.\n", 5); + + PyGILState_Release pyGilStateReleaseFunc; + *(void**)(&pyGilStateReleaseFunc) = dlsym(main_hndl, "PyGILState_Release"); + CHECK_NULL(pyGilStateReleaseFunc, "PyGILState_Release not found.\n", 6); + + PyRun_SimpleString pyRun_SimpleString; + *(void**)(&pyRun_SimpleString) = dlsym(main_hndl, "PyRun_SimpleString"); + CHECK_NULL(pyRun_SimpleString, "PyRun_SimpleString not found.\n", 6); + + PyGILState_STATE pyGILState = pyGilStateEnsureFunc(); + pyRun_SimpleString(command); + //No matter what happens we have to release it. 
+ pyGilStateReleaseFunc(pyGILState); +} + + +// All of the code below is the same as: +// sys.settrace(pydevd.GetGlobalDebugger().trace_dispatch) +// +// (with error checking) +int SetSysTraceFunc(bool showDebugInfo, bool isDebug) +{ + if(showDebugInfo){ + printf("SetSysTraceFunc started.\n"); + } + Py_IsInitialized isInitFunc; + void *main_hndl = dlopen(NULL, 0x2); + *(void**)(&isInitFunc) = dlsym(main_hndl, "Py_IsInitialized"); + CHECK_NULL(isInitFunc, "Py_IsInitialized not found.\n", 1); + + if(!isInitFunc()){ + if(showDebugInfo){ + printf("Py_IsInitialized returned false.\n"); + } + return 2; + } + + PythonVersion version = GetPythonVersion(); + + PyInterpreterState_Head interpHeadFunc; + *(void**)(&interpHeadFunc) = dlsym(main_hndl, "PyInterpreterState_Head"); + CHECK_NULL(interpHeadFunc, "PyInterpreterState_Head not found.\n", 3); + + PyInterpreterState* head = interpHeadFunc(); + CHECK_NULL(head, "Interpreter not initialized.\n", 4); + + PyGILState_Ensure pyGilStateEnsureFunc; + *(void**)(&pyGilStateEnsureFunc) = dlsym(main_hndl, "PyGILState_Ensure"); + CHECK_NULL(pyGilStateEnsureFunc, "PyGILState_Ensure not found.\n", 5); + + PyGILState_Release pyGilStateReleaseFunc; + *(void**)(&pyGilStateReleaseFunc) = dlsym(main_hndl, "PyGILState_Release"); + CHECK_NULL(pyGilStateReleaseFunc, "PyGILState_Release not found.\n", 6); + + PyGILState_STATE pyGILState = pyGilStateEnsureFunc(); + int ret = _PYDEVD_ExecWithGILSetSysStrace(showDebugInfo, isDebug); + //No matter what happens we have to release it. + pyGilStateReleaseFunc(pyGILState); + return ret; +} + + +int _PYDEVD_ExecWithGILSetSysStrace(bool showDebugInfo, bool isDebug){ + PyBool_FromLong boolFromLongFunc; + void *main_hndl = dlopen(NULL, 0x2); + + *(void**)(&boolFromLongFunc) = dlsym(main_hndl, "PyBool_FromLong"); + CHECK_NULL(boolFromLongFunc, "PyBool_FromLong not found.\n", 7); + + PyObject_HasAttrString pyHasAttrFunc; + *(void**)(&pyHasAttrFunc) = dlsym(main_hndl, "PyObject_HasAttrString"); + CHECK_NULL(pyHasAttrFunc, "PyObject_HasAttrString not found.\n", 7); + + //Important: we need a non-blocking import here: PyImport_ImportModule + //could end up crashing (this makes us work only from 2.6 onwards). 
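+    //PyImport_ImportModuleNoBlock exists only from 2.6 onwards and refuses to wait on the import
+    //lock, so an attach running on an arbitrary thread cannot deadlock against a thread that is in
+    //the middle of an import (from 3.3 onwards it simply behaves like PyImport_ImportModule).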
+ PyImport_ImportModuleNoBlock pyImportModFunc; + *(void**)(&pyImportModFunc) = dlsym(main_hndl, "PyImport_ImportModuleNoBlock"); + CHECK_NULL(pyImportModFunc, "PyImport_ImportModuleNoBlock not found.\n", 8); + + + PyObjectHolder pydevdTracingMod = PyObjectHolder(isDebug, pyImportModFunc("pydevd_tracing")); + CHECK_NULL(pydevdTracingMod.ToPython(), "pydevd_tracing module null.\n", 9); + + if(!pyHasAttrFunc(pydevdTracingMod.ToPython(), "_original_settrace")){ + if(showDebugInfo){ + printf("pydevd_tracing module has no _original_settrace!\n"); + } + return 8; + } + + + PyObject_GetAttrString pyGetAttr; + *(void**)(&pyGetAttr) = dlsym(main_hndl, "PyObject_GetAttrString"); + CHECK_NULL(pyGetAttr, "PyObject_GetAttrString not found.\n", 8); + + PyObjectHolder settrace = PyObjectHolder(isDebug, pyGetAttr(pydevdTracingMod.ToPython(), "_original_settrace")); + CHECK_NULL(settrace.ToPython(), "pydevd_tracing._original_settrace null!\n", 10); + + PyObjectHolder pydevdMod = PyObjectHolder(isDebug, pyImportModFunc("pydevd")); + CHECK_NULL(pydevdMod.ToPython(), "pydevd module null.\n", 10); + + PyObjectHolder getGlobalDebugger = PyObjectHolder(isDebug, pyGetAttr(pydevdMod.ToPython(), "GetGlobalDebugger")); + CHECK_NULL(getGlobalDebugger.ToPython(), "pydevd.GetGlobalDebugger null.\n", 11); + + PyObject_CallFunctionObjArgs call; + *(void**)(&call) = dlsym(main_hndl, "PyObject_CallFunctionObjArgs"); + CHECK_NULL(call, "PyObject_CallFunctionObjArgs not found.\n", 11); + + PyObjectHolder globalDbg = PyObjectHolder(isDebug, call(getGlobalDebugger.ToPython(), NULL)); + CHECK_NULL(globalDbg.ToPython(), "pydevd.GetGlobalDebugger() returned null.\n", 12); + + if(!pyHasAttrFunc(globalDbg.ToPython(), "trace_dispatch")){ + if(showDebugInfo){ + printf("pydevd.GetGlobalDebugger() has no attribute trace_dispatch!\n"); + } + return 13; + } + + PyObjectHolder traceFunc = PyObjectHolder(isDebug, pyGetAttr(globalDbg.ToPython(), "trace_dispatch")); + CHECK_NULL(traceFunc.ToPython(), "pydevd.GetGlobalDebugger().trace_dispatch returned null!\n", 14); + + DecRef(call(settrace.ToPython(), traceFunc.ToPython(), NULL), isDebug); + if(showDebugInfo){ + printf("sys.settrace(pydevd.GetGlobalDebugger().trace_dispatch) worked.\n"); + } + + return 0; +} diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/compile_mac.sh b/ptvsd/pydevd/pydevd_attach_to_process/linux/compile_mac.sh new file mode 100755 index 00000000..635330d7 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/compile_mac.sh @@ -0,0 +1,8 @@ +g++ -fPIC -D_REENTRANT -arch x86_64 I. -c -o attach_linux_x86_64.o attach_linux.c +g++ -dynamiclib -arch x86_64 -o attach_x86_64.dylib attach_linux_x86_64.o -lc + + +g++ -fPIC -D_REENTRANT -arch i386 -I. 
-c -o attach_linux_x86.o attach_linux.c +g++ -dynamiclib -arch i386 -o attach_x86.dylib attach_linux_x86.o -lc + + diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/compile_so.sh b/ptvsd/pydevd/pydevd_attach_to_process/linux/compile_so.sh new file mode 100755 index 00000000..1a043fb2 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/compile_so.sh @@ -0,0 +1,7 @@ +g++ -m64 -shared -o attach_linux_amd64.so -fPIC -nostartfiles attach_linux.c +mv attach_linux_amd64.so ../attach_linux_amd64.so + +echo Note: may need "sudo apt-get install libx32gcc-4.8-dev" and "sudo apt-get install libc6-dev-i386" and "sudo apt-get install g++-multilib" to compile 32 bits + +g++ -m32 -shared -o attach_linux_x86.so -fPIC -nostartfiles attach_linux.c +mv attach_linux_x86.so ../attach_linux_x86.so \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/gdb_threads_settrace.py b/ptvsd/pydevd/pydevd_attach_to_process/linux/gdb_threads_settrace.py new file mode 100644 index 00000000..48e3a7bc --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/gdb_threads_settrace.py @@ -0,0 +1,16 @@ +# This file is meant to be run inside GDB as a command after +# the attach_linux.so dll has already been loaded to settrace for all threads. +if __name__ == '__main__': + #print('Startup GDB in Python!') + + try: + show_debug_info = 0 + is_debug = 0 + for t in list(gdb.selected_inferior().threads()): + t.switch() + if t.is_stopped(): + #print('Will settrace in: %s' % (t,)) + gdb.execute("call SetSysTraceFunc(%s, %s)" % ( + show_debug_info, is_debug)) + except: + import traceback;traceback.print_exc() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/lldb_prepare.py b/ptvsd/pydevd/pydevd_attach_to_process/linux/lldb_prepare.py new file mode 100644 index 00000000..8a220542 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/lldb_prepare.py @@ -0,0 +1,54 @@ +# This file is meant to be run inside lldb +# It registers command to load library and invoke attach function +# Also it marks process threads to to distinguish them from debugger +# threads later while settings trace in threads + +def load_lib_and_attach(debugger, command, result, internal_dict): + import shlex + args = shlex.split(command) + + dll = args[0] + is_debug = args[1] + python_code = args[2] + show_debug_info = args[3] + + import lldb + options = lldb.SBExpressionOptions() + options.SetFetchDynamicValue() + options.SetTryAllThreads(run_others=False) + options.SetTimeoutInMicroSeconds(timeout=10000000) + + print(dll) + target = debugger.GetSelectedTarget() + res = target.EvaluateExpression("(void*)dlopen(\"%s\", 2);" % ( + dll), options) + error = res.GetError() + if error: + print(error) + + print(python_code) + res = target.EvaluateExpression("(int)DoAttach(%s, \"%s\", %s);" % ( + is_debug, python_code.replace('"', "'"), show_debug_info), options) + error = res.GetError() + if error: + print(error) + +def __lldb_init_module(debugger, internal_dict): + import lldb + + debugger.HandleCommand('command script add -f lldb_prepare.load_lib_and_attach load_lib_and_attach') + + try: + target = debugger.GetSelectedTarget() + if target: + process = target.GetProcess() + if process: + for thread in process: + # print('Marking process thread %d'%thread.GetThreadID()) + internal_dict['_thread_%d' % thread.GetThreadID()] = True + # thread.Suspend() + except: + import traceback;traceback.print_exc() + + + diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/lldb_threads_settrace.py 
b/ptvsd/pydevd/pydevd_attach_to_process/linux/lldb_threads_settrace.py new file mode 100644 index 00000000..e6ceb911 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/lldb_threads_settrace.py @@ -0,0 +1,52 @@ +# This file is meant to be run inside lldb as a command after +# the attach_linux.dylib dll has already been loaded to settrace for all threads. +def __lldb_init_module(debugger, internal_dict): + # Command Initialization code goes here + # print('Startup LLDB in Python!') + import lldb + + try: + show_debug_info = 1 + is_debug = 0 + + options = lldb.SBExpressionOptions() + options.SetFetchDynamicValue() + options.SetTryAllThreads(run_others=False) + options.SetTimeoutInMicroSeconds(timeout=10000000) + + target = debugger.GetSelectedTarget() + if target: + process = target.GetProcess() + if process: + for thread in process: + # Get the first frame + # print('Thread %s, suspended %s\n'%(thread, thread.IsStopped())) + + if internal_dict.get('_thread_%d' % thread.GetThreadID(), False): + process.SetSelectedThread(thread) + if not thread.IsStopped(): + # thread.Suspend() + error = process.Stop() + + frame = thread.GetSelectedFrame() + + if frame.GetFunctionName() == '__select': + # print('We are in __select') + # Step over select, otherwise evaluating expression there can terminate thread + thread.StepOver() + frame = thread.GetSelectedFrame() + + print('Will settrace in: %s' % (frame,)) + + for f in thread: + print(f) + + res = frame.EvaluateExpression("(int) SetSysTraceFunc(%s, %s)" % ( + show_debug_info, is_debug), options) + error = res.GetError() + if error: + print(error) + + thread.Resume() + except: + import traceback;traceback.print_exc() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/linux/python.h b/ptvsd/pydevd/pydevd_attach_to_process/linux/python.h new file mode 100644 index 00000000..93bfe6e4 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/linux/python.h @@ -0,0 +1,576 @@ +/* **************************************************************************** + * + * Copyright (c) Microsoft Corporation. + * + * This source code is subject to terms and conditions of the Apache License, Version 2.0. A + * copy of the license can be found in the License.html file at the root of this distribution. If + * you cannot locate the Apache License, Version 2.0, please send an email to + * vspython@microsoft.com. By using this source code in any fashion, you are agreeing to be bound + * by the terms of the Apache License, Version 2.0. + * + * You must not remove this notice, or any other, from this software. + * + * ***************************************************************************/ + +#ifndef __PYTHON_H__ +#define __PYTHON_H__ +#include + +// must be kept in sync with PythonLanguageVersion.cs +enum PythonVersion { + PythonVersion_Unknown, + PythonVersion_25 = 0x0205, + PythonVersion_26 = 0x0206, + PythonVersion_27 = 0x0207, + PythonVersion_30 = 0x0300, + PythonVersion_31 = 0x0301, + PythonVersion_32 = 0x0302, + PythonVersion_33 = 0x0303, + PythonVersion_34 = 0x0304 +}; + + +// defines limited header of Python API for compatible access across a number of Pythons. 
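+// Illustrative sketch (not part of the original header): the PythonVersion values
+// above pack the version as (major << 8) | minor, e.g. PythonVersion_27 == 0x0207,
+// so detected versions can be compared numerically:
+//
+//     PythonVersion v = GetPythonVersion();  // defined near the end of this header
+//     if (v != PythonVersion_Unknown && v >= PythonVersion_30) {
+//         // use the Python 3.x structure layouts declared below
+//     }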
+ +class PyTypeObject; +class PyThreadState; + +#define PyObject_HEAD \ + size_t ob_refcnt; \ + PyTypeObject *ob_type; + +#define PyObject_VAR_HEAD \ + PyObject_HEAD \ + size_t ob_size; /* Number of items in variable part */ + +class PyObject { +public: + PyObject_HEAD +}; + +class PyVarObject : public PyObject { +public: + size_t ob_size; /* Number of items in variable part */ +}; + +// 2.4 - 2.7 compatible +class PyCodeObject25_27 : public PyObject { +public: + int co_argcount; /* #arguments, except *args */ + int co_nlocals; /* #local variables */ + int co_stacksize; /* #entries needed for evaluation stack */ + int co_flags; /* CO_..., see below */ + PyObject *co_code; /* instruction opcodes */ + PyObject *co_consts; /* list (constants used) */ + PyObject *co_names; /* list of strings (names used) */ + PyObject *co_varnames; /* tuple of strings (local variable names) */ + PyObject *co_freevars; /* tuple of strings (free variable names) */ + PyObject *co_cellvars; /* tuple of strings (cell variable names) */ + /* The rest doesn't count for hash/cmp */ + PyObject *co_filename; /* string (where it was loaded from) */ + PyObject *co_name; /* string (name, for reference) */ + int co_firstlineno; /* first source line number */ + PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 2 && (minorVersion >= 5 && minorVersion <= 7); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_25 && version <= PythonVersion_27; + } +}; + +// 3.0-3.2 +class PyCodeObject30_32 : public PyObject { +public: + int co_argcount; /* #arguments, except *args */ + int co_kwonlyargcount; /* #keyword only arguments */ + int co_nlocals; /* #local variables */ + int co_stacksize; /* #entries needed for evaluation stack */ + int co_flags; /* CO_..., see below */ + PyObject *co_code; /* instruction opcodes */ + PyObject *co_consts; /* list (constants used) */ + PyObject *co_names; /* list of strings (names used) */ + PyObject *co_varnames; /* tuple of strings (local variable names) */ + PyObject *co_freevars; /* tuple of strings (free variable names) */ + PyObject *co_cellvars; /* tuple of strings (cell variable names) */ + /* The rest doesn't count for hash or comparisons */ + PyObject *co_filename; /* unicode (where it was loaded from) */ + PyObject *co_name; /* unicode (name, for reference) */ + int co_firstlineno; /* first source line number */ + PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + void *co_zombieframe; /* for optimization only (see frameobject.c) */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion >= 0 && minorVersion <= 2); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_30 && version <= PythonVersion_32; + } +}; + +// 3.3-3.4 +class PyCodeObject33_34 : public PyObject { +public: + int co_argcount; /* #arguments, except *args */ + int co_kwonlyargcount; /* #keyword only arguments */ + int co_nlocals; /* #local variables */ + int co_stacksize; /* #entries needed for evaluation stack */ + int co_flags; /* CO_..., see below */ + PyObject *co_code; /* instruction opcodes */ + PyObject *co_consts; /* list (constants used) */ + PyObject *co_names; /* list of strings (names used) */ + PyObject *co_varnames; /* tuple of strings (local variable names) */ + PyObject *co_freevars; /* tuple of strings (free variable names) */ + PyObject *co_cellvars; /* tuple of strings (cell 
variable names) */ + /* The rest doesn't count for hash or comparisons */ + unsigned char *co_cell2arg; /* Maps cell vars which are arguments. */ + PyObject *co_filename; /* unicode (where it was loaded from) */ + PyObject *co_name; /* unicode (name, for reference) */ + int co_firstlineno; /* first source line number */ + PyObject *co_lnotab; /* string (encoding addr<->lineno mapping) */ + void *co_zombieframe; /* for optimization only (see frameobject.c) */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion >= 3 && minorVersion <= 4); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_33 && version <= PythonVersion_34; + } +}; + +// 2.5 - 3.1 +class PyFunctionObject : public PyObject { +public: + PyObject *func_code; /* A code object */ +}; + +// 2.5 - 2.7 compatible +class PyStringObject : public PyVarObject { +public: + long ob_shash; + int ob_sstate; + char ob_sval[1]; + + /* Invariants: + * ob_sval contains space for 'ob_size+1' elements. + * ob_sval[ob_size] == 0. + * ob_shash is the hash of the string or -1 if not computed yet. + * ob_sstate != 0 iff the string object is in stringobject.c's + * 'interned' dictionary; in this case the two references + * from 'interned' to this object are *not counted* in ob_refcnt. + */ +}; + +// 2.4 - 3.2 compatible +typedef struct { + PyObject_HEAD + size_t length; /* Length of raw Unicode data in buffer */ + wchar_t *str; /* Raw Unicode buffer */ + long hash; /* Hash value; -1 if not set */ +} PyUnicodeObject; + +// 2.4 - 3.4 compatible +class PyFrameObject : public PyVarObject { +public: + PyFrameObject *f_back; /* previous frame, or NULL */ + PyObject *f_code; /* code segment */ + PyObject *f_builtins; /* builtin symbol table (PyDictObject) */ + PyObject *f_globals; /* global symbol table (PyDictObject) */ + PyObject *f_locals; /* local symbol table (any mapping) */ + PyObject **f_valuestack; /* points after the last local */ + /* Next free slot in f_valuestack. Frame creation sets to f_valuestack. + Frame evaluation usually NULLs it, but a frame that yields sets it + to the current stack top. */ + PyObject **f_stacktop; + PyObject *f_trace; /* Trace function */ + PyObject *f_exc_type, *f_exc_value, *f_exc_traceback; +}; + +#define CO_MAXBLOCKS 20 +typedef struct { + int b_type; /* what kind of block this is */ + int b_handler; /* where to jump to find handler */ + int b_level; /* value stack level to pop to */ +} PyTryBlock; + +class PyFrameObject25_33 : public PyFrameObject { +public: + PyThreadState* f_tstate; + int f_lasti; /* Last instruction if called */ + /* As of 2.3 f_lineno is only valid when tracing is active (i.e. when + f_trace is set) -- at other times use PyCode_Addr2Line instead. */ + int f_lineno; /* Current line number */ + int f_iblock; /* index in f_blockstack */ + PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */ + PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 2 && (minorVersion >= 5 && minorVersion <= 7) || + majorVersion == 3 && (minorVersion >= 0 && minorVersion <= 3); + } +}; + +class PyFrameObject34 : public PyFrameObject { +public: + /* Borrowed reference to a generator, or NULL */ + PyObject *f_gen; + + int f_lasti; /* Last instruction if called */ + /* As of 2.3 f_lineno is only valid when tracing is active (i.e. when + f_trace is set) -- at other times use PyCode_Addr2Line instead. 
*/ + int f_lineno; /* Current line number */ + int f_iblock; /* index in f_blockstack */ + char f_executing; /* whether the frame is still executing */ + PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */ + PyObject *f_localsplus[1]; /* locals+stack, dynamically sized */ + + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion == 4; + } +}; + + +typedef void (*destructor)(PyObject *); + +// 2.4 - 3.4 +class PyMethodDef { +public: + char *ml_name; /* The name of the built-in function/method */ +}; + + +// +// 2.4 - 3.4, 2.4 has different compat in 64-bit but we don't support any of the released 64-bit platforms (which includes only IA-64) +// While these are compatible there are fields only available on later versions. +class PyTypeObject : public PyVarObject { +public: + const char *tp_name; /* For printing, in format "." */ + size_t tp_basicsize, tp_itemsize; /* For allocation */ + + /* Methods to implement standard operations */ + + destructor tp_dealloc; + void* tp_print; + void* tp_getattr; + void* tp_setattr; + void* tp_compare; + void* tp_repr; + + /* Method suites for standard classes */ + + void *tp_as_number; + void*tp_as_sequence; + void*tp_as_mapping; + + /* More standard operations (here for binary compatibility) */ + + void* tp_hash; + void* tp_call; + void* tp_str; + void* tp_getattro; + void* tp_setattro; + + /* Functions to access object as input/output buffer */ + void*tp_as_buffer; + + /* Flags to define presence of optional/expanded features */ + long tp_flags; + + const char *tp_doc; /* Documentation string */ + + /* Assigned meaning in release 2.0 */ + /* call function for all accessible objects */ + void* tp_traverse; + + /* delete references to contained objects */ + void* tp_clear; + + /* Assigned meaning in release 2.1 */ + /* rich comparisons */ + void* tp_richcompare; + + /* weak reference enabler */ + size_t tp_weaklistoffset; + + /* Added in release 2.2 */ + /* Iterators */ + void* tp_iter; + void* tp_iternext; + + /* Attribute descriptor and subclassing stuff */ + PyMethodDef *tp_methods; + struct PyMemberDef *tp_members; + struct PyGetSetDef *tp_getset; + struct _typeobject *tp_base; + PyObject *tp_dict; + void* tp_descr_get; + void* tp_descr_set; + size_t tp_dictoffset; + void* tp_init; + void* tp_alloc; + void* tp_new; + void* tp_free; /* Low-level free-memory routine */ + void* tp_is_gc; /* For PyObject_IS_GC */ + PyObject *tp_bases; + PyObject *tp_mro; /* method resolution order */ + PyObject *tp_cache; + PyObject *tp_subclasses; + PyObject *tp_weaklist; + void* tp_del; + + /* Type attribute cache version tag. Added in version 2.6 */ + unsigned int tp_version_tag; +}; + +// 2.4 - 3.4 +class PyTupleObject : public PyVarObject { +public: + PyObject *ob_item[1]; + + /* ob_item contains space for 'ob_size' elements. + * Items must normally not be NULL, except during construction when + * the tuple is not yet visible outside the function that builds it. 
+ */ +}; + +// 2.4 - 3.4 +class PyCFunctionObject : public PyObject { +public: + PyMethodDef *m_ml; /* Description of the C function to call */ + PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ + PyObject *m_module; /* The __module__ attribute, can be anything */ +}; + +typedef int (*Py_tracefunc)(PyObject *, PyFrameObject *, int, PyObject *); + +#define PyTrace_CALL 0 +#define PyTrace_EXCEPTION 1 +#define PyTrace_LINE 2 +#define PyTrace_RETURN 3 +#define PyTrace_C_CALL 4 +#define PyTrace_C_EXCEPTION 5 +#define PyTrace_C_RETURN 6 + +class PyInterpreterState { +}; + +class PyThreadState { }; + +class PyThreadState_25_27 : public PyThreadState { +public: + /* See Python/ceval.c for comments explaining most fields */ + + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObject *frame; + int recursion_depth; + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + /* tick_counter is incremented whenever the check_interval ticker + * reaches zero. The purpose is to give a useful measure of the number + * of interpreted bytecode instructions in a given thread. This + * extremely lightweight statistic collector may be of interest to + * profilers (like psyco.jit()), although nothing in the core uses it. + */ + int tick_counter; + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 2 && (minorVersion >= 5 && minorVersion <= 7); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_25 && version <= PythonVersion_27; + } +}; + +class PyThreadState_30_33 : public PyThreadState { +public: + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObject *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + char recursion_critical; /* The current calls must not cause + a stack overflow. */ + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + /* tick_counter is incremented whenever the check_interval ticker + * reaches zero. The purpose is to give a useful measure of the number + * of interpreted bytecode instructions in a given thread. This + * extremely lightweight statistic collector may be of interest to + * profilers (like psyco.jit()), although nothing in the core uses it. 
+ */ + int tick_counter; + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && (minorVersion >= 0 && minorVersion <= 3); + } + + static bool IsFor(PythonVersion version) { + return version >= PythonVersion_30 && version <= PythonVersion_33; + } +}; + +class PyThreadState_34 : public PyThreadState { +public: + PyThreadState *prev; + PyThreadState *next; + PyInterpreterState *interp; + + PyFrameObject *frame; + int recursion_depth; + char overflowed; /* The stack has overflowed. Allow 50 more calls + to handle the runtime error. */ + char recursion_critical; /* The current calls must not cause + a stack overflow. */ + /* 'tracing' keeps track of the execution depth when tracing/profiling. + This is to prevent the actual trace/profile code from being recorded in + the trace/profile. */ + int tracing; + int use_tracing; + + Py_tracefunc c_profilefunc; + Py_tracefunc c_tracefunc; + PyObject *c_profileobj; + PyObject *c_traceobj; + + PyObject *curexc_type; + PyObject *curexc_value; + PyObject *curexc_traceback; + + PyObject *exc_type; + PyObject *exc_value; + PyObject *exc_traceback; + + PyObject *dict; /* Stores per-thread state */ + + int gilstate_counter; + + PyObject *async_exc; /* Asynchronous exception to raise */ + long thread_id; /* Thread id where this tstate was created */ + + /* XXX signal handlers should also be here */ + static bool IsFor(int majorVersion, int minorVersion) { + return majorVersion == 3 && minorVersion == 4; + } + + static bool IsFor(PythonVersion version) { + return version == PythonVersion_34; + } +}; + +class PyIntObject : public PyObject { +public: + long ob_ival; +}; + +//class Py3kLongObject : public PyVarObject { +//public: +// DWORD ob_digit[1]; +//}; + +class PyOldStyleClassObject : public PyObject { +public: + PyObject *cl_bases; /* A tuple of class objects */ + PyObject *cl_dict; /* A dictionary */ + PyObject *cl_name; /* A string */ + /* The following three are functions or NULL */ + PyObject *cl_getattr; + PyObject *cl_setattr; + PyObject *cl_delattr; +}; + +class PyInstanceObject : public PyObject { +public: + PyOldStyleClassObject *in_class; /* The class object */ + PyObject *in_dict; /* A dictionary */ + PyObject *in_weakreflist; /* List of weak references */ +}; + +typedef const char* (*GetVersionFunc) (); + +static PythonVersion GetPythonVersion() { + GetVersionFunc versionFunc; + void *main_hndl = dlopen(NULL, 0x2); + *(void**)(&versionFunc) = dlsym(main_hndl, "Py_GetVersion"); + if(versionFunc != NULL) { + const char* version = versionFunc(); + if(version != NULL && strlen(version) >= 3 && version[1] == '.') { + if(version[0] == '2') { + switch(version[2]) { + case '5': return PythonVersion_25; + case '6': return PythonVersion_26; + case '7': return PythonVersion_27; + } + } else if(version[0] == '3') { + switch(version[2]) { + case '0': return PythonVersion_30; + case '1': return PythonVersion_31; + case '2': return PythonVersion_32; + case '3': return PythonVersion_33; + case '4': return PythonVersion_34; + } + } + } + } + return PythonVersion_Unknown; +} + +#endif diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/__init__.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/__init__.py new file mode 100644 index 00000000..aa138ccf --- /dev/null +++ 
b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/__init__.py @@ -0,0 +1,263 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Windows application debugging engine for Python. + +by Mario Vilas (mvilas at gmail.com) + +Project: U{http://sourceforge.net/projects/winappdbg/} + +Web: U{http://winappdbg.sourceforge.net/} + +Blog: U{http://breakingcode.wordpress.com} + +@group Debugging: + Debug, EventHandler, EventSift, DebugLog + +@group Instrumentation: + System, Process, Thread, Module, Window, Registry + +@group Disassemblers: + Disassembler, + BeaEngine, DistormEngine, PyDasmEngine + +@group Crash reporting: + Crash, CrashDump, CrashDAO, CrashDictionary + +@group Memory search: + Search, + Pattern, + BytePattern, + TextPattern, + RegExpPattern, + HexPattern + +@group Debug events: + Event, + NoEvent, + CreateProcessEvent, + CreateThreadEvent, + ExitProcessEvent, + ExitThreadEvent, + LoadDLLEvent, + UnloadDLLEvent, + OutputDebugStringEvent, + RIPEvent, + ExceptionEvent + +@group Win32 API wrappers: + win32, Handle, ProcessHandle, ThreadHandle, FileHandle + +@group Helpers: + HexInput, HexOutput, HexDump, Color, Table, Logger, + PathOperations, + MemoryAddresses, + CustomAddressIterator, + DataAddressIterator, + ImageAddressIterator, + MappedAddressIterator, + ExecutableAddressIterator, + ReadableAddressIterator, + WriteableAddressIterator, + ExecutableAndWriteableAddressIterator, + DebugRegister, + Regenerator + +@group Warnings: + MixedBitsWarning, BreakpointWarning, BreakpointCallbackWarning, + EventCallbackWarning, DebugSymbolsWarning, CrashWarning + +@group Deprecated classes: + CrashContainer, CrashTable, CrashTableMSSQL, + VolatileCrashContainer, DummyCrashContainer + +@type version_number: float +@var version_number: This WinAppDbg major and minor version, + as a floating point number. Use this for compatibility checking. + +@type version: str +@var version: This WinAppDbg release version, + as a printable string. 
Use this to show to the user. + +@undocumented: plugins +""" + +__revision__ = "$Id$" + +# List of all public symbols +__all__ = [ + # Library version + 'version', + 'version_number', + + # from breakpoint import * +## 'Breakpoint', +## 'CodeBreakpoint', +## 'PageBreakpoint', +## 'HardwareBreakpoint', +## 'Hook', +## 'ApiHook', +## 'BufferWatch', + 'BreakpointWarning', + 'BreakpointCallbackWarning', + + # from crash import * + 'Crash', + 'CrashWarning', + 'CrashDictionary', + 'CrashContainer', + 'CrashTable', + 'CrashTableMSSQL', + 'VolatileCrashContainer', + 'DummyCrashContainer', + + # from debug import * + 'Debug', + 'MixedBitsWarning', + + # from disasm import * + 'Disassembler', + 'BeaEngine', + 'DistormEngine', + 'PyDasmEngine', + + # from event import * + 'EventHandler', + 'EventSift', +## 'EventFactory', +## 'EventDispatcher', + 'EventCallbackWarning', + 'Event', +## 'NoEvent', + 'CreateProcessEvent', + 'CreateThreadEvent', + 'ExitProcessEvent', + 'ExitThreadEvent', + 'LoadDLLEvent', + 'UnloadDLLEvent', + 'OutputDebugStringEvent', + 'RIPEvent', + 'ExceptionEvent', + + # from interactive import * +## 'ConsoleDebugger', + + # from module import * + 'Module', + 'DebugSymbolsWarning', + + # from process import * + 'Process', + + # from system import * + 'System', + + # from search import * + 'Search', + 'Pattern', + 'BytePattern', + 'TextPattern', + 'RegExpPattern', + 'HexPattern', + + # from registry import * + 'Registry', + + # from textio import * + 'HexDump', + 'HexInput', + 'HexOutput', + 'Color', + 'Table', + 'CrashDump', + 'DebugLog', + 'Logger', + + # from thread import * + 'Thread', + + # from util import * + 'PathOperations', + 'MemoryAddresses', + 'CustomAddressIterator', + 'DataAddressIterator', + 'ImageAddressIterator', + 'MappedAddressIterator', + 'ExecutableAddressIterator', + 'ReadableAddressIterator', + 'WriteableAddressIterator', + 'ExecutableAndWriteableAddressIterator', + 'DebugRegister', + + # from window import * + 'Window', + + # import win32 + 'win32', + + # from win32 import Handle, ProcessHandle, ThreadHandle, FileHandle + 'Handle', + 'ProcessHandle', + 'ThreadHandle', + 'FileHandle', + ] + +# Import all public symbols +from winappdbg.breakpoint import * +from winappdbg.crash import * +from winappdbg.debug import * +from winappdbg.disasm import * +from winappdbg.event import * +from winappdbg.interactive import * +from winappdbg.module import * +from winappdbg.process import * +from winappdbg.registry import * +from winappdbg.system import * +from winappdbg.search import * +from winappdbg.textio import * +from winappdbg.thread import * +from winappdbg.util import * +from winappdbg.window import * + +import winappdbg.win32 +from winappdbg.win32 import Handle, ProcessHandle, ThreadHandle, FileHandle + +try: + from sql import * + __all__.append('CrashDAO') +except ImportError: + import warnings + warnings.warn("No SQL database support present (missing dependencies?)", + ImportWarning) + +# Library version +version_number = 1.5 +version = "Version %s" % version_number diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py new file mode 100644 index 00000000..3b9ca73f --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/breakpoint.py @@ -0,0 +1,4822 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Breakpoints. + +@group Breakpoints: + Breakpoint, CodeBreakpoint, PageBreakpoint, HardwareBreakpoint, + BufferWatch, Hook, ApiHook + +@group Warnings: + BreakpointWarning, BreakpointCallbackWarning +""" + +__revision__ = "$Id$" + +__all__ = [ + + # Base class for breakpoints + 'Breakpoint', + + # Breakpoint implementations + 'CodeBreakpoint', + 'PageBreakpoint', + 'HardwareBreakpoint', + + # Hooks and watches + 'Hook', + 'ApiHook', + 'BufferWatch', + + # Warnings + 'BreakpointWarning', + 'BreakpointCallbackWarning', + + ] + +from winappdbg import win32 +from winappdbg import compat +import sys +from winappdbg.process import Process, Thread +from winappdbg.util import DebugRegister, MemoryAddresses +from winappdbg.textio import HexDump + +import ctypes +import warnings +import traceback + +#============================================================================== + +class BreakpointWarning (UserWarning): + """ + This warning is issued when a non-fatal error occurs that's related to + breakpoints. + """ + +class BreakpointCallbackWarning (RuntimeWarning): + """ + This warning is issued when an uncaught exception was raised by a + breakpoint's user-defined callback. + """ + +#============================================================================== + +class Breakpoint (object): + """ + Base class for breakpoints. + Here's the breakpoints state machine. 
+ + @see: L{CodeBreakpoint}, L{PageBreakpoint}, L{HardwareBreakpoint} + + @group Breakpoint states: + DISABLED, ENABLED, ONESHOT, RUNNING + @group State machine: + hit, disable, enable, one_shot, running, + is_disabled, is_enabled, is_one_shot, is_running, + get_state, get_state_name + @group Information: + get_address, get_size, get_span, is_here + @group Conditional breakpoints: + is_conditional, is_unconditional, + get_condition, set_condition, eval_condition + @group Automatic breakpoints: + is_automatic, is_interactive, + get_action, set_action, run_action + + @cvar DISABLED: I{Disabled} S{->} Enabled, OneShot + @cvar ENABLED: I{Enabled} S{->} I{Running}, Disabled + @cvar ONESHOT: I{OneShot} S{->} I{Disabled} + @cvar RUNNING: I{Running} S{->} I{Enabled}, Disabled + + @type DISABLED: int + @type ENABLED: int + @type ONESHOT: int + @type RUNNING: int + + @type stateNames: dict E{lb} int S{->} str E{rb} + @cvar stateNames: User-friendly names for each breakpoint state. + + @type typeName: str + @cvar typeName: User friendly breakpoint type string. + """ + + # I don't think transitions Enabled <-> OneShot should be allowed... plus + # it would require special handling to avoid setting the same bp twice + + DISABLED = 0 + ENABLED = 1 + ONESHOT = 2 + RUNNING = 3 + + typeName = 'breakpoint' + + stateNames = { + DISABLED : 'disabled', + ENABLED : 'enabled', + ONESHOT : 'one shot', + RUNNING : 'running', + } + + def __init__(self, address, size = 1, condition = True, action = None): + """ + Breakpoint object. + + @type address: int + @param address: Memory address for breakpoint. + + @type size: int + @param size: Size of breakpoint in bytes (defaults to 1). + + @type condition: function + @param condition: (Optional) Condition callback function. + + The callback signature is:: + + def condition_callback(event): + return True # returns True or False + + Where B{event} is an L{Event} object, + and the return value is a boolean + (C{True} to dispatch the event, C{False} otherwise). + + @type action: function + @param action: (Optional) Action callback function. + If specified, the event is handled by this callback instead of + being dispatched normally. + + The callback signature is:: + + def action_callback(event): + pass # no return value + + Where B{event} is an L{Event} object. + """ + self.__address = address + self.__size = size + self.__state = self.DISABLED + + self.set_condition(condition) + self.set_action(action) + + def __repr__(self): + if self.is_disabled(): + state = 'Disabled' + else: + state = 'Active (%s)' % self.get_state_name() + if self.is_conditional(): + condition = 'conditional' + else: + condition = 'unconditional' + name = self.typeName + size = self.get_size() + if size == 1: + address = HexDump.address( self.get_address() ) + else: + begin = self.get_address() + end = begin + size + begin = HexDump.address(begin) + end = HexDump.address(end) + address = "range %s-%s" % (begin, end) + msg = "<%s %s %s at remote address %s>" + msg = msg % (state, condition, name, address) + return msg + +#------------------------------------------------------------------------------ + + def is_disabled(self): + """ + @rtype: bool + @return: C{True} if the breakpoint is in L{DISABLED} state. + """ + return self.get_state() == self.DISABLED + + def is_enabled(self): + """ + @rtype: bool + @return: C{True} if the breakpoint is in L{ENABLED} state. 
+ """ + return self.get_state() == self.ENABLED + + def is_one_shot(self): + """ + @rtype: bool + @return: C{True} if the breakpoint is in L{ONESHOT} state. + """ + return self.get_state() == self.ONESHOT + + def is_running(self): + """ + @rtype: bool + @return: C{True} if the breakpoint is in L{RUNNING} state. + """ + return self.get_state() == self.RUNNING + + def is_here(self, address): + """ + @rtype: bool + @return: C{True} if the address is within the range of the breakpoint. + """ + begin = self.get_address() + end = begin + self.get_size() + return begin <= address < end + + def get_address(self): + """ + @rtype: int + @return: The target memory address for the breakpoint. + """ + return self.__address + + def get_size(self): + """ + @rtype: int + @return: The size in bytes of the breakpoint. + """ + return self.__size + + def get_span(self): + """ + @rtype: tuple( int, int ) + @return: + Starting and ending address of the memory range + covered by the breakpoint. + """ + address = self.get_address() + size = self.get_size() + return ( address, address + size ) + + def get_state(self): + """ + @rtype: int + @return: The current state of the breakpoint + (L{DISABLED}, L{ENABLED}, L{ONESHOT}, L{RUNNING}). + """ + return self.__state + + def get_state_name(self): + """ + @rtype: str + @return: The name of the current state of the breakpoint. + """ + return self.stateNames[ self.get_state() ] + +#------------------------------------------------------------------------------ + + def is_conditional(self): + """ + @see: L{__init__} + @rtype: bool + @return: C{True} if the breakpoint has a condition callback defined. + """ + # Do not evaluate as boolean! Test for identity with True instead. + return self.__condition is not True + + def is_unconditional(self): + """ + @rtype: bool + @return: C{True} if the breakpoint doesn't have a condition callback defined. + """ + # Do not evaluate as boolean! Test for identity with True instead. + return self.__condition is True + + def get_condition(self): + """ + @rtype: bool, function + @return: Returns the condition callback for conditional breakpoints. + Returns C{True} for unconditional breakpoints. + """ + return self.__condition + + def set_condition(self, condition = True): + """ + Sets a new condition callback for the breakpoint. + + @see: L{__init__} + + @type condition: function + @param condition: (Optional) Condition callback function. + """ + if condition is None: + self.__condition = True + else: + self.__condition = condition + + def eval_condition(self, event): + """ + Evaluates the breakpoint condition, if any was set. + + @type event: L{Event} + @param event: Debug event triggered by the breakpoint. + + @rtype: bool + @return: C{True} to dispatch the event, C{False} otherwise. + """ + condition = self.get_condition() + if condition is True: # shortcut for unconditional breakpoints + return True + if callable(condition): + try: + return bool( condition(event) ) + except Exception: + e = sys.exc_info()[1] + msg = ("Breakpoint condition callback %r" + " raised an exception: %s") + msg = msg % (condition, traceback.format_exc(e)) + warnings.warn(msg, BreakpointCallbackWarning) + return False + return bool( condition ) # force evaluation now + +#------------------------------------------------------------------------------ + + def is_automatic(self): + """ + @rtype: bool + @return: C{True} if the breakpoint has an action callback defined. 
+ """ + return self.__action is not None + + def is_interactive(self): + """ + @rtype: bool + @return: + C{True} if the breakpoint doesn't have an action callback defined. + """ + return self.__action is None + + def get_action(self): + """ + @rtype: bool, function + @return: Returns the action callback for automatic breakpoints. + Returns C{None} for interactive breakpoints. + """ + return self.__action + + def set_action(self, action = None): + """ + Sets a new action callback for the breakpoint. + + @type action: function + @param action: (Optional) Action callback function. + """ + self.__action = action + + def run_action(self, event): + """ + Executes the breakpoint action callback, if any was set. + + @type event: L{Event} + @param event: Debug event triggered by the breakpoint. + """ + action = self.get_action() + if action is not None: + try: + return bool( action(event) ) + except Exception: + e = sys.exc_info()[1] + msg = ("Breakpoint action callback %r" + " raised an exception: %s") + msg = msg % (action, traceback.format_exc(e)) + warnings.warn(msg, BreakpointCallbackWarning) + return False + return True + +#------------------------------------------------------------------------------ + + def __bad_transition(self, state): + """ + Raises an C{AssertionError} exception for an invalid state transition. + + @see: L{stateNames} + + @type state: int + @param state: Intended breakpoint state. + + @raise Exception: Always. + """ + statemsg = "" + oldState = self.stateNames[ self.get_state() ] + newState = self.stateNames[ state ] + msg = "Invalid state transition (%s -> %s)" \ + " for breakpoint at address %s" + msg = msg % (oldState, newState, HexDump.address(self.get_address())) + raise AssertionError(msg) + + def disable(self, aProcess, aThread): + """ + Transition to L{DISABLED} state. + - When hit: OneShot S{->} Disabled + - Forced by user: Enabled, OneShot, Running S{->} Disabled + - Transition from running state may require special handling + by the breakpoint implementation class. + + @type aProcess: L{Process} + @param aProcess: Process object. + + @type aThread: L{Thread} + @param aThread: Thread object. + """ +## if self.__state not in (self.ENABLED, self.ONESHOT, self.RUNNING): +## self.__bad_transition(self.DISABLED) + self.__state = self.DISABLED + + def enable(self, aProcess, aThread): + """ + Transition to L{ENABLED} state. + - When hit: Running S{->} Enabled + - Forced by user: Disabled, Running S{->} Enabled + - Transition from running state may require special handling + by the breakpoint implementation class. + + @type aProcess: L{Process} + @param aProcess: Process object. + + @type aThread: L{Thread} + @param aThread: Thread object. + """ +## if self.__state not in (self.DISABLED, self.RUNNING): +## self.__bad_transition(self.ENABLED) + self.__state = self.ENABLED + + def one_shot(self, aProcess, aThread): + """ + Transition to L{ONESHOT} state. + - Forced by user: Disabled S{->} OneShot + + @type aProcess: L{Process} + @param aProcess: Process object. + + @type aThread: L{Thread} + @param aThread: Thread object. + """ +## if self.__state != self.DISABLED: +## self.__bad_transition(self.ONESHOT) + self.__state = self.ONESHOT + + def running(self, aProcess, aThread): + """ + Transition to L{RUNNING} state. + - When hit: Enabled S{->} Running + + @type aProcess: L{Process} + @param aProcess: Process object. + + @type aThread: L{Thread} + @param aThread: Thread object. 
+ """ + if self.__state != self.ENABLED: + self.__bad_transition(self.RUNNING) + self.__state = self.RUNNING + + def hit(self, event): + """ + Notify a breakpoint that it's been hit. + + This triggers the corresponding state transition and sets the + C{breakpoint} property of the given L{Event} object. + + @see: L{disable}, L{enable}, L{one_shot}, L{running} + + @type event: L{Event} + @param event: Debug event to handle (depends on the breakpoint type). + + @raise AssertionError: Disabled breakpoints can't be hit. + """ + aProcess = event.get_process() + aThread = event.get_thread() + state = self.get_state() + + event.breakpoint = self + + if state == self.ENABLED: + self.running(aProcess, aThread) + + elif state == self.RUNNING: + self.enable(aProcess, aThread) + + elif state == self.ONESHOT: + self.disable(aProcess, aThread) + + elif state == self.DISABLED: + # this should not happen + msg = "Hit a disabled breakpoint at address %s" + msg = msg % HexDump.address( self.get_address() ) + warnings.warn(msg, BreakpointWarning) + +#============================================================================== + +# XXX TODO +# Check if the user is trying to set a code breakpoint on a memory mapped file, +# so we don't end up writing the int3 instruction in the file by accident. + +class CodeBreakpoint (Breakpoint): + """ + Code execution breakpoints (using an int3 opcode). + + @see: L{Debug.break_at} + + @type bpInstruction: str + @cvar bpInstruction: Breakpoint instruction for the current processor. + """ + + typeName = 'code breakpoint' + + if win32.arch in (win32.ARCH_I386, win32.ARCH_AMD64): + bpInstruction = '\xCC' # int 3 + + def __init__(self, address, condition = True, action = None): + """ + Code breakpoint object. + + @see: L{Breakpoint.__init__} + + @type address: int + @param address: Memory address for breakpoint. + + @type condition: function + @param condition: (Optional) Condition callback function. + + @type action: function + @param action: (Optional) Action callback function. + """ + if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64): + msg = "Code breakpoints not supported for %s" % win32.arch + raise NotImplementedError(msg) + Breakpoint.__init__(self, address, len(self.bpInstruction), + condition, action) + self.__previousValue = self.bpInstruction + + def __set_bp(self, aProcess): + """ + Writes a breakpoint instruction at the target address. + + @type aProcess: L{Process} + @param aProcess: Process object. + """ + address = self.get_address() + self.__previousValue = aProcess.read(address, len(self.bpInstruction)) + if self.__previousValue == self.bpInstruction: + msg = "Possible overlapping code breakpoints at %s" + msg = msg % HexDump.address(address) + warnings.warn(msg, BreakpointWarning) + aProcess.write(address, self.bpInstruction) + + def __clear_bp(self, aProcess): + """ + Restores the original byte at the target address. + + @type aProcess: L{Process} + @param aProcess: Process object. + """ + address = self.get_address() + currentValue = aProcess.read(address, len(self.bpInstruction)) + if currentValue == self.bpInstruction: + # Only restore the previous value if the int3 is still there. 
+            aProcess.write(self.get_address(), self.__previousValue) + else: + self.__previousValue = currentValue + msg = "Overwritten code breakpoint at %s" + msg = msg % HexDump.address(address) + warnings.warn(msg, BreakpointWarning) + + def disable(self, aProcess, aThread): + if not self.is_disabled() and not self.is_running(): + self.__clear_bp(aProcess) + super(CodeBreakpoint, self).disable(aProcess, aThread) + + def enable(self, aProcess, aThread): + if not self.is_enabled() and not self.is_one_shot(): + self.__set_bp(aProcess) + super(CodeBreakpoint, self).enable(aProcess, aThread) + + def one_shot(self, aProcess, aThread): + if not self.is_enabled() and not self.is_one_shot(): + self.__set_bp(aProcess) + super(CodeBreakpoint, self).one_shot(aProcess, aThread) + + # FIXME race condition here (however unlikely) + # If another thread runs over the target address while + # the breakpoint is in RUNNING state, we'll miss it. There + # is a solution to this but it's somewhat complicated, so + # I'm leaving it for another version of the debugger. :( + def running(self, aProcess, aThread): + if self.is_enabled(): + self.__clear_bp(aProcess) + aThread.set_tf() + super(CodeBreakpoint, self).running(aProcess, aThread) + +#============================================================================== + +# TODO: +# * If the original page was already a guard page, the exception should be +# passed to the debuggee instead of being handled by the debugger. +# * If the original page was already a guard page, it should NOT be converted +# to a no-access page when disabling the breakpoint. +# * If the page permissions were modified after the breakpoint was enabled, +# no change should be done on them when disabling the breakpoint. For this +# we need to remember the original page permissions instead of blindly +# setting and clearing the guard page bit on them. +# * Some pages seem to be "magic" and resist all attempts at changing their +# protect bits (for example the pages where the PEB and TEB reside). Maybe +# a more descriptive error message could be shown in this case. + +class PageBreakpoint (Breakpoint): + """ + Page access breakpoint (using guard pages). + + @see: L{Debug.watch_buffer} + + @group Information: + get_size_in_pages + """ + + typeName = 'page breakpoint' + +#------------------------------------------------------------------------------ + + def __init__(self, address, pages = 1, condition = True, action = None): + """ + Page breakpoint object. + + @see: L{Breakpoint.__init__} + + @type address: int + @param address: Memory address for breakpoint. + + @type pages: int + @param pages: Size of breakpoint in pages. + + @type condition: function + @param condition: (Optional) Condition callback function. + + @type action: function + @param action: (Optional) Action callback function. + """ + Breakpoint.__init__(self, address, pages * MemoryAddresses.pageSize, + condition, action) +## if (address & 0x00000FFF) != 0: + floordiv_align = long(address) // long(MemoryAddresses.pageSize) + truediv_align = float(address) / float(MemoryAddresses.pageSize) + if floordiv_align != truediv_align: + msg = "Address of page breakpoint " \ + "must be aligned to a page size boundary " \ + "(value %s received)" % HexDump.address(address) + raise ValueError(msg) + + def get_size_in_pages(self): + """ + @rtype: int + @return: The size in pages of the breakpoint. + """ + # The size is always a multiple of the page size.
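+        # For example (illustrative note, not in the original source): a breakpoint
+        # created with pages = 2 on x86, where MemoryAddresses.pageSize is typically
+        # 0x1000, has get_size() == 0x2000 and so returns 2 here.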
+ return self.get_size() // MemoryAddresses.pageSize + + def __set_bp(self, aProcess): + """ + Sets the target pages as guard pages. + + @type aProcess: L{Process} + @param aProcess: Process object. + """ + lpAddress = self.get_address() + dwSize = self.get_size() + flNewProtect = aProcess.mquery(lpAddress).Protect + flNewProtect = flNewProtect | win32.PAGE_GUARD + aProcess.mprotect(lpAddress, dwSize, flNewProtect) + + def __clear_bp(self, aProcess): + """ + Restores the original permissions of the target pages. + + @type aProcess: L{Process} + @param aProcess: Process object. + """ + lpAddress = self.get_address() + flNewProtect = aProcess.mquery(lpAddress).Protect + flNewProtect = flNewProtect & (0xFFFFFFFF ^ win32.PAGE_GUARD) # DWORD + aProcess.mprotect(lpAddress, self.get_size(), flNewProtect) + + def disable(self, aProcess, aThread): + if not self.is_disabled(): + self.__clear_bp(aProcess) + super(PageBreakpoint, self).disable(aProcess, aThread) + + def enable(self, aProcess, aThread): + if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64): + msg = "Only one-shot page breakpoints are supported for %s" + raise NotImplementedError(msg % win32.arch) + if not self.is_enabled() and not self.is_one_shot(): + self.__set_bp(aProcess) + super(PageBreakpoint, self).enable(aProcess, aThread) + + def one_shot(self, aProcess, aThread): + if not self.is_enabled() and not self.is_one_shot(): + self.__set_bp(aProcess) + super(PageBreakpoint, self).one_shot(aProcess, aThread) + + def running(self, aProcess, aThread): + aThread.set_tf() + super(PageBreakpoint, self).running(aProcess, aThread) + +#============================================================================== + +class HardwareBreakpoint (Breakpoint): + """ + Hardware breakpoint (using debug registers). + + @see: L{Debug.watch_variable} + + @group Information: + get_slot, get_trigger, get_watch + + @group Trigger flags: + BREAK_ON_EXECUTION, BREAK_ON_WRITE, BREAK_ON_ACCESS + + @group Watch size flags: + WATCH_BYTE, WATCH_WORD, WATCH_DWORD, WATCH_QWORD + + @type BREAK_ON_EXECUTION: int + @cvar BREAK_ON_EXECUTION: Break on execution. + + @type BREAK_ON_WRITE: int + @cvar BREAK_ON_WRITE: Break on write. + + @type BREAK_ON_ACCESS: int + @cvar BREAK_ON_ACCESS: Break on read or write. + + @type WATCH_BYTE: int + @cvar WATCH_BYTE: Watch a byte. + + @type WATCH_WORD: int + @cvar WATCH_WORD: Watch a word (2 bytes). + + @type WATCH_DWORD: int + @cvar WATCH_DWORD: Watch a double word (4 bytes). + + @type WATCH_QWORD: int + @cvar WATCH_QWORD: Watch one quad word (8 bytes). + + @type validTriggers: tuple + @cvar validTriggers: Valid trigger flag values. + + @type validWatchSizes: tuple + @cvar validWatchSizes: Valid watch flag values. + """ + + typeName = 'hardware breakpoint' + + BREAK_ON_EXECUTION = DebugRegister.BREAK_ON_EXECUTION + BREAK_ON_WRITE = DebugRegister.BREAK_ON_WRITE + BREAK_ON_ACCESS = DebugRegister.BREAK_ON_ACCESS + + WATCH_BYTE = DebugRegister.WATCH_BYTE + WATCH_WORD = DebugRegister.WATCH_WORD + WATCH_DWORD = DebugRegister.WATCH_DWORD + WATCH_QWORD = DebugRegister.WATCH_QWORD + + validTriggers = ( + BREAK_ON_EXECUTION, + BREAK_ON_WRITE, + BREAK_ON_ACCESS, + ) + + validWatchSizes = ( + WATCH_BYTE, + WATCH_WORD, + WATCH_DWORD, + WATCH_QWORD, + ) + + def __init__(self, address, triggerFlag = BREAK_ON_ACCESS, + sizeFlag = WATCH_DWORD, + condition = True, + action = None): + """ + Hardware breakpoint object. + + @see: L{Breakpoint.__init__} + + @type address: int + @param address: Memory address for breakpoint. 
+ +    @type triggerFlag: int +    @param triggerFlag: Trigger of breakpoint. Must be one of the following: + + - L{BREAK_ON_EXECUTION} + + Break on code execution. + + - L{BREAK_ON_WRITE} + + Break on memory write. + + - L{BREAK_ON_ACCESS} + + Break on memory read or write. + + @type sizeFlag: int + @param sizeFlag: Size of breakpoint. Must be one of the following: + + - L{WATCH_BYTE} + + One (1) byte in size. + + - L{WATCH_WORD} + + Two (2) bytes in size. + + - L{WATCH_DWORD} + + Four (4) bytes in size. + + - L{WATCH_QWORD} + + Eight (8) bytes in size. + + @type condition: function + @param condition: (Optional) Condition callback function. + + @type action: function + @param action: (Optional) Action callback function. + """ + if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64): + msg = "Hardware breakpoints not supported for %s" % win32.arch + raise NotImplementedError(msg) + if sizeFlag == self.WATCH_BYTE: + size = 1 + elif sizeFlag == self.WATCH_WORD: + size = 2 + elif sizeFlag == self.WATCH_DWORD: + size = 4 + elif sizeFlag == self.WATCH_QWORD: + size = 8 + else: + msg = "Invalid size flag for hardware breakpoint (%s)" + msg = msg % repr(sizeFlag) + raise ValueError(msg) + + if triggerFlag not in self.validTriggers: + msg = "Invalid trigger flag for hardware breakpoint (%s)" + msg = msg % repr(triggerFlag) + raise ValueError(msg) + + Breakpoint.__init__(self, address, size, condition, action) + self.__trigger = triggerFlag + self.__watch = sizeFlag + self.__slot = None + + def __clear_bp(self, aThread): + """ + Clears this breakpoint from the debug registers. + + @type aThread: L{Thread} + @param aThread: Thread object. + """ + if self.__slot is not None: + aThread.suspend() + try: + ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS) + DebugRegister.clear_bp(ctx, self.__slot) + aThread.set_context(ctx) + self.__slot = None + finally: + aThread.resume() + + def __set_bp(self, aThread): + """ + Sets this breakpoint in the debug registers. + + @type aThread: L{Thread} + @param aThread: Thread object. + """ + if self.__slot is None: + aThread.suspend() + try: + ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS) + self.__slot = DebugRegister.find_slot(ctx) + if self.__slot is None: + msg = "No available hardware breakpoint slots for thread ID %d" + msg = msg % aThread.get_tid() + raise RuntimeError(msg) + DebugRegister.set_bp(ctx, self.__slot, self.get_address(), + self.__trigger, self.__watch) + aThread.set_context(ctx) + finally: + aThread.resume() + + def get_slot(self): + """ + @rtype: int + @return: The debug register number used by this breakpoint, + or C{None} if the breakpoint is not active. + """ + return self.__slot + + def get_trigger(self): + """ + @see: L{validTriggers} + @rtype: int + @return: The breakpoint trigger flag. + """ + return self.__trigger + + def get_watch(self): + """ + @see: L{validWatchSizes} + @rtype: int + @return: The breakpoint watch flag.
+ """ + return self.__watch + + def disable(self, aProcess, aThread): + if not self.is_disabled(): + self.__clear_bp(aThread) + super(HardwareBreakpoint, self).disable(aProcess, aThread) + + def enable(self, aProcess, aThread): + if not self.is_enabled() and not self.is_one_shot(): + self.__set_bp(aThread) + super(HardwareBreakpoint, self).enable(aProcess, aThread) + + def one_shot(self, aProcess, aThread): + if not self.is_enabled() and not self.is_one_shot(): + self.__set_bp(aThread) + super(HardwareBreakpoint, self).one_shot(aProcess, aThread) + + def running(self, aProcess, aThread): + self.__clear_bp(aThread) + super(HardwareBreakpoint, self).running(aProcess, aThread) + aThread.set_tf() + +#============================================================================== + +# XXX FIXME +# +# The implementation of function hooks is very simple. A breakpoint is set at +# the entry point. Each time it's hit the "pre" callback is executed. If a +# "post" callback was defined, a one-shot breakpoint is set at the return +# address - and when that breakpoint hits, the "post" callback is executed. +# +# Functions hooks, as they are implemented now, don't work correctly for +# recursive functions. The problem is we don't know when to remove the +# breakpoint at the return address. Also there could be more than one return +# address. +# +# One possible solution would involve a dictionary of lists, where the key +# would be the thread ID and the value a stack of return addresses. But we +# still don't know what to do if the "wrong" return address is hit for some +# reason (maybe check the stack pointer?). Or if both a code and a hardware +# breakpoint are hit simultaneously. +# +# For now, the workaround for the user is to set only the "pre" callback for +# functions that are known to be recursive. +# +# If an exception is thrown by a hooked function and caught by one of it's +# parent functions, the "post" callback won't be called and weird stuff may +# happen. A possible solution is to put a breakpoint in the system call that +# unwinds the stack, to detect this case and remove the "post" breakpoint. +# +# Hooks may also behave oddly if the return address is overwritten by a buffer +# overflow bug (this is similar to the exception problem). But it's probably a +# minor issue since when you're fuzzing a function for overflows you're usually +# not interested in the return value anyway. + +# TODO: an API to modify the hooked function's arguments + +class Hook (object): + """ + Factory class to produce hook objects. Used by L{Debug.hook_function} and + L{Debug.stalk_function}. + + When you try to instance this class, one of the architecture specific + implementations is returned instead. + + Instances act as an action callback for code breakpoints set at the + beginning of a function. It automatically retrieves the parameters from + the stack, sets a breakpoint at the return address and retrieves the + return value from the function call. + + @see: L{_Hook_i386}, L{_Hook_amd64} + + @type useHardwareBreakpoints: bool + @cvar useHardwareBreakpoints: C{True} to try to use hardware breakpoints, + C{False} otherwise. + """ + + # This is a factory class that returns + # the architecture specific implementation. 
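+    # Illustrative usage sketch (not part of the original source): hooks are
+    # normally created indirectly, e.g. through Debug.hook_function, with
+    # callbacks shaped as described in __init__ below:
+    #
+    #     def pre_LoadLibraryEx(event, ra, lpFilename, hFile, dwFlags):
+    #         print(event.get_process().peek_string(lpFilename))
+    #
+    #     def post_LoadLibraryEx(event, retval):
+    #         print(retval)
+    #
+    #     debug.hook_function(pid, address, preCB=pre_LoadLibraryEx,
+    #                         postCB=post_LoadLibraryEx, paramCount=3)
+    #
+    # Here pid and address are assumed to be the target process ID and the
+    # hooked function's entry point address.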
+ def __new__(cls, *argv, **argd): + try: + arch = argd['arch'] + del argd['arch'] + except KeyError: + try: + arch = argv[4] + argv = argv[:4] + argv[5:] + except IndexError: + raise TypeError("Missing 'arch' argument!") + if arch is None: + arch = win32.arch + if arch == win32.ARCH_I386: + return _Hook_i386(*argv, **argd) + if arch == win32.ARCH_AMD64: + return _Hook_amd64(*argv, **argd) + return object.__new__(cls, *argv, **argd) + + # XXX FIXME + # + # Hardware breakpoints don't work correctly (or al all) in old VirtualBox + # versions (3.0 and below). + # + # Maybe there should be a way to autodetect the buggy VirtualBox versions + # and tell Hook objects not to use hardware breakpoints? + # + # For now the workaround is to manually set this variable to True when + # WinAppDbg is installed on a physical machine. + # + useHardwareBreakpoints = False + + def __init__(self, preCB = None, postCB = None, + paramCount = None, signature = None, + arch = None): + """ + @type preCB: function + @param preCB: (Optional) Callback triggered on function entry. + + The signature for the callback should be something like this:: + + def pre_LoadLibraryEx(event, ra, lpFilename, hFile, dwFlags): + + # return address + ra = params[0] + + # function arguments start from here... + szFilename = event.get_process().peek_string(lpFilename) + + # (...) + + Note that all pointer types are treated like void pointers, so your + callback won't get the string or structure pointed to by it, but + the remote memory address instead. This is so to prevent the ctypes + library from being "too helpful" and trying to dereference the + pointer. To get the actual data being pointed to, use one of the + L{Process.read} methods. + + @type postCB: function + @param postCB: (Optional) Callback triggered on function exit. + + The signature for the callback should be something like this:: + + def post_LoadLibraryEx(event, return_value): + + # (...) + + @type paramCount: int + @param paramCount: + (Optional) Number of parameters for the C{preCB} callback, + not counting the return address. Parameters are read from + the stack and assumed to be DWORDs in 32 bits and QWORDs in 64. + + This is a faster way to pull stack parameters in 32 bits, but in 64 + bits (or with some odd APIs in 32 bits) it won't be useful, since + not all arguments to the hooked function will be of the same size. + + For a more reliable and cross-platform way of hooking use the + C{signature} argument instead. + + @type signature: tuple + @param signature: + (Optional) Tuple of C{ctypes} data types that constitute the + hooked function signature. When the function is called, this will + be used to parse the arguments from the stack. Overrides the + C{paramCount} argument. + + @type arch: str + @param arch: (Optional) Target architecture. Defaults to the current + architecture. See: L{win32.arch} + """ + self.__preCB = preCB + self.__postCB = postCB + self.__paramStack = dict() # tid -> list of tuple( arg, arg, arg... 
) + + self._paramCount = paramCount + + if win32.arch != win32.ARCH_I386: + self.useHardwareBreakpoints = False + + if win32.bits == 64 and paramCount and not signature: + signature = (win32.QWORD,) * paramCount + + if signature: + self._signature = self._calc_signature(signature) + else: + self._signature = None + + def _cast_signature_pointers_to_void(self, signature): + c_void_p = ctypes.c_void_p + c_char_p = ctypes.c_char_p + c_wchar_p = ctypes.c_wchar_p + _Pointer = ctypes._Pointer + cast = ctypes.cast + for i in compat.xrange(len(signature)): + t = signature[i] + if t is not c_void_p and (issubclass(t, _Pointer) \ + or t in [c_char_p, c_wchar_p]): + signature[i] = cast(t, c_void_p) + + def _calc_signature(self, signature): + raise NotImplementedError( + "Hook signatures are not supported for architecture: %s" \ + % win32.arch) + + def _get_return_address(self, aProcess, aThread): + return None + + def _get_function_arguments(self, aProcess, aThread): + if self._signature or self._paramCount: + raise NotImplementedError( + "Hook signatures are not supported for architecture: %s" \ + % win32.arch) + return () + + def _get_return_value(self, aThread): + return None + + # By using break_at() to set a process-wide breakpoint on the function's + # return address, we might hit a race condition when more than one thread + # is being debugged. + # + # Hardware breakpoints should be used instead. But since a thread can run + # out of those, we need to fall back to this method when needed. + + def __call__(self, event): + """ + Handles the breakpoint event on entry of the function. + + @type event: L{ExceptionEvent} + @param event: Breakpoint hit event. + + @raise WindowsError: An error occured. + """ + debug = event.debug + + dwProcessId = event.get_pid() + dwThreadId = event.get_tid() + aProcess = event.get_process() + aThread = event.get_thread() + + # Get the return address and function arguments. + ra = self._get_return_address(aProcess, aThread) + params = self._get_function_arguments(aProcess, aThread) + + # Keep the function arguments for later use. + self.__push_params(dwThreadId, params) + + # If we need to hook the return from the function... + bHookedReturn = False + if ra is not None and self.__postCB is not None: + + # Try to set a one shot hardware breakpoint at the return address. + useHardwareBreakpoints = self.useHardwareBreakpoints + if useHardwareBreakpoints: + try: + debug.define_hardware_breakpoint( + dwThreadId, + ra, + event.debug.BP_BREAK_ON_EXECUTION, + event.debug.BP_WATCH_BYTE, + True, + self.__postCallAction_hwbp + ) + debug.enable_one_shot_hardware_breakpoint(dwThreadId, ra) + bHookedReturn = True + except Exception: + e = sys.exc_info()[1] + useHardwareBreakpoints = False + msg = ("Failed to set hardware breakpoint" + " at address %s for thread ID %d") + msg = msg % (HexDump.address(ra), dwThreadId) + warnings.warn(msg, BreakpointWarning) + + # If not possible, set a code breakpoint instead. + if not useHardwareBreakpoints: + try: + debug.break_at(dwProcessId, ra, + self.__postCallAction_codebp) + bHookedReturn = True + except Exception: + e = sys.exc_info()[1] + msg = ("Failed to set code breakpoint" + " at address %s for process ID %d") + msg = msg % (HexDump.address(ra), dwProcessId) + warnings.warn(msg, BreakpointWarning) + + # Call the "pre" callback. + try: + self.__callHandler(self.__preCB, event, ra, *params) + + # If no "post" callback is defined, forget the function arguments. 
+ finally: + if not bHookedReturn: + self.__pop_params(dwThreadId) + + def __postCallAction_hwbp(self, event): + """ + Handles hardware breakpoint events on return from the function. + + @type event: L{ExceptionEvent} + @param event: Single step event. + """ + + # Remove the one shot hardware breakpoint + # at the return address location in the stack. + tid = event.get_tid() + address = event.breakpoint.get_address() + event.debug.erase_hardware_breakpoint(tid, address) + + # Call the "post" callback. + try: + self.__postCallAction(event) + + # Forget the parameters. + finally: + self.__pop_params(tid) + + def __postCallAction_codebp(self, event): + """ + Handles code breakpoint events on return from the function. + + @type event: L{ExceptionEvent} + @param event: Breakpoint hit event. + """ + + # If the breakpoint was accidentally hit by another thread, + # pass it to the debugger instead of calling the "post" callback. + # + # XXX FIXME: + # I suppose this check will fail under some weird conditions... + # + tid = event.get_tid() + if tid not in self.__paramStack: + return True + + # Remove the code breakpoint at the return address. + pid = event.get_pid() + address = event.breakpoint.get_address() + event.debug.dont_break_at(pid, address) + + # Call the "post" callback. + try: + self.__postCallAction(event) + + # Forget the parameters. + finally: + self.__pop_params(tid) + + def __postCallAction(self, event): + """ + Calls the "post" callback. + + @type event: L{ExceptionEvent} + @param event: Breakpoint hit event. + """ + aThread = event.get_thread() + retval = self._get_return_value(aThread) + self.__callHandler(self.__postCB, event, retval) + + def __callHandler(self, callback, event, *params): + """ + Calls a "pre" or "post" handler, if set. + + @type callback: function + @param callback: Callback function to call. + + @type event: L{ExceptionEvent} + @param event: Breakpoint hit event. + + @type params: tuple + @param params: Parameters for the callback function. + """ + if callback is not None: + event.hook = self + callback(event, *params) + + def __push_params(self, tid, params): + """ + Remembers the arguments tuple for the last call to the hooked function + from this thread. + + @type tid: int + @param tid: Thread global ID. + + @type params: tuple( arg, arg, arg... ) + @param params: Tuple of arguments. + """ + stack = self.__paramStack.get( tid, [] ) + stack.append(params) + self.__paramStack[tid] = stack + + def __pop_params(self, tid): + """ + Forgets the arguments tuple for the last call to the hooked function + from this thread. + + @type tid: int + @param tid: Thread global ID. + """ + stack = self.__paramStack[tid] + stack.pop() + if not stack: + del self.__paramStack[tid] + + def get_params(self, tid): + """ + Returns the parameters found in the stack when the hooked function + was last called by this thread. + + @type tid: int + @param tid: Thread global ID. + + @rtype: tuple( arg, arg, arg... ) + @return: Tuple of arguments. + """ + try: + params = self.get_params_stack(tid)[-1] + except IndexError: + msg = "Hooked function called from thread %d already returned" + raise IndexError(msg % tid) + return params + + def get_params_stack(self, tid): + """ + Returns the parameters found in the stack each time the hooked function + was called by this thread and hasn't returned yet. + + @type tid: int + @param tid: Thread global ID. + + @rtype: list of tuple( arg, arg, arg... ) + @return: List of argument tuples. 
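+
+            The last tuple on this stack is what L{get_params} returns. For
+            example, a "post" callback can recover the arguments captured on
+            entry like this (sketch; C{event.hook} is set by the breakpoint
+            callback before the handler runs, and the callback name is
+            illustrative)::
+
+                def post_LoadLibraryEx(event, return_value):
+                    lpFilename, hFile, dwFlags = \
+                        event.hook.get_params(event.get_tid())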
+ """ + try: + stack = self.__paramStack[tid] + except KeyError: + msg = "Hooked function was not called from thread %d" + raise KeyError(msg % tid) + return stack + + def hook(self, debug, pid, address): + """ + Installs the function hook at a given process and address. + + @see: L{unhook} + + @warning: Do not call from an function hook callback. + + @type debug: L{Debug} + @param debug: Debug object. + + @type pid: int + @param pid: Process ID. + + @type address: int + @param address: Function address. + """ + return debug.break_at(pid, address, self) + + def unhook(self, debug, pid, address): + """ + Removes the function hook at a given process and address. + + @see: L{hook} + + @warning: Do not call from an function hook callback. + + @type debug: L{Debug} + @param debug: Debug object. + + @type pid: int + @param pid: Process ID. + + @type address: int + @param address: Function address. + """ + return debug.dont_break_at(pid, address) + +class _Hook_i386 (Hook): + """ + Implementation details for L{Hook} on the L{win32.ARCH_I386} architecture. + """ + + # We don't want to inherit the parent class __new__ method. + __new__ = object.__new__ + + def _calc_signature(self, signature): + self._cast_signature_pointers_to_void(signature) + class Arguments (ctypes.Structure): + _fields_ = [ ("arg_%s" % i, signature[i]) \ + for i in compat.xrange(len(signature) - 1, -1, -1) ] + return Arguments + + def _get_return_address(self, aProcess, aThread): + return aProcess.read_pointer( aThread.get_sp() ) + + def _get_function_arguments(self, aProcess, aThread): + if self._signature: + params = aThread.read_stack_structure(self._signature, + offset = win32.sizeof(win32.LPVOID)) + elif self._paramCount: + params = aThread.read_stack_dwords(self._paramCount, + offset = win32.sizeof(win32.LPVOID)) + else: + params = () + return params + + def _get_return_value(self, aThread): + ctx = aThread.get_context(win32.CONTEXT_INTEGER) + return ctx['Eax'] + +class _Hook_amd64 (Hook): + """ + Implementation details for L{Hook} on the L{win32.ARCH_AMD64} architecture. + """ + + # We don't want to inherit the parent class __new__ method. + __new__ = object.__new__ + + # Make a list of floating point types. + __float_types = ( + ctypes.c_double, + ctypes.c_float, + ) + # Long doubles are not supported in old versions of ctypes! 
+ try: + __float_types += (ctypes.c_longdouble,) + except AttributeError: + pass + + def _calc_signature(self, signature): + self._cast_signature_pointers_to_void(signature) + + float_types = self.__float_types + c_sizeof = ctypes.sizeof + reg_size = c_sizeof(ctypes.c_size_t) + + reg_int_sig = [] + reg_float_sig = [] + stack_sig = [] + + for i in compat.xrange(len(signature)): + arg = signature[i] + name = "arg_%d" % i + stack_sig.insert( 0, (name, arg) ) + if i < 4: + if type(arg) in float_types: + reg_float_sig.append( (name, arg) ) + elif c_sizeof(arg) <= reg_size: + reg_int_sig.append( (name, arg) ) + else: + msg = ("Hook signatures don't support structures" + " within the first 4 arguments of a function" + " for the %s architecture") % win32.arch + raise NotImplementedError(msg) + + if reg_int_sig: + class RegisterArguments (ctypes.Structure): + _fields_ = reg_int_sig + else: + RegisterArguments = None + if reg_float_sig: + class FloatArguments (ctypes.Structure): + _fields_ = reg_float_sig + else: + FloatArguments = None + if stack_sig: + class StackArguments (ctypes.Structure): + _fields_ = stack_sig + else: + StackArguments = None + + return (len(signature), + RegisterArguments, + FloatArguments, + StackArguments) + + def _get_return_address(self, aProcess, aThread): + return aProcess.read_pointer( aThread.get_sp() ) + + def _get_function_arguments(self, aProcess, aThread): + if self._signature: + (args_count, + RegisterArguments, + FloatArguments, + StackArguments) = self._signature + arguments = {} + if StackArguments: + address = aThread.get_sp() + win32.sizeof(win32.LPVOID) + stack_struct = aProcess.read_structure(address, + StackArguments) + stack_args = dict( + [ (name, stack_struct.__getattribute__(name)) + for (name, type) in stack_struct._fields_ ] + ) + arguments.update(stack_args) + flags = 0 + if RegisterArguments: + flags = flags | win32.CONTEXT_INTEGER + if FloatArguments: + flags = flags | win32.CONTEXT_MMX_REGISTERS + if flags: + ctx = aThread.get_context(flags) + if RegisterArguments: + buffer = (win32.QWORD * 4)(ctx['Rcx'], ctx['Rdx'], + ctx['R8'], ctx['R9']) + reg_args = self._get_arguments_from_buffer(buffer, + RegisterArguments) + arguments.update(reg_args) + if FloatArguments: + buffer = (win32.M128A * 4)(ctx['XMM0'], ctx['XMM1'], + ctx['XMM2'], ctx['XMM3']) + float_args = self._get_arguments_from_buffer(buffer, + FloatArguments) + arguments.update(float_args) + params = tuple( [ arguments["arg_%d" % i] + for i in compat.xrange(args_count) ] ) + else: + params = () + return params + + def _get_arguments_from_buffer(self, buffer, structure): + b_ptr = ctypes.pointer(buffer) + v_ptr = ctypes.cast(b_ptr, ctypes.c_void_p) + s_ptr = ctypes.cast(v_ptr, ctypes.POINTER(structure)) + struct = s_ptr.contents + return dict( + [ (name, struct.__getattribute__(name)) + for (name, type) in struct._fields_ ] + ) + + def _get_return_value(self, aThread): + ctx = aThread.get_context(win32.CONTEXT_INTEGER) + return ctx['Rax'] + +#------------------------------------------------------------------------------ + +# This class acts as a factory of Hook objects, one per target process. +# Said objects are deleted by the unhook() method. + +class ApiHook (object): + """ + Used by L{EventHandler}. + + This class acts as an action callback for code breakpoints set at the + beginning of a function. It automatically retrieves the parameters from + the stack, sets a breakpoint at the return address and retrieves the + return value from the function call. 
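+
+    Sketch of direct use (normally L{EventHandler.apiHooks} builds these for
+    you; the names below are illustrative, C{handler} is an event handler
+    object defining C{pre_LoadLibraryExW} / C{post_LoadLibraryExW}, and
+    C{debug} / C{pid} refer to an attached debugger and target process)::
+
+        api_hook = ApiHook(handler, 'kernel32', 'LoadLibraryExW',
+                           paramCount = 3)
+        api_hook.hook(debug, pid)       # resolves "kernel32!LoadLibraryExW"
+        # ...
+        api_hook.unhook(debug, pid)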
+ + @see: L{EventHandler.apiHooks} + + @type modName: str + @ivar modName: Module name. + + @type procName: str + @ivar procName: Procedure name. + """ + + def __init__(self, eventHandler, modName, procName, paramCount = None, + signature = None): + """ + @type eventHandler: L{EventHandler} + @param eventHandler: Event handler instance. This is where the hook + callbacks are to be defined (see below). + + @type modName: str + @param modName: Module name. + + @type procName: str + @param procName: Procedure name. + The pre and post callbacks will be deduced from it. + + For example, if the procedure is "LoadLibraryEx" the callback + routines will be "pre_LoadLibraryEx" and "post_LoadLibraryEx". + + The signature for the callbacks should be something like this:: + + def pre_LoadLibraryEx(self, event, ra, lpFilename, hFile, dwFlags): + + # return address + ra = params[0] + + # function arguments start from here... + szFilename = event.get_process().peek_string(lpFilename) + + # (...) + + def post_LoadLibraryEx(self, event, return_value): + + # (...) + + Note that all pointer types are treated like void pointers, so your + callback won't get the string or structure pointed to by it, but + the remote memory address instead. This is so to prevent the ctypes + library from being "too helpful" and trying to dereference the + pointer. To get the actual data being pointed to, use one of the + L{Process.read} methods. + + @type paramCount: int + @param paramCount: + (Optional) Number of parameters for the C{preCB} callback, + not counting the return address. Parameters are read from + the stack and assumed to be DWORDs in 32 bits and QWORDs in 64. + + This is a faster way to pull stack parameters in 32 bits, but in 64 + bits (or with some odd APIs in 32 bits) it won't be useful, since + not all arguments to the hooked function will be of the same size. + + For a more reliable and cross-platform way of hooking use the + C{signature} argument instead. + + @type signature: tuple + @param signature: + (Optional) Tuple of C{ctypes} data types that constitute the + hooked function signature. When the function is called, this will + be used to parse the arguments from the stack. Overrides the + C{paramCount} argument. + """ + self.__modName = modName + self.__procName = procName + self.__paramCount = paramCount + self.__signature = signature + self.__preCB = getattr(eventHandler, 'pre_%s' % procName, None) + self.__postCB = getattr(eventHandler, 'post_%s' % procName, None) + self.__hook = dict() + + def __call__(self, event): + """ + Handles the breakpoint event on entry of the function. + + @type event: L{ExceptionEvent} + @param event: Breakpoint hit event. + + @raise WindowsError: An error occured. + """ + pid = event.get_pid() + try: + hook = self.__hook[pid] + except KeyError: + hook = Hook(self.__preCB, self.__postCB, + self.__paramCount, self.__signature, + event.get_process().get_arch() ) + self.__hook[pid] = hook + return hook(event) + + @property + def modName(self): + return self.__modName + + @property + def procName(self): + return self.__procName + + def hook(self, debug, pid): + """ + Installs the API hook on a given process and module. + + @warning: Do not call from an API hook callback. + + @type debug: L{Debug} + @param debug: Debug object. + + @type pid: int + @param pid: Process ID. 
+ """ + label = "%s!%s" % (self.__modName, self.__procName) + try: + hook = self.__hook[pid] + except KeyError: + try: + aProcess = debug.system.get_process(pid) + except KeyError: + aProcess = Process(pid) + hook = Hook(self.__preCB, self.__postCB, + self.__paramCount, self.__signature, + aProcess.get_arch() ) + self.__hook[pid] = hook + hook.hook(debug, pid, label) + + def unhook(self, debug, pid): + """ + Removes the API hook from the given process and module. + + @warning: Do not call from an API hook callback. + + @type debug: L{Debug} + @param debug: Debug object. + + @type pid: int + @param pid: Process ID. + """ + try: + hook = self.__hook[pid] + except KeyError: + return + label = "%s!%s" % (self.__modName, self.__procName) + hook.unhook(debug, pid, label) + del self.__hook[pid] + +#============================================================================== + +class BufferWatch (object): + """ + Returned by L{Debug.watch_buffer}. + + This object uniquely references a buffer being watched, even if there are + multiple watches set on the exact memory region. + + @type pid: int + @ivar pid: Process ID. + + @type start: int + @ivar start: Memory address of the start of the buffer. + + @type end: int + @ivar end: Memory address of the end of the buffer. + + @type action: callable + @ivar action: Action callback. + + @type oneshot: bool + @ivar oneshot: C{True} for one shot breakpoints, C{False} otherwise. + """ + + def __init__(self, pid, start, end, action = None, oneshot = False): + self.__pid = pid + self.__start = start + self.__end = end + self.__action = action + self.__oneshot = oneshot + + @property + def pid(self): + return self.__pid + + @property + def start(self): + return self.__start + + @property + def end(self): + return self.__end + + @property + def action(self): + return self.__action + + @property + def oneshot(self): + return self.__oneshot + + def match(self, address): + """ + Determine if the given memory address lies within the watched buffer. + + @rtype: bool + @return: C{True} if the given memory address lies within the watched + buffer, C{False} otherwise. + """ + return self.__start <= address < self.__end + +#============================================================================== + +class _BufferWatchCondition (object): + """ + Used by L{Debug.watch_buffer}. + + This class acts as a condition callback for page breakpoints. + It emulates page breakpoints that can overlap and/or take up less + than a page's size. + """ + + def __init__(self): + self.__ranges = list() # list of BufferWatch in definition order + + def add(self, bw): + """ + Adds a buffer watch identifier. + + @type bw: L{BufferWatch} + @param bw: + Buffer watch identifier. + """ + self.__ranges.append(bw) + + def remove(self, bw): + """ + Removes a buffer watch identifier. + + @type bw: L{BufferWatch} + @param bw: + Buffer watch identifier. + + @raise KeyError: The buffer watch identifier was already removed. + """ + try: + self.__ranges.remove(bw) + except KeyError: + if not bw.oneshot: + raise + + def remove_last_match(self, address, size): + """ + Removes the last buffer from the watch object + to match the given address and size. + + @type address: int + @param address: Memory address of buffer to stop watching. + + @type size: int + @param size: Size in bytes of buffer to stop watching. + + @rtype: int + @return: Number of matching elements found. Only the last one to be + added is actually deleted upon calling this method. 
+ + This counter allows you to know if there are more matching elements + and how many. + """ + count = 0 + start = address + end = address + size - 1 + matched = None + for item in self.__ranges: + if item.match(start) and item.match(end): + matched = item + count += 1 + self.__ranges.remove(matched) + return count + + def count(self): + """ + @rtype: int + @return: Number of buffers being watched. + """ + return len(self.__ranges) + + def __call__(self, event): + """ + Breakpoint condition callback. + + This method will also call the action callbacks for each + buffer being watched. + + @type event: L{ExceptionEvent} + @param event: Guard page exception event. + + @rtype: bool + @return: C{True} if the address being accessed belongs + to at least one of the buffers that was being watched + and had no action callback. + """ + address = event.get_exception_information(1) + bCondition = False + for bw in self.__ranges: + bMatched = bw.match(address) + try: + action = bw.action + if bMatched and action is not None: + try: + action(event) + except Exception: + e = sys.exc_info()[1] + msg = ("Breakpoint action callback %r" + " raised an exception: %s") + msg = msg % (action, traceback.format_exc(e)) + warnings.warn(msg, BreakpointCallbackWarning) + else: + bCondition = bCondition or bMatched + finally: + if bMatched and bw.oneshot: + event.debug.dont_watch_buffer(bw) + return bCondition + +#============================================================================== + +class _BreakpointContainer (object): + """ + Encapsulates the capability to contain Breakpoint objects. + + @group Breakpoints: + break_at, watch_variable, watch_buffer, hook_function, + dont_break_at, dont_watch_variable, dont_watch_buffer, + dont_hook_function, unhook_function, + break_on_error, dont_break_on_error + + @group Stalking: + stalk_at, stalk_variable, stalk_buffer, stalk_function, + dont_stalk_at, dont_stalk_variable, dont_stalk_buffer, + dont_stalk_function + + @group Tracing: + is_tracing, get_traced_tids, + start_tracing, stop_tracing, + start_tracing_process, stop_tracing_process, + start_tracing_all, stop_tracing_all + + @group Symbols: + resolve_label, resolve_exported_function + + @group Advanced breakpoint use: + define_code_breakpoint, + define_page_breakpoint, + define_hardware_breakpoint, + has_code_breakpoint, + has_page_breakpoint, + has_hardware_breakpoint, + get_code_breakpoint, + get_page_breakpoint, + get_hardware_breakpoint, + erase_code_breakpoint, + erase_page_breakpoint, + erase_hardware_breakpoint, + enable_code_breakpoint, + enable_page_breakpoint, + enable_hardware_breakpoint, + enable_one_shot_code_breakpoint, + enable_one_shot_page_breakpoint, + enable_one_shot_hardware_breakpoint, + disable_code_breakpoint, + disable_page_breakpoint, + disable_hardware_breakpoint + + @group Listing breakpoints: + get_all_breakpoints, + get_all_code_breakpoints, + get_all_page_breakpoints, + get_all_hardware_breakpoints, + get_process_breakpoints, + get_process_code_breakpoints, + get_process_page_breakpoints, + get_process_hardware_breakpoints, + get_thread_hardware_breakpoints, + get_all_deferred_code_breakpoints, + get_process_deferred_code_breakpoints + + @group Batch operations on breakpoints: + enable_all_breakpoints, + enable_one_shot_all_breakpoints, + disable_all_breakpoints, + erase_all_breakpoints, + enable_process_breakpoints, + enable_one_shot_process_breakpoints, + disable_process_breakpoints, + erase_process_breakpoints + + @group Breakpoint types: + BP_TYPE_ANY, BP_TYPE_CODE, 
BP_TYPE_PAGE, BP_TYPE_HARDWARE + @group Breakpoint states: + BP_STATE_DISABLED, BP_STATE_ENABLED, BP_STATE_ONESHOT, BP_STATE_RUNNING + @group Memory breakpoint trigger flags: + BP_BREAK_ON_EXECUTION, BP_BREAK_ON_WRITE, BP_BREAK_ON_ACCESS + @group Memory breakpoint size flags: + BP_WATCH_BYTE, BP_WATCH_WORD, BP_WATCH_DWORD, BP_WATCH_QWORD + + @type BP_TYPE_ANY: int + @cvar BP_TYPE_ANY: To get all breakpoints + @type BP_TYPE_CODE: int + @cvar BP_TYPE_CODE: To get code breakpoints only + @type BP_TYPE_PAGE: int + @cvar BP_TYPE_PAGE: To get page breakpoints only + @type BP_TYPE_HARDWARE: int + @cvar BP_TYPE_HARDWARE: To get hardware breakpoints only + + @type BP_STATE_DISABLED: int + @cvar BP_STATE_DISABLED: Breakpoint is disabled. + @type BP_STATE_ENABLED: int + @cvar BP_STATE_ENABLED: Breakpoint is enabled. + @type BP_STATE_ONESHOT: int + @cvar BP_STATE_ONESHOT: Breakpoint is enabled for one shot. + @type BP_STATE_RUNNING: int + @cvar BP_STATE_RUNNING: Breakpoint is running (recently hit). + + @type BP_BREAK_ON_EXECUTION: int + @cvar BP_BREAK_ON_EXECUTION: Break on code execution. + @type BP_BREAK_ON_WRITE: int + @cvar BP_BREAK_ON_WRITE: Break on memory write. + @type BP_BREAK_ON_ACCESS: int + @cvar BP_BREAK_ON_ACCESS: Break on memory read or write. + """ + + # Breakpoint types + BP_TYPE_ANY = 0 # to get all breakpoints + BP_TYPE_CODE = 1 + BP_TYPE_PAGE = 2 + BP_TYPE_HARDWARE = 3 + + # Breakpoint states + BP_STATE_DISABLED = Breakpoint.DISABLED + BP_STATE_ENABLED = Breakpoint.ENABLED + BP_STATE_ONESHOT = Breakpoint.ONESHOT + BP_STATE_RUNNING = Breakpoint.RUNNING + + # Memory breakpoint trigger flags + BP_BREAK_ON_EXECUTION = HardwareBreakpoint.BREAK_ON_EXECUTION + BP_BREAK_ON_WRITE = HardwareBreakpoint.BREAK_ON_WRITE + BP_BREAK_ON_ACCESS = HardwareBreakpoint.BREAK_ON_ACCESS + + # Memory breakpoint size flags + BP_WATCH_BYTE = HardwareBreakpoint.WATCH_BYTE + BP_WATCH_WORD = HardwareBreakpoint.WATCH_WORD + BP_WATCH_QWORD = HardwareBreakpoint.WATCH_QWORD + BP_WATCH_DWORD = HardwareBreakpoint.WATCH_DWORD + + def __init__(self): + self.__codeBP = dict() # (pid, address) -> CodeBreakpoint + self.__pageBP = dict() # (pid, address) -> PageBreakpoint + self.__hardwareBP = dict() # tid -> [ HardwareBreakpoint ] + self.__runningBP = dict() # tid -> set( Breakpoint ) + self.__tracing = set() # set( tid ) + self.__deferredBP = dict() # pid -> label -> (action, oneshot) + +#------------------------------------------------------------------------------ + + # This operates on the dictionary of running breakpoints. + # Since the bps are meant to stay alive no cleanup is done here. + + def __get_running_bp_set(self, tid): + "Auxiliary method." + return self.__runningBP.get(tid, ()) + + def __add_running_bp(self, tid, bp): + "Auxiliary method." + if tid not in self.__runningBP: + self.__runningBP[tid] = set() + self.__runningBP[tid].add(bp) + + def __del_running_bp(self, tid, bp): + "Auxiliary method." + self.__runningBP[tid].remove(bp) + if not self.__runningBP[tid]: + del self.__runningBP[tid] + + def __del_running_bp_from_all_threads(self, bp): + "Auxiliary method." + for (tid, bpset) in compat.iteritems(self.__runningBP): + if bp in bpset: + bpset.remove(bp) + self.system.get_thread(tid).clear_tf() + +#------------------------------------------------------------------------------ + + # This is the cleanup code. Mostly called on response to exit/unload debug + # events. If possible it shouldn't raise exceptions on runtime errors. + # The main goal here is to avoid memory or handle leaks. 
+ + def __cleanup_breakpoint(self, event, bp): + "Auxiliary method." + try: + process = event.get_process() + thread = event.get_thread() + bp.disable(process, thread) # clear the debug regs / trap flag + except Exception: + pass + bp.set_condition(True) # break possible circular reference + bp.set_action(None) # break possible circular reference + + def __cleanup_thread(self, event): + """ + Auxiliary method for L{_notify_exit_thread} + and L{_notify_exit_process}. + """ + tid = event.get_tid() + + # Cleanup running breakpoints + try: + for bp in self.__runningBP[tid]: + self.__cleanup_breakpoint(event, bp) + del self.__runningBP[tid] + except KeyError: + pass + + # Cleanup hardware breakpoints + try: + for bp in self.__hardwareBP[tid]: + self.__cleanup_breakpoint(event, bp) + del self.__hardwareBP[tid] + except KeyError: + pass + + # Cleanup set of threads being traced + if tid in self.__tracing: + self.__tracing.remove(tid) + + def __cleanup_process(self, event): + """ + Auxiliary method for L{_notify_exit_process}. + """ + pid = event.get_pid() + process = event.get_process() + + # Cleanup code breakpoints + for (bp_pid, bp_address) in compat.keys(self.__codeBP): + if bp_pid == pid: + bp = self.__codeBP[ (bp_pid, bp_address) ] + self.__cleanup_breakpoint(event, bp) + del self.__codeBP[ (bp_pid, bp_address) ] + + # Cleanup page breakpoints + for (bp_pid, bp_address) in compat.keys(self.__pageBP): + if bp_pid == pid: + bp = self.__pageBP[ (bp_pid, bp_address) ] + self.__cleanup_breakpoint(event, bp) + del self.__pageBP[ (bp_pid, bp_address) ] + + # Cleanup deferred code breakpoints + try: + del self.__deferredBP[pid] + except KeyError: + pass + + def __cleanup_module(self, event): + """ + Auxiliary method for L{_notify_unload_dll}. + """ + pid = event.get_pid() + process = event.get_process() + module = event.get_module() + + # Cleanup thread breakpoints on this module + for tid in process.iter_thread_ids(): + thread = process.get_thread(tid) + + # Running breakpoints + if tid in self.__runningBP: + bplist = list(self.__runningBP[tid]) + for bp in bplist: + bp_address = bp.get_address() + if process.get_module_at_address(bp_address) == module: + self.__cleanup_breakpoint(event, bp) + self.__runningBP[tid].remove(bp) + + # Hardware breakpoints + if tid in self.__hardwareBP: + bplist = list(self.__hardwareBP[tid]) + for bp in bplist: + bp_address = bp.get_address() + if process.get_module_at_address(bp_address) == module: + self.__cleanup_breakpoint(event, bp) + self.__hardwareBP[tid].remove(bp) + + # Cleanup code breakpoints on this module + for (bp_pid, bp_address) in compat.keys(self.__codeBP): + if bp_pid == pid: + if process.get_module_at_address(bp_address) == module: + bp = self.__codeBP[ (bp_pid, bp_address) ] + self.__cleanup_breakpoint(event, bp) + del self.__codeBP[ (bp_pid, bp_address) ] + + # Cleanup page breakpoints on this module + for (bp_pid, bp_address) in compat.keys(self.__pageBP): + if bp_pid == pid: + if process.get_module_at_address(bp_address) == module: + bp = self.__pageBP[ (bp_pid, bp_address) ] + self.__cleanup_breakpoint(event, bp) + del self.__pageBP[ (bp_pid, bp_address) ] + +#------------------------------------------------------------------------------ + + # Defining breakpoints. + + # Code breakpoints. + def define_code_breakpoint(self, dwProcessId, address, condition = True, + action = None): + """ + Creates a disabled code breakpoint at the given address. 
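+
+        A typical sequence is to define the breakpoint and then enable it
+        (sketch; assumes C{debug} is a L{Debug} instance attached to process
+        C{pid} and C{address} was resolved beforehand)::
+
+            debug.define_code_breakpoint(pid, address)
+            debug.enable_code_breakpoint(pid, address)
+            # ... later, to remove it completely:
+            debug.erase_code_breakpoint(pid, address)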
+ + @see: + L{has_code_breakpoint}, + L{get_code_breakpoint}, + L{enable_code_breakpoint}, + L{enable_one_shot_code_breakpoint}, + L{disable_code_breakpoint}, + L{erase_code_breakpoint} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of the code instruction to break at. + + @type condition: function + @param condition: (Optional) Condition callback function. + + The callback signature is:: + + def condition_callback(event): + return True # returns True or False + + Where B{event} is an L{Event} object, + and the return value is a boolean + (C{True} to dispatch the event, C{False} otherwise). + + @type action: function + @param action: (Optional) Action callback function. + If specified, the event is handled by this callback instead of + being dispatched normally. + + The callback signature is:: + + def action_callback(event): + pass # no return value + + Where B{event} is an L{Event} object, + and the return value is a boolean + (C{True} to dispatch the event, C{False} otherwise). + + @rtype: L{CodeBreakpoint} + @return: The code breakpoint object. + """ + process = self.system.get_process(dwProcessId) + bp = CodeBreakpoint(address, condition, action) + + key = (dwProcessId, bp.get_address()) + if key in self.__codeBP: + msg = "Already exists (PID %d) : %r" + raise KeyError(msg % (dwProcessId, self.__codeBP[key])) + self.__codeBP[key] = bp + return bp + + # Page breakpoints. + def define_page_breakpoint(self, dwProcessId, address, pages = 1, + condition = True, + action = None): + """ + Creates a disabled page breakpoint at the given address. + + @see: + L{has_page_breakpoint}, + L{get_page_breakpoint}, + L{enable_page_breakpoint}, + L{enable_one_shot_page_breakpoint}, + L{disable_page_breakpoint}, + L{erase_page_breakpoint} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of the first page to watch. + + @type pages: int + @param pages: Number of pages to watch. + + @type condition: function + @param condition: (Optional) Condition callback function. + + The callback signature is:: + + def condition_callback(event): + return True # returns True or False + + Where B{event} is an L{Event} object, + and the return value is a boolean + (C{True} to dispatch the event, C{False} otherwise). + + @type action: function + @param action: (Optional) Action callback function. + If specified, the event is handled by this callback instead of + being dispatched normally. + + The callback signature is:: + + def action_callback(event): + pass # no return value + + Where B{event} is an L{Event} object, + and the return value is a boolean + (C{True} to dispatch the event, C{False} otherwise). + + @rtype: L{PageBreakpoint} + @return: The page breakpoint object. + """ + process = self.system.get_process(dwProcessId) + bp = PageBreakpoint(address, pages, condition, action) + begin = bp.get_address() + end = begin + bp.get_size() + + address = begin + pageSize = MemoryAddresses.pageSize + while address < end: + key = (dwProcessId, address) + if key in self.__pageBP: + msg = "Already exists (PID %d) : %r" + msg = msg % (dwProcessId, self.__pageBP[key]) + raise KeyError(msg) + address = address + pageSize + + address = begin + while address < end: + key = (dwProcessId, address) + self.__pageBP[key] = bp + address = address + pageSize + return bp + + # Hardware breakpoints. 
+    def define_hardware_breakpoint(self, dwThreadId, address,
+                                   triggerFlag = BP_BREAK_ON_ACCESS,
+                                   sizeFlag = BP_WATCH_DWORD,
+                                   condition = True,
+                                   action = None):
+        """
+        Creates a disabled hardware breakpoint at the given address.
+
+        @see:
+            L{has_hardware_breakpoint},
+            L{get_hardware_breakpoint},
+            L{enable_hardware_breakpoint},
+            L{enable_one_shot_hardware_breakpoint},
+            L{disable_hardware_breakpoint},
+            L{erase_hardware_breakpoint}
+
+        @note:
+            Hardware breakpoints do not seem to work properly on VirtualBox.
+            See U{http://www.virtualbox.org/ticket/477}.
+
+        @type  dwThreadId: int
+        @param dwThreadId: Thread global ID.
+
+        @type  address: int
+        @param address: Memory address to watch.
+
+        @type  triggerFlag: int
+        @param triggerFlag: Trigger of breakpoint. Must be one of the following:
+
+             - L{BP_BREAK_ON_EXECUTION}
+
+               Break on code execution.
+
+             - L{BP_BREAK_ON_WRITE}
+
+               Break on memory write.
+
+             - L{BP_BREAK_ON_ACCESS}
+
+               Break on memory read or write.
+
+        @type  sizeFlag: int
+        @param sizeFlag: Size of breakpoint. Must be one of the following:
+
+             - L{BP_WATCH_BYTE}
+
+               One (1) byte in size.
+
+             - L{BP_WATCH_WORD}
+
+               Two (2) bytes in size.
+
+             - L{BP_WATCH_DWORD}
+
+               Four (4) bytes in size.
+
+             - L{BP_WATCH_QWORD}
+
+               Eight (8) bytes in size.
+
+        @type  condition: function
+        @param condition: (Optional) Condition callback function.
+
+            The callback signature is::
+
+                def condition_callback(event):
+                    return True     # returns True or False
+
+            Where B{event} is an L{Event} object,
+            and the return value is a boolean
+            (C{True} to dispatch the event, C{False} otherwise).
+
+        @type  action: function
+        @param action: (Optional) Action callback function.
+            If specified, the event is handled by this callback instead of
+            being dispatched normally.
+
+            The callback signature is::
+
+                def action_callback(event):
+                    pass        # no return value
+
+            Where B{event} is an L{Event} object,
+            and the return value is a boolean
+            (C{True} to dispatch the event, C{False} otherwise).
+
+        @rtype:  L{HardwareBreakpoint}
+        @return: The hardware breakpoint object.
+        """
+        thread = self.system.get_thread(dwThreadId)
+        bp = HardwareBreakpoint(address, triggerFlag, sizeFlag, condition,
+                                action)
+        begin = bp.get_address()
+        end = begin + bp.get_size()
+
+        if dwThreadId in self.__hardwareBP:
+            bpSet = self.__hardwareBP[dwThreadId]
+            for oldbp in bpSet:
+                old_begin = oldbp.get_address()
+                old_end = old_begin + oldbp.get_size()
+                if MemoryAddresses.do_ranges_intersect(begin, end, old_begin,
+                                                       old_end):
+                    msg = "Already exists (TID %d) : %r" % (dwThreadId, oldbp)
+                    raise KeyError(msg)
+        else:
+            bpSet = set()
+            self.__hardwareBP[dwThreadId] = bpSet
+        bpSet.add(bp)
+        return bp
+
+#------------------------------------------------------------------------------
+
+    # Checking breakpoint definitions.
+
+    def has_code_breakpoint(self, dwProcessId, address):
+        """
+        Checks if a code breakpoint is defined at the given address.
+
+        @see:
+            L{define_code_breakpoint},
+            L{get_code_breakpoint},
+            L{erase_code_breakpoint},
+            L{enable_code_breakpoint},
+            L{enable_one_shot_code_breakpoint},
+            L{disable_code_breakpoint}
+
+        @type  dwProcessId: int
+        @param dwProcessId: Process global ID.
+
+        @type  address: int
+        @param address: Memory address of breakpoint.
+
+        @rtype:  bool
+        @return: C{True} if the breakpoint is defined, C{False} otherwise.
+        """
+        return (dwProcessId, address) in self.__codeBP
+
+    def has_page_breakpoint(self, dwProcessId, address):
+        """
+        Checks if a page breakpoint is defined at the given address.
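+
+        For example (sketch; C{address} is the page aligned address that was
+        passed to L{define_page_breakpoint})::
+
+            if not debug.has_page_breakpoint(pid, address):
+                debug.define_page_breakpoint(pid, address, pages = 1)
+                debug.enable_page_breakpoint(pid, address)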
+
+        @see:
+            L{define_page_breakpoint},
+            L{get_page_breakpoint},
+            L{erase_page_breakpoint},
+            L{enable_page_breakpoint},
+            L{enable_one_shot_page_breakpoint},
+            L{disable_page_breakpoint}
+
+        @type  dwProcessId: int
+        @param dwProcessId: Process global ID.
+
+        @type  address: int
+        @param address: Memory address of breakpoint.
+
+        @rtype:  bool
+        @return: C{True} if the breakpoint is defined, C{False} otherwise.
+        """
+        return (dwProcessId, address) in self.__pageBP
+
+    def has_hardware_breakpoint(self, dwThreadId, address):
+        """
+        Checks if a hardware breakpoint is defined at the given address.
+
+        @see:
+            L{define_hardware_breakpoint},
+            L{get_hardware_breakpoint},
+            L{erase_hardware_breakpoint},
+            L{enable_hardware_breakpoint},
+            L{enable_one_shot_hardware_breakpoint},
+            L{disable_hardware_breakpoint}
+
+        @type  dwThreadId: int
+        @param dwThreadId: Thread global ID.
+
+        @type  address: int
+        @param address: Memory address of breakpoint.
+
+        @rtype:  bool
+        @return: C{True} if the breakpoint is defined, C{False} otherwise.
+        """
+        if dwThreadId in self.__hardwareBP:
+            bpSet = self.__hardwareBP[dwThreadId]
+            for bp in bpSet:
+                if bp.get_address() == address:
+                    return True
+        return False
+
+#------------------------------------------------------------------------------
+
+    # Getting breakpoints.
+
+    def get_code_breakpoint(self, dwProcessId, address):
+        """
+        Returns the internally used breakpoint object,
+        for the code breakpoint defined at the given address.
+
+        @warning: It's usually best to call the L{Debug} methods
+            instead of accessing the breakpoint objects directly.
+
+        @see:
+            L{define_code_breakpoint},
+            L{has_code_breakpoint},
+            L{enable_code_breakpoint},
+            L{enable_one_shot_code_breakpoint},
+            L{disable_code_breakpoint},
+            L{erase_code_breakpoint}
+
+        @type  dwProcessId: int
+        @param dwProcessId: Process global ID.
+
+        @type  address: int
+        @param address: Memory address where the breakpoint is defined.
+
+        @rtype:  L{CodeBreakpoint}
+        @return: The code breakpoint object.
+        """
+        key = (dwProcessId, address)
+        if key not in self.__codeBP:
+            msg = "No breakpoint at process %d, address %s"
+            address = HexDump.address(address)
+            raise KeyError(msg % (dwProcessId, address))
+        return self.__codeBP[key]
+
+    def get_page_breakpoint(self, dwProcessId, address):
+        """
+        Returns the internally used breakpoint object,
+        for the page breakpoint defined at the given address.
+
+        @warning: It's usually best to call the L{Debug} methods
+            instead of accessing the breakpoint objects directly.
+
+        @see:
+            L{define_page_breakpoint},
+            L{has_page_breakpoint},
+            L{enable_page_breakpoint},
+            L{enable_one_shot_page_breakpoint},
+            L{disable_page_breakpoint},
+            L{erase_page_breakpoint}
+
+        @type  dwProcessId: int
+        @param dwProcessId: Process global ID.
+
+        @type  address: int
+        @param address: Memory address where the breakpoint is defined.
+
+        @rtype:  L{PageBreakpoint}
+        @return: The page breakpoint object.
+        """
+        key = (dwProcessId, address)
+        if key not in self.__pageBP:
+            msg = "No breakpoint at process %d, address %s"
+            address = HexDump.address(address)
+            raise KeyError(msg % (dwProcessId, address))
+        return self.__pageBP[key]
+
+    def get_hardware_breakpoint(self, dwThreadId, address):
+        """
+        Returns the internally used breakpoint object,
+        for the hardware breakpoint defined at the given address.
+
+        @warning: It's usually best to call the L{Debug} methods
+            instead of accessing the breakpoint objects directly.
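+
+        Example (sketch; watches four bytes at C{address} for writes from
+        thread C{tid}, then looks the breakpoint object up again)::
+
+            debug.define_hardware_breakpoint(tid, address,
+                                             debug.BP_BREAK_ON_WRITE,
+                                             debug.BP_WATCH_DWORD)
+            debug.enable_hardware_breakpoint(tid, address)
+            bp = debug.get_hardware_breakpoint(tid, address)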
+ + @see: + L{define_hardware_breakpoint}, + L{has_hardware_breakpoint}, + L{get_code_breakpoint}, + L{enable_hardware_breakpoint}, + L{enable_one_shot_hardware_breakpoint}, + L{disable_hardware_breakpoint}, + L{erase_hardware_breakpoint} + + @type dwThreadId: int + @param dwThreadId: Thread global ID. + + @type address: int + @param address: Memory address where the breakpoint is defined. + + @rtype: L{HardwareBreakpoint} + @return: The hardware breakpoint object. + """ + if dwThreadId not in self.__hardwareBP: + msg = "No hardware breakpoints set for thread %d" + raise KeyError(msg % dwThreadId) + for bp in self.__hardwareBP[dwThreadId]: + if bp.is_here(address): + return bp + msg = "No hardware breakpoint at thread %d, address %s" + raise KeyError(msg % (dwThreadId, HexDump.address(address))) + +#------------------------------------------------------------------------------ + + # Enabling and disabling breakpoints. + + def enable_code_breakpoint(self, dwProcessId, address): + """ + Enables the code breakpoint at the given address. + + @see: + L{define_code_breakpoint}, + L{has_code_breakpoint}, + L{enable_one_shot_code_breakpoint}, + L{disable_code_breakpoint} + L{erase_code_breakpoint}, + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + p = self.system.get_process(dwProcessId) + bp = self.get_code_breakpoint(dwProcessId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.enable(p, None) # XXX HACK thread is not used + + def enable_page_breakpoint(self, dwProcessId, address): + """ + Enables the page breakpoint at the given address. + + @see: + L{define_page_breakpoint}, + L{has_page_breakpoint}, + L{get_page_breakpoint}, + L{enable_one_shot_page_breakpoint}, + L{disable_page_breakpoint} + L{erase_page_breakpoint}, + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + p = self.system.get_process(dwProcessId) + bp = self.get_page_breakpoint(dwProcessId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.enable(p, None) # XXX HACK thread is not used + + def enable_hardware_breakpoint(self, dwThreadId, address): + """ + Enables the hardware breakpoint at the given address. + + @see: + L{define_hardware_breakpoint}, + L{has_hardware_breakpoint}, + L{get_hardware_breakpoint}, + L{enable_one_shot_hardware_breakpoint}, + L{disable_hardware_breakpoint} + L{erase_hardware_breakpoint}, + + @note: Do not set hardware breakpoints while processing the system + breakpoint event. + + @type dwThreadId: int + @param dwThreadId: Thread global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + t = self.system.get_thread(dwThreadId) + bp = self.get_hardware_breakpoint(dwThreadId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.enable(None, t) # XXX HACK process is not used + + def enable_one_shot_code_breakpoint(self, dwProcessId, address): + """ + Enables the code breakpoint at the given address for only one shot. + + @see: + L{define_code_breakpoint}, + L{has_code_breakpoint}, + L{get_code_breakpoint}, + L{enable_code_breakpoint}, + L{disable_code_breakpoint} + L{erase_code_breakpoint}, + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. 
+ """ + p = self.system.get_process(dwProcessId) + bp = self.get_code_breakpoint(dwProcessId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.one_shot(p, None) # XXX HACK thread is not used + + def enable_one_shot_page_breakpoint(self, dwProcessId, address): + """ + Enables the page breakpoint at the given address for only one shot. + + @see: + L{define_page_breakpoint}, + L{has_page_breakpoint}, + L{get_page_breakpoint}, + L{enable_page_breakpoint}, + L{disable_page_breakpoint} + L{erase_page_breakpoint}, + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + p = self.system.get_process(dwProcessId) + bp = self.get_page_breakpoint(dwProcessId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.one_shot(p, None) # XXX HACK thread is not used + + def enable_one_shot_hardware_breakpoint(self, dwThreadId, address): + """ + Enables the hardware breakpoint at the given address for only one shot. + + @see: + L{define_hardware_breakpoint}, + L{has_hardware_breakpoint}, + L{get_hardware_breakpoint}, + L{enable_hardware_breakpoint}, + L{disable_hardware_breakpoint} + L{erase_hardware_breakpoint}, + + @type dwThreadId: int + @param dwThreadId: Thread global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + t = self.system.get_thread(dwThreadId) + bp = self.get_hardware_breakpoint(dwThreadId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.one_shot(None, t) # XXX HACK process is not used + + def disable_code_breakpoint(self, dwProcessId, address): + """ + Disables the code breakpoint at the given address. + + @see: + L{define_code_breakpoint}, + L{has_code_breakpoint}, + L{get_code_breakpoint}, + L{enable_code_breakpoint} + L{enable_one_shot_code_breakpoint}, + L{erase_code_breakpoint}, + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + p = self.system.get_process(dwProcessId) + bp = self.get_code_breakpoint(dwProcessId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.disable(p, None) # XXX HACK thread is not used + + def disable_page_breakpoint(self, dwProcessId, address): + """ + Disables the page breakpoint at the given address. + + @see: + L{define_page_breakpoint}, + L{has_page_breakpoint}, + L{get_page_breakpoint}, + L{enable_page_breakpoint} + L{enable_one_shot_page_breakpoint}, + L{erase_page_breakpoint}, + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + p = self.system.get_process(dwProcessId) + bp = self.get_page_breakpoint(dwProcessId, address) + if bp.is_running(): + self.__del_running_bp_from_all_threads(bp) + bp.disable(p, None) # XXX HACK thread is not used + + def disable_hardware_breakpoint(self, dwThreadId, address): + """ + Disables the hardware breakpoint at the given address. + + @see: + L{define_hardware_breakpoint}, + L{has_hardware_breakpoint}, + L{get_hardware_breakpoint}, + L{enable_hardware_breakpoint} + L{enable_one_shot_hardware_breakpoint}, + L{erase_hardware_breakpoint}, + + @type dwThreadId: int + @param dwThreadId: Thread global ID. + + @type address: int + @param address: Memory address of breakpoint. 
+ """ + t = self.system.get_thread(dwThreadId) + p = t.get_process() + bp = self.get_hardware_breakpoint(dwThreadId, address) + if bp.is_running(): + self.__del_running_bp(dwThreadId, bp) + bp.disable(p, t) + +#------------------------------------------------------------------------------ + + # Undefining (erasing) breakpoints. + + def erase_code_breakpoint(self, dwProcessId, address): + """ + Erases the code breakpoint at the given address. + + @see: + L{define_code_breakpoint}, + L{has_code_breakpoint}, + L{get_code_breakpoint}, + L{enable_code_breakpoint}, + L{enable_one_shot_code_breakpoint}, + L{disable_code_breakpoint} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + bp = self.get_code_breakpoint(dwProcessId, address) + if not bp.is_disabled(): + self.disable_code_breakpoint(dwProcessId, address) + del self.__codeBP[ (dwProcessId, address) ] + + def erase_page_breakpoint(self, dwProcessId, address): + """ + Erases the page breakpoint at the given address. + + @see: + L{define_page_breakpoint}, + L{has_page_breakpoint}, + L{get_page_breakpoint}, + L{enable_page_breakpoint}, + L{enable_one_shot_page_breakpoint}, + L{disable_page_breakpoint} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + bp = self.get_page_breakpoint(dwProcessId, address) + begin = bp.get_address() + end = begin + bp.get_size() + if not bp.is_disabled(): + self.disable_page_breakpoint(dwProcessId, address) + address = begin + pageSize = MemoryAddresses.pageSize + while address < end: + del self.__pageBP[ (dwProcessId, address) ] + address = address + pageSize + + def erase_hardware_breakpoint(self, dwThreadId, address): + """ + Erases the hardware breakpoint at the given address. + + @see: + L{define_hardware_breakpoint}, + L{has_hardware_breakpoint}, + L{get_hardware_breakpoint}, + L{enable_hardware_breakpoint}, + L{enable_one_shot_hardware_breakpoint}, + L{disable_hardware_breakpoint} + + @type dwThreadId: int + @param dwThreadId: Thread global ID. + + @type address: int + @param address: Memory address of breakpoint. + """ + bp = self.get_hardware_breakpoint(dwThreadId, address) + if not bp.is_disabled(): + self.disable_hardware_breakpoint(dwThreadId, address) + bpSet = self.__hardwareBP[dwThreadId] + bpSet.remove(bp) + if not bpSet: + del self.__hardwareBP[dwThreadId] + +#------------------------------------------------------------------------------ + + # Listing breakpoints. + + def get_all_breakpoints(self): + """ + Returns all breakpoint objects as a list of tuples. + + Each tuple contains: + - Process global ID to which the breakpoint applies. + - Thread global ID to which the breakpoint applies, or C{None}. + - The L{Breakpoint} object itself. + + @note: If you're only interested in a specific breakpoint type, or in + breakpoints for a specific process or thread, it's probably faster + to call one of the following methods: + - L{get_all_code_breakpoints} + - L{get_all_page_breakpoints} + - L{get_all_hardware_breakpoints} + - L{get_process_code_breakpoints} + - L{get_process_page_breakpoints} + - L{get_process_hardware_breakpoints} + - L{get_thread_hardware_breakpoints} + + @rtype: list of tuple( pid, tid, bp ) + @return: List of all breakpoints. + """ + bplist = list() + + # Get the code breakpoints. + for (pid, bp) in self.get_all_code_breakpoints(): + bplist.append( (pid, None, bp) ) + + # Get the page breakpoints. 
+ for (pid, bp) in self.get_all_page_breakpoints(): + bplist.append( (pid, None, bp) ) + + # Get the hardware breakpoints. + for (tid, bp) in self.get_all_hardware_breakpoints(): + pid = self.system.get_thread(tid).get_pid() + bplist.append( (pid, tid, bp) ) + + # Return the list of breakpoints. + return bplist + + def get_all_code_breakpoints(self): + """ + @rtype: list of tuple( int, L{CodeBreakpoint} ) + @return: All code breakpoints as a list of tuples (pid, bp). + """ + return [ (pid, bp) for ((pid, address), bp) in compat.iteritems(self.__codeBP) ] + + def get_all_page_breakpoints(self): + """ + @rtype: list of tuple( int, L{PageBreakpoint} ) + @return: All page breakpoints as a list of tuples (pid, bp). + """ +## return list( set( [ (pid, bp) for ((pid, address), bp) in compat.iteritems(self.__pageBP) ] ) ) + result = set() + for ((pid, address), bp) in compat.iteritems(self.__pageBP): + result.add( (pid, bp) ) + return list(result) + + def get_all_hardware_breakpoints(self): + """ + @rtype: list of tuple( int, L{HardwareBreakpoint} ) + @return: All hardware breakpoints as a list of tuples (tid, bp). + """ + result = list() + for (tid, bplist) in compat.iteritems(self.__hardwareBP): + for bp in bplist: + result.append( (tid, bp) ) + return result + + def get_process_breakpoints(self, dwProcessId): + """ + Returns all breakpoint objects for the given process as a list of tuples. + + Each tuple contains: + - Process global ID to which the breakpoint applies. + - Thread global ID to which the breakpoint applies, or C{None}. + - The L{Breakpoint} object itself. + + @note: If you're only interested in a specific breakpoint type, or in + breakpoints for a specific process or thread, it's probably faster + to call one of the following methods: + - L{get_all_code_breakpoints} + - L{get_all_page_breakpoints} + - L{get_all_hardware_breakpoints} + - L{get_process_code_breakpoints} + - L{get_process_page_breakpoints} + - L{get_process_hardware_breakpoints} + - L{get_thread_hardware_breakpoints} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @rtype: list of tuple( pid, tid, bp ) + @return: List of all breakpoints for the given process. + """ + bplist = list() + + # Get the code breakpoints. + for bp in self.get_process_code_breakpoints(dwProcessId): + bplist.append( (dwProcessId, None, bp) ) + + # Get the page breakpoints. + for bp in self.get_process_page_breakpoints(dwProcessId): + bplist.append( (dwProcessId, None, bp) ) + + # Get the hardware breakpoints. + for (tid, bp) in self.get_process_hardware_breakpoints(dwProcessId): + pid = self.system.get_thread(tid).get_pid() + bplist.append( (dwProcessId, tid, bp) ) + + # Return the list of breakpoints. + return bplist + + def get_process_code_breakpoints(self, dwProcessId): + """ + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @rtype: list of L{CodeBreakpoint} + @return: All code breakpoints for the given process. + """ + return [ bp for ((pid, address), bp) in compat.iteritems(self.__codeBP) \ + if pid == dwProcessId ] + + def get_process_page_breakpoints(self, dwProcessId): + """ + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @rtype: list of L{PageBreakpoint} + @return: All page breakpoints for the given process. 
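+
+            For example, to list them (sketch)::
+
+                for bp in debug.get_process_page_breakpoints(pid):
+                    print("%s (%d bytes)" % (HexDump.address(bp.get_address()),
+                                             bp.get_size()))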
+ """ + return [ bp for ((pid, address), bp) in compat.iteritems(self.__pageBP) \ + if pid == dwProcessId ] + + def get_thread_hardware_breakpoints(self, dwThreadId): + """ + @see: L{get_process_hardware_breakpoints} + + @type dwThreadId: int + @param dwThreadId: Thread global ID. + + @rtype: list of L{HardwareBreakpoint} + @return: All hardware breakpoints for the given thread. + """ + result = list() + for (tid, bplist) in compat.iteritems(self.__hardwareBP): + if tid == dwThreadId: + for bp in bplist: + result.append(bp) + return result + + def get_process_hardware_breakpoints(self, dwProcessId): + """ + @see: L{get_thread_hardware_breakpoints} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @rtype: list of tuple( int, L{HardwareBreakpoint} ) + @return: All hardware breakpoints for each thread in the given process + as a list of tuples (tid, bp). + """ + result = list() + aProcess = self.system.get_process(dwProcessId) + for dwThreadId in aProcess.iter_thread_ids(): + if dwThreadId in self.__hardwareBP: + bplist = self.__hardwareBP[dwThreadId] + for bp in bplist: + result.append( (dwThreadId, bp) ) + return result + +## def get_all_hooks(self): +## """ +## @see: L{get_process_hooks} +## +## @rtype: list of tuple( int, int, L{Hook} ) +## @return: All defined hooks as a list of tuples (pid, address, hook). +## """ +## return [ (pid, address, hook) \ +## for ((pid, address), hook) in self.__hook_objects ] +## +## def get_process_hooks(self, dwProcessId): +## """ +## @see: L{get_all_hooks} +## +## @type dwProcessId: int +## @param dwProcessId: Process global ID. +## +## @rtype: list of tuple( int, int, L{Hook} ) +## @return: All hooks for the given process as a list of tuples +## (pid, address, hook). +## """ +## return [ (pid, address, hook) \ +## for ((pid, address), hook) in self.__hook_objects \ +## if pid == dwProcessId ] + +#------------------------------------------------------------------------------ + + # Batch operations on all breakpoints. + + def enable_all_breakpoints(self): + """ + Enables all disabled breakpoints in all processes. + + @see: + enable_code_breakpoint, + enable_page_breakpoint, + enable_hardware_breakpoint + """ + + # disable code breakpoints + for (pid, bp) in self.get_all_code_breakpoints(): + if bp.is_disabled(): + self.enable_code_breakpoint(pid, bp.get_address()) + + # disable page breakpoints + for (pid, bp) in self.get_all_page_breakpoints(): + if bp.is_disabled(): + self.enable_page_breakpoint(pid, bp.get_address()) + + # disable hardware breakpoints + for (tid, bp) in self.get_all_hardware_breakpoints(): + if bp.is_disabled(): + self.enable_hardware_breakpoint(tid, bp.get_address()) + + def enable_one_shot_all_breakpoints(self): + """ + Enables for one shot all disabled breakpoints in all processes. 
+ + @see: + enable_one_shot_code_breakpoint, + enable_one_shot_page_breakpoint, + enable_one_shot_hardware_breakpoint + """ + + # disable code breakpoints for one shot + for (pid, bp) in self.get_all_code_breakpoints(): + if bp.is_disabled(): + self.enable_one_shot_code_breakpoint(pid, bp.get_address()) + + # disable page breakpoints for one shot + for (pid, bp) in self.get_all_page_breakpoints(): + if bp.is_disabled(): + self.enable_one_shot_page_breakpoint(pid, bp.get_address()) + + # disable hardware breakpoints for one shot + for (tid, bp) in self.get_all_hardware_breakpoints(): + if bp.is_disabled(): + self.enable_one_shot_hardware_breakpoint(tid, bp.get_address()) + + def disable_all_breakpoints(self): + """ + Disables all breakpoints in all processes. + + @see: + disable_code_breakpoint, + disable_page_breakpoint, + disable_hardware_breakpoint + """ + + # disable code breakpoints + for (pid, bp) in self.get_all_code_breakpoints(): + self.disable_code_breakpoint(pid, bp.get_address()) + + # disable page breakpoints + for (pid, bp) in self.get_all_page_breakpoints(): + self.disable_page_breakpoint(pid, bp.get_address()) + + # disable hardware breakpoints + for (tid, bp) in self.get_all_hardware_breakpoints(): + self.disable_hardware_breakpoint(tid, bp.get_address()) + + def erase_all_breakpoints(self): + """ + Erases all breakpoints in all processes. + + @see: + erase_code_breakpoint, + erase_page_breakpoint, + erase_hardware_breakpoint + """ + + # This should be faster but let's not trust the GC so much :P + # self.disable_all_breakpoints() + # self.__codeBP = dict() + # self.__pageBP = dict() + # self.__hardwareBP = dict() + # self.__runningBP = dict() + # self.__hook_objects = dict() + +## # erase hooks +## for (pid, address, hook) in self.get_all_hooks(): +## self.dont_hook_function(pid, address) + + # erase code breakpoints + for (pid, bp) in self.get_all_code_breakpoints(): + self.erase_code_breakpoint(pid, bp.get_address()) + + # erase page breakpoints + for (pid, bp) in self.get_all_page_breakpoints(): + self.erase_page_breakpoint(pid, bp.get_address()) + + # erase hardware breakpoints + for (tid, bp) in self.get_all_hardware_breakpoints(): + self.erase_hardware_breakpoint(tid, bp.get_address()) + +#------------------------------------------------------------------------------ + + # Batch operations on breakpoints per process. + + def enable_process_breakpoints(self, dwProcessId): + """ + Enables all disabled breakpoints for the given process. + + @type dwProcessId: int + @param dwProcessId: Process global ID. + """ + + # enable code breakpoints + for bp in self.get_process_code_breakpoints(dwProcessId): + if bp.is_disabled(): + self.enable_code_breakpoint(dwProcessId, bp.get_address()) + + # enable page breakpoints + for bp in self.get_process_page_breakpoints(dwProcessId): + if bp.is_disabled(): + self.enable_page_breakpoint(dwProcessId, bp.get_address()) + + # enable hardware breakpoints + if self.system.has_process(dwProcessId): + aProcess = self.system.get_process(dwProcessId) + else: + aProcess = Process(dwProcessId) + aProcess.scan_threads() + for aThread in aProcess.iter_threads(): + dwThreadId = aThread.get_tid() + for bp in self.get_thread_hardware_breakpoints(dwThreadId): + if bp.is_disabled(): + self.enable_hardware_breakpoint(dwThreadId, bp.get_address()) + + def enable_one_shot_process_breakpoints(self, dwProcessId): + """ + Enables for one shot all disabled breakpoints for the given process. + + @type dwProcessId: int + @param dwProcessId: Process global ID. 
+ """ + + # enable code breakpoints for one shot + for bp in self.get_process_code_breakpoints(dwProcessId): + if bp.is_disabled(): + self.enable_one_shot_code_breakpoint(dwProcessId, bp.get_address()) + + # enable page breakpoints for one shot + for bp in self.get_process_page_breakpoints(dwProcessId): + if bp.is_disabled(): + self.enable_one_shot_page_breakpoint(dwProcessId, bp.get_address()) + + # enable hardware breakpoints for one shot + if self.system.has_process(dwProcessId): + aProcess = self.system.get_process(dwProcessId) + else: + aProcess = Process(dwProcessId) + aProcess.scan_threads() + for aThread in aProcess.iter_threads(): + dwThreadId = aThread.get_tid() + for bp in self.get_thread_hardware_breakpoints(dwThreadId): + if bp.is_disabled(): + self.enable_one_shot_hardware_breakpoint(dwThreadId, bp.get_address()) + + def disable_process_breakpoints(self, dwProcessId): + """ + Disables all breakpoints for the given process. + + @type dwProcessId: int + @param dwProcessId: Process global ID. + """ + + # disable code breakpoints + for bp in self.get_process_code_breakpoints(dwProcessId): + self.disable_code_breakpoint(dwProcessId, bp.get_address()) + + # disable page breakpoints + for bp in self.get_process_page_breakpoints(dwProcessId): + self.disable_page_breakpoint(dwProcessId, bp.get_address()) + + # disable hardware breakpoints + if self.system.has_process(dwProcessId): + aProcess = self.system.get_process(dwProcessId) + else: + aProcess = Process(dwProcessId) + aProcess.scan_threads() + for aThread in aProcess.iter_threads(): + dwThreadId = aThread.get_tid() + for bp in self.get_thread_hardware_breakpoints(dwThreadId): + self.disable_hardware_breakpoint(dwThreadId, bp.get_address()) + + def erase_process_breakpoints(self, dwProcessId): + """ + Erases all breakpoints for the given process. + + @type dwProcessId: int + @param dwProcessId: Process global ID. + """ + + # disable breakpoints first + # if an error occurs, no breakpoint is erased + self.disable_process_breakpoints(dwProcessId) + +## # erase hooks +## for address, hook in self.get_process_hooks(dwProcessId): +## self.dont_hook_function(dwProcessId, address) + + # erase code breakpoints + for bp in self.get_process_code_breakpoints(dwProcessId): + self.erase_code_breakpoint(dwProcessId, bp.get_address()) + + # erase page breakpoints + for bp in self.get_process_page_breakpoints(dwProcessId): + self.erase_page_breakpoint(dwProcessId, bp.get_address()) + + # erase hardware breakpoints + if self.system.has_process(dwProcessId): + aProcess = self.system.get_process(dwProcessId) + else: + aProcess = Process(dwProcessId) + aProcess.scan_threads() + for aThread in aProcess.iter_threads(): + dwThreadId = aThread.get_tid() + for bp in self.get_thread_hardware_breakpoints(dwThreadId): + self.erase_hardware_breakpoint(dwThreadId, bp.get_address()) + +#------------------------------------------------------------------------------ + + # Internal handlers of debug events. + + def _notify_guard_page(self, event): + """ + Notify breakpoints of a guard page exception event. + + @type event: L{ExceptionEvent} + @param event: Guard page exception event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + address = event.get_fault_address() + pid = event.get_pid() + bCallHandler = True + + # Align address to page boundary. + mask = ~(MemoryAddresses.pageSize - 1) + address = address & mask + + # Do we have an active page breakpoint there? 
+ key = (pid, address) + if key in self.__pageBP: + bp = self.__pageBP[key] + if bp.is_enabled() or bp.is_one_shot(): + + # Breakpoint is ours. + event.continueStatus = win32.DBG_CONTINUE +## event.continueStatus = win32.DBG_EXCEPTION_HANDLED + + # Hit the breakpoint. + bp.hit(event) + + # Remember breakpoints in RUNNING state. + if bp.is_running(): + tid = event.get_tid() + self.__add_running_bp(tid, bp) + + # Evaluate the breakpoint condition. + bCondition = bp.eval_condition(event) + + # If the breakpoint is automatic, run the action. + # If not, notify the user. + if bCondition and bp.is_automatic(): + bp.run_action(event) + bCallHandler = False + else: + bCallHandler = bCondition + + # If we don't have a breakpoint here pass the exception to the debugee. + # This is a normally occurring exception so we shouldn't swallow it. + else: + event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + + return bCallHandler + + def _notify_breakpoint(self, event): + """ + Notify breakpoints of a breakpoint exception event. + + @type event: L{ExceptionEvent} + @param event: Breakpoint exception event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + address = event.get_exception_address() + pid = event.get_pid() + bCallHandler = True + + # Do we have an active code breakpoint there? + key = (pid, address) + if key in self.__codeBP: + bp = self.__codeBP[key] + if not bp.is_disabled(): + + # Change the program counter (PC) to the exception address. + # This accounts for the change in PC caused by + # executing the breakpoint instruction, no matter + # the size of it. + aThread = event.get_thread() + aThread.set_pc(address) + + # Swallow the exception. + event.continueStatus = win32.DBG_CONTINUE + + # Hit the breakpoint. + bp.hit(event) + + # Remember breakpoints in RUNNING state. + if bp.is_running(): + tid = event.get_tid() + self.__add_running_bp(tid, bp) + + # Evaluate the breakpoint condition. + bCondition = bp.eval_condition(event) + + # If the breakpoint is automatic, run the action. + # If not, notify the user. + if bCondition and bp.is_automatic(): + bCallHandler = bp.run_action(event) + else: + bCallHandler = bCondition + + # Handle the system breakpoint. + # TODO: examine the stack trace to figure out if it's really a + # system breakpoint or an antidebug trick. The caller should be + # inside ntdll if it's legit. + elif event.get_process().is_system_defined_breakpoint(address): + event.continueStatus = win32.DBG_CONTINUE + + # In hostile mode, if we don't have a breakpoint here pass the + # exception to the debugee. In normal mode assume all breakpoint + # exceptions are to be handled by the debugger. + else: + if self.in_hostile_mode(): + event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + else: + event.continueStatus = win32.DBG_CONTINUE + + return bCallHandler + + def _notify_single_step(self, event): + """ + Notify breakpoints of a single step exception event. + + @type event: L{ExceptionEvent} + @param event: Single step exception event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + pid = event.get_pid() + tid = event.get_tid() + aThread = event.get_thread() + aProcess = event.get_process() + bCallHandler = True + bIsOurs = False + + # In hostile mode set the default to pass the exception to the debugee. + # If we later determine the exception is ours, hide it instead. 
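+        # The original continue status is saved so it can be restored if an
+        # exception (such as a KeyboardInterrupt) escapes the try block below.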
+ old_continueStatus = event.continueStatus + try: + if self.in_hostile_mode(): + event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + + # Single step support is implemented on x86/x64 architectures only. + if self.system.arch not in (win32.ARCH_I386, win32.ARCH_AMD64): + return bCallHandler + + # In hostile mode, read the last executed bytes to try to detect + # some antidebug tricks. Skip this check in normal mode because + # it'd slow things down. + # + # FIXME: weird opcode encodings may bypass this check! + # + # bFakeSingleStep: Ice Breakpoint undocumented instruction. + # bHideTrapFlag: Don't let pushf instructions get the real value of + # the trap flag. + # bNextIsPopFlags: Don't let popf instructions clear the trap flag. + # + bFakeSingleStep = False + bLastIsPushFlags = False + bNextIsPopFlags = False + if self.in_hostile_mode(): + pc = aThread.get_pc() + c = aProcess.read_char(pc - 1) + if c == 0xF1: # int1 + bFakeSingleStep = True + elif c == 0x9C: # pushf + bLastIsPushFlags = True + c = aProcess.peek_char(pc) + if c == 0x66: # the only valid prefix for popf + c = aProcess.peek_char(pc + 1) + if c == 0x9D: # popf + if bLastIsPushFlags: + bLastIsPushFlags = False # they cancel each other out + else: + bNextIsPopFlags = True + + # When the thread is in tracing mode, + # don't pass the exception to the debugee + # and set the trap flag again. + if self.is_tracing(tid): + bIsOurs = True + if not bFakeSingleStep: + event.continueStatus = win32.DBG_CONTINUE + aThread.set_tf() + + # Don't let the debugee read or write the trap flag. + # This code works in 32 and 64 bits thanks to the endianness. + if bLastIsPushFlags or bNextIsPopFlags: + sp = aThread.get_sp() + flags = aProcess.read_dword(sp) + if bLastIsPushFlags: + flags &= ~Thread.Flags.Trap + else: # if bNextIsPopFlags: + flags |= Thread.Flags.Trap + aProcess.write_dword(sp, flags) + + # Handle breakpoints in RUNNING state. + running = self.__get_running_bp_set(tid) + if running: + bIsOurs = True + if not bFakeSingleStep: + event.continueStatus = win32.DBG_CONTINUE + bCallHandler = False + while running: + try: + running.pop().hit(event) + except Exception: + e = sys.exc_info()[1] + warnings.warn(str(e), BreakpointWarning) + + # Handle hardware breakpoints. + if tid in self.__hardwareBP: + ctx = aThread.get_context(win32.CONTEXT_DEBUG_REGISTERS) + Dr6 = ctx['Dr6'] + ctx['Dr6'] = Dr6 & DebugRegister.clearHitMask + aThread.set_context(ctx) + bFoundBreakpoint = False + bCondition = False + hwbpList = [ bp for bp in self.__hardwareBP[tid] ] + for bp in hwbpList: + if not bp in self.__hardwareBP[tid]: + continue # it was removed by a user-defined callback + slot = bp.get_slot() + if (slot is not None) and \ + (Dr6 & DebugRegister.hitMask[slot]): + if not bFoundBreakpoint: #set before actions are called + if not bFakeSingleStep: + event.continueStatus = win32.DBG_CONTINUE + bFoundBreakpoint = True + bIsOurs = True + bp.hit(event) + if bp.is_running(): + self.__add_running_bp(tid, bp) + bThisCondition = bp.eval_condition(event) + if bThisCondition and bp.is_automatic(): + bp.run_action(event) + bThisCondition = False + bCondition = bCondition or bThisCondition + if bFoundBreakpoint: + bCallHandler = bCondition + + # Always call the user-defined handler + # when the thread is in tracing mode. + if self.is_tracing(tid): + bCallHandler = True + + # If we're not in hostile mode, by default we assume all single + # step exceptions are caused by the debugger. 
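+            # Clearing the trap flag stops the thread from raising further
+            # single step exceptions.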
+ if not bIsOurs and not self.in_hostile_mode(): + aThread.clear_tf() + + # If the user hit Control-C while we were inside the try block, + # set the default continueStatus back. + except: + event.continueStatus = old_continueStatus + raise + + return bCallHandler + + def _notify_load_dll(self, event): + """ + Notify the loading of a DLL. + + @type event: L{LoadDLLEvent} + @param event: Load DLL event. + + @rtype: bool + @return: C{True} to call the user-defined handler, C{False} otherwise. + """ + self.__set_deferred_breakpoints(event) + return True + + def _notify_unload_dll(self, event): + """ + Notify the unloading of a DLL. + + @type event: L{UnloadDLLEvent} + @param event: Unload DLL event. + + @rtype: bool + @return: C{True} to call the user-defined handler, C{False} otherwise. + """ + self.__cleanup_module(event) + return True + + def _notify_exit_thread(self, event): + """ + Notify the termination of a thread. + + @type event: L{ExitThreadEvent} + @param event: Exit thread event. + + @rtype: bool + @return: C{True} to call the user-defined handler, C{False} otherwise. + """ + self.__cleanup_thread(event) + return True + + def _notify_exit_process(self, event): + """ + Notify the termination of a process. + + @type event: L{ExitProcessEvent} + @param event: Exit process event. + + @rtype: bool + @return: C{True} to call the user-defined handler, C{False} otherwise. + """ + self.__cleanup_process(event) + self.__cleanup_thread(event) + return True + +#------------------------------------------------------------------------------ + + # This is the high level breakpoint interface. Here we don't have to care + # about defining or enabling breakpoints, and many errors are ignored + # (like for example setting the same breakpoint twice, here the second + # breakpoint replaces the first, much like in WinDBG). It should be easier + # and more intuitive, if less detailed. It also allows the use of deferred + # breakpoints. + +#------------------------------------------------------------------------------ + + # Code breakpoints + + def __set_break(self, pid, address, action, oneshot): + """ + Used by L{break_at} and L{stalk_at}. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_code_breakpoint} for more details. + + @type oneshot: bool + @param oneshot: C{True} for one-shot breakpoints, C{False} otherwise. + + @rtype: L{Breakpoint} + @return: Returns the new L{Breakpoint} object, or C{None} if the label + couldn't be resolved and the breakpoint was deferred. Deferred + breakpoints are set when the DLL they point to is loaded. 
+ """ + if type(address) not in (int, long): + label = address + try: + address = self.system.get_process(pid).resolve_label(address) + if not address: + raise Exception() + except Exception: + try: + deferred = self.__deferredBP[pid] + except KeyError: + deferred = dict() + self.__deferredBP[pid] = deferred + if label in deferred: + msg = "Redefined deferred code breakpoint at %s in process ID %d" + msg = msg % (label, pid) + warnings.warn(msg, BreakpointWarning) + deferred[label] = (action, oneshot) + return None + if self.has_code_breakpoint(pid, address): + bp = self.get_code_breakpoint(pid, address) + if bp.get_action() != action: # can't use "is not", fails for bound methods + bp.set_action(action) + msg = "Redefined code breakpoint at %s in process ID %d" + msg = msg % (label, pid) + warnings.warn(msg, BreakpointWarning) + else: + self.define_code_breakpoint(pid, address, True, action) + bp = self.get_code_breakpoint(pid, address) + if oneshot: + if not bp.is_one_shot(): + self.enable_one_shot_code_breakpoint(pid, address) + else: + if not bp.is_enabled(): + self.enable_code_breakpoint(pid, address) + return bp + + def __clear_break(self, pid, address): + """ + Used by L{dont_break_at} and L{dont_stalk_at}. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + """ + if type(address) not in (int, long): + unknown = True + label = address + try: + deferred = self.__deferredBP[pid] + del deferred[label] + unknown = False + except KeyError: +## traceback.print_last() # XXX DEBUG + pass + aProcess = self.system.get_process(pid) + try: + address = aProcess.resolve_label(label) + if not address: + raise Exception() + except Exception: +## traceback.print_last() # XXX DEBUG + if unknown: + msg = ("Can't clear unknown code breakpoint" + " at %s in process ID %d") + msg = msg % (label, pid) + warnings.warn(msg, BreakpointWarning) + return + if self.has_code_breakpoint(pid, address): + self.erase_code_breakpoint(pid, address) + + def __set_deferred_breakpoints(self, event): + """ + Used internally. Sets all deferred breakpoints for a DLL when it's + loaded. + + @type event: L{LoadDLLEvent} + @param event: Load DLL event. + """ + pid = event.get_pid() + try: + deferred = self.__deferredBP[pid] + except KeyError: + return + aProcess = event.get_process() + for (label, (action, oneshot)) in deferred.items(): + try: + address = aProcess.resolve_label(label) + except Exception: + continue + del deferred[label] + try: + self.__set_break(pid, address, action, oneshot) + except Exception: + msg = "Can't set deferred breakpoint %s at process ID %d" + msg = msg % (label, pid) + warnings.warn(msg, BreakpointWarning) + + def get_all_deferred_code_breakpoints(self): + """ + Returns a list of deferred code breakpoints. + + @rtype: tuple of (int, str, callable, bool) + @return: Tuple containing the following elements: + - Process ID where to set the breakpoint. + - Label pointing to the address where to set the breakpoint. + - Action callback for the breakpoint. + - C{True} of the breakpoint is one-shot, C{False} otherwise. 
+ """ + result = [] + for pid, deferred in compat.iteritems(self.__deferredBP): + for (label, (action, oneshot)) in compat.iteritems(deferred): + result.add( (pid, label, action, oneshot) ) + return result + + def get_process_deferred_code_breakpoints(self, dwProcessId): + """ + Returns a list of deferred code breakpoints. + + @type dwProcessId: int + @param dwProcessId: Process ID. + + @rtype: tuple of (int, str, callable, bool) + @return: Tuple containing the following elements: + - Label pointing to the address where to set the breakpoint. + - Action callback for the breakpoint. + - C{True} of the breakpoint is one-shot, C{False} otherwise. + """ + return [ (label, action, oneshot) + for (label, (action, oneshot)) + in compat.iteritems(self.__deferredBP.get(dwProcessId, {})) ] + + def stalk_at(self, pid, address, action = None): + """ + Sets a one shot code breakpoint at the given process and address. + + If instead of an address you pass a label, the breakpoint may be + deferred until the DLL it points to is loaded. + + @see: L{break_at}, L{dont_stalk_at} + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_code_breakpoint} for more details. + + @rtype: bool + @return: C{True} if the breakpoint was set immediately, or C{False} if + it was deferred. + """ + bp = self.__set_break(pid, address, action, oneshot = True) + return bp is not None + + def break_at(self, pid, address, action = None): + """ + Sets a code breakpoint at the given process and address. + + If instead of an address you pass a label, the breakpoint may be + deferred until the DLL it points to is loaded. + + @see: L{stalk_at}, L{dont_break_at} + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_code_breakpoint} for more details. + + @rtype: bool + @return: C{True} if the breakpoint was set immediately, or C{False} if + it was deferred. + """ + bp = self.__set_break(pid, address, action, oneshot = False) + return bp is not None + + def dont_break_at(self, pid, address): + """ + Clears a code breakpoint set by L{break_at}. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + """ + self.__clear_break(pid, address) + + def dont_stalk_at(self, pid, address): + """ + Clears a code breakpoint set by L{stalk_at}. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + """ + self.__clear_break(pid, address) + +#------------------------------------------------------------------------------ + + # Function hooks + + def hook_function(self, pid, address, + preCB = None, postCB = None, + paramCount = None, signature = None): + """ + Sets a function hook at the given address. 
+ + If instead of an address you pass a label, the hook may be + deferred until the DLL it points to is loaded. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + + @type preCB: function + @param preCB: (Optional) Callback triggered on function entry. + + The signature for the callback should be something like this:: + + def pre_LoadLibraryEx(event, ra, lpFilename, hFile, dwFlags): + + # return address + ra = params[0] + + # function arguments start from here... + szFilename = event.get_process().peek_string(lpFilename) + + # (...) + + Note that all pointer types are treated like void pointers, so your + callback won't get the string or structure pointed to by it, but + the remote memory address instead. This is so to prevent the ctypes + library from being "too helpful" and trying to dereference the + pointer. To get the actual data being pointed to, use one of the + L{Process.read} methods. + + @type postCB: function + @param postCB: (Optional) Callback triggered on function exit. + + The signature for the callback should be something like this:: + + def post_LoadLibraryEx(event, return_value): + + # (...) + + @type paramCount: int + @param paramCount: + (Optional) Number of parameters for the C{preCB} callback, + not counting the return address. Parameters are read from + the stack and assumed to be DWORDs in 32 bits and QWORDs in 64. + + This is a faster way to pull stack parameters in 32 bits, but in 64 + bits (or with some odd APIs in 32 bits) it won't be useful, since + not all arguments to the hooked function will be of the same size. + + For a more reliable and cross-platform way of hooking use the + C{signature} argument instead. + + @type signature: tuple + @param signature: + (Optional) Tuple of C{ctypes} data types that constitute the + hooked function signature. When the function is called, this will + be used to parse the arguments from the stack. Overrides the + C{paramCount} argument. + + @rtype: bool + @return: C{True} if the hook was set immediately, or C{False} if + it was deferred. + """ + try: + aProcess = self.system.get_process(pid) + except KeyError: + aProcess = Process(pid) + arch = aProcess.get_arch() + hookObj = Hook(preCB, postCB, paramCount, signature, arch) + bp = self.break_at(pid, address, hookObj) + return bp is not None + + def stalk_function(self, pid, address, + preCB = None, postCB = None, + paramCount = None, signature = None): + """ + Sets a one-shot function hook at the given address. + + If instead of an address you pass a label, the hook may be + deferred until the DLL it points to is loaded. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + + @type preCB: function + @param preCB: (Optional) Callback triggered on function entry. + + The signature for the callback should be something like this:: + + def pre_LoadLibraryEx(event, ra, lpFilename, hFile, dwFlags): + + # return address + ra = params[0] + + # function arguments start from here... + szFilename = event.get_process().peek_string(lpFilename) + + # (...) 
+ + Note that all pointer types are treated like void pointers, so your + callback won't get the string or structure pointed to by it, but + the remote memory address instead. This is so to prevent the ctypes + library from being "too helpful" and trying to dereference the + pointer. To get the actual data being pointed to, use one of the + L{Process.read} methods. + + @type postCB: function + @param postCB: (Optional) Callback triggered on function exit. + + The signature for the callback should be something like this:: + + def post_LoadLibraryEx(event, return_value): + + # (...) + + @type paramCount: int + @param paramCount: + (Optional) Number of parameters for the C{preCB} callback, + not counting the return address. Parameters are read from + the stack and assumed to be DWORDs in 32 bits and QWORDs in 64. + + This is a faster way to pull stack parameters in 32 bits, but in 64 + bits (or with some odd APIs in 32 bits) it won't be useful, since + not all arguments to the hooked function will be of the same size. + + For a more reliable and cross-platform way of hooking use the + C{signature} argument instead. + + @type signature: tuple + @param signature: + (Optional) Tuple of C{ctypes} data types that constitute the + hooked function signature. When the function is called, this will + be used to parse the arguments from the stack. Overrides the + C{paramCount} argument. + + @rtype: bool + @return: C{True} if the breakpoint was set immediately, or C{False} if + it was deferred. + """ + try: + aProcess = self.system.get_process(pid) + except KeyError: + aProcess = Process(pid) + arch = aProcess.get_arch() + hookObj = Hook(preCB, postCB, paramCount, signature, arch) + bp = self.stalk_at(pid, address, hookObj) + return bp is not None + + def dont_hook_function(self, pid, address): + """ + Removes a function hook set by L{hook_function}. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + """ + self.dont_break_at(pid, address) + + # alias + unhook_function = dont_hook_function + + def dont_stalk_function(self, pid, address): + """ + Removes a function hook set by L{stalk_function}. + + @type pid: int + @param pid: Process global ID. + + @type address: int or str + @param address: + Memory address of code instruction to break at. It can be an + integer value for the actual address or a string with a label + to be resolved. + """ + self.dont_stalk_at(pid, address) + +#------------------------------------------------------------------------------ + + # Variable watches + + def __set_variable_watch(self, tid, address, size, action): + """ + Used by L{watch_variable} and L{stalk_variable}. + + @type tid: int + @param tid: Thread global ID. + + @type address: int + @param address: Memory address of variable to watch. + + @type size: int + @param size: Size of variable to watch. The only supported sizes are: + byte (1), word (2), dword (4) and qword (8). + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_hardware_breakpoint} for more details. + + @rtype: L{HardwareBreakpoint} + @return: Hardware breakpoint at the requested address. + """ + + # TODO + # We should merge the breakpoints instead of overwriting them. + # We'll have the same problem as watch_buffer and we'll need to change + # the API again. 
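+
+        # Map the requested watch size to the corresponding debug register
+        # flag; hardware breakpoints only support 1, 2, 4 and 8 byte watches.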
+ + if size == 1: + sizeFlag = self.BP_WATCH_BYTE + elif size == 2: + sizeFlag = self.BP_WATCH_WORD + elif size == 4: + sizeFlag = self.BP_WATCH_DWORD + elif size == 8: + sizeFlag = self.BP_WATCH_QWORD + else: + raise ValueError("Bad size for variable watch: %r" % size) + + if self.has_hardware_breakpoint(tid, address): + warnings.warn( + "Hardware breakpoint in thread %d at address %s was overwritten!" \ + % (tid, HexDump.address(address, + self.system.get_thread(tid).get_bits())), + BreakpointWarning) + + bp = self.get_hardware_breakpoint(tid, address) + if bp.get_trigger() != self.BP_BREAK_ON_ACCESS or \ + bp.get_watch() != sizeFlag: + self.erase_hardware_breakpoint(tid, address) + self.define_hardware_breakpoint(tid, address, + self.BP_BREAK_ON_ACCESS, sizeFlag, True, action) + bp = self.get_hardware_breakpoint(tid, address) + + else: + self.define_hardware_breakpoint(tid, address, + self.BP_BREAK_ON_ACCESS, sizeFlag, True, action) + bp = self.get_hardware_breakpoint(tid, address) + + return bp + + def __clear_variable_watch(self, tid, address): + """ + Used by L{dont_watch_variable} and L{dont_stalk_variable}. + + @type tid: int + @param tid: Thread global ID. + + @type address: int + @param address: Memory address of variable to stop watching. + """ + if self.has_hardware_breakpoint(tid, address): + self.erase_hardware_breakpoint(tid, address) + + def watch_variable(self, tid, address, size, action = None): + """ + Sets a hardware breakpoint at the given thread, address and size. + + @see: L{dont_watch_variable} + + @type tid: int + @param tid: Thread global ID. + + @type address: int + @param address: Memory address of variable to watch. + + @type size: int + @param size: Size of variable to watch. The only supported sizes are: + byte (1), word (2), dword (4) and qword (8). + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_hardware_breakpoint} for more details. + """ + bp = self.__set_variable_watch(tid, address, size, action) + if not bp.is_enabled(): + self.enable_hardware_breakpoint(tid, address) + + def stalk_variable(self, tid, address, size, action = None): + """ + Sets a one-shot hardware breakpoint at the given thread, + address and size. + + @see: L{dont_watch_variable} + + @type tid: int + @param tid: Thread global ID. + + @type address: int + @param address: Memory address of variable to watch. + + @type size: int + @param size: Size of variable to watch. The only supported sizes are: + byte (1), word (2), dword (4) and qword (8). + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_hardware_breakpoint} for more details. + """ + bp = self.__set_variable_watch(tid, address, size, action) + if not bp.is_one_shot(): + self.enable_one_shot_hardware_breakpoint(tid, address) + + def dont_watch_variable(self, tid, address): + """ + Clears a hardware breakpoint set by L{watch_variable}. + + @type tid: int + @param tid: Thread global ID. + + @type address: int + @param address: Memory address of variable to stop watching. + """ + self.__clear_variable_watch(tid, address) + + def dont_stalk_variable(self, tid, address): + """ + Clears a hardware breakpoint set by L{stalk_variable}. + + @type tid: int + @param tid: Thread global ID. + + @type address: int + @param address: Memory address of variable to stop watching. 
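+
+        @note: Usage sketch, assuming C{debug} is a L{Debug} instance and
+            C{on_access} is a hypothetical action callback::
+
+                # Watch a DWORD sized variable for a single access...
+                debug.stalk_variable(tid, address, 4, action = on_access)
+                # ...and later drop the watch if it never triggered.
+                debug.dont_stalk_variable(tid, address)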
+ """ + self.__clear_variable_watch(tid, address) + +#------------------------------------------------------------------------------ + + # Buffer watches + + def __set_buffer_watch(self, pid, address, size, action, bOneShot): + """ + Used by L{watch_buffer} and L{stalk_buffer}. + + @type pid: int + @param pid: Process global ID. + + @type address: int + @param address: Memory address of buffer to watch. + + @type size: int + @param size: Size in bytes of buffer to watch. + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_page_breakpoint} for more details. + + @type bOneShot: bool + @param bOneShot: + C{True} to set a one-shot breakpoint, + C{False} to set a normal breakpoint. + """ + + # Check the size isn't zero or negative. + if size < 1: + raise ValueError("Bad size for buffer watch: %r" % size) + + # Create the buffer watch identifier. + bw = BufferWatch(pid, address, address + size, action, bOneShot) + + # Get the base address and size in pages required for this buffer. + base = MemoryAddresses.align_address_to_page_start(address) + limit = MemoryAddresses.align_address_to_page_end(address + size) + pages = MemoryAddresses.get_buffer_size_in_pages(address, size) + + try: + + # For each page: + # + if a page breakpoint exists reuse it + # + if it doesn't exist define it + + bset = set() # all breakpoints used + nset = set() # newly defined breakpoints + cset = set() # condition objects + + page_addr = base + pageSize = MemoryAddresses.pageSize + while page_addr < limit: + + # If a breakpoints exists, reuse it. + if self.has_page_breakpoint(pid, page_addr): + bp = self.get_page_breakpoint(pid, page_addr) + if bp not in bset: + condition = bp.get_condition() + if not condition in cset: + if not isinstance(condition,_BufferWatchCondition): + # this shouldn't happen unless you tinkered + # with it or defined your own page breakpoints + # manually. + msg = "Can't watch buffer at page %s" + msg = msg % HexDump.address(page_addr) + raise RuntimeError(msg) + cset.add(condition) + bset.add(bp) + + # If it doesn't, define it. + else: + condition = _BufferWatchCondition() + bp = self.define_page_breakpoint(pid, page_addr, 1, + condition = condition) + bset.add(bp) + nset.add(bp) + cset.add(condition) + + # Next page. + page_addr = page_addr + pageSize + + # For each breakpoint, enable it if needed. + aProcess = self.system.get_process(pid) + for bp in bset: + if bp.is_disabled() or bp.is_one_shot(): + bp.enable(aProcess, None) + + # On error... + except: + + # Erase the newly defined breakpoints. + for bp in nset: + try: + self.erase_page_breakpoint(pid, bp.get_address()) + except: + pass + + # Pass the exception to the caller + raise + + # For each condition object, add the new buffer. + for condition in cset: + condition.add(bw) + + def __clear_buffer_watch_old_method(self, pid, address, size): + """ + Used by L{dont_watch_buffer} and L{dont_stalk_buffer}. + + @warn: Deprecated since WinAppDbg 1.5. + + @type pid: int + @param pid: Process global ID. + + @type address: int + @param address: Memory address of buffer to stop watching. + + @type size: int + @param size: Size in bytes of buffer to stop watching. + """ + warnings.warn("Deprecated since WinAppDbg 1.5", DeprecationWarning) + + # Check the size isn't zero or negative. + if size < 1: + raise ValueError("Bad size for buffer watch: %r" % size) + + # Get the base address and size in pages required for this buffer. 
+ base = MemoryAddresses.align_address_to_page_start(address) + limit = MemoryAddresses.align_address_to_page_end(address + size) + pages = MemoryAddresses.get_buffer_size_in_pages(address, size) + + # For each page, get the breakpoint and it's condition object. + # For each condition, remove the buffer. + # For each breakpoint, if no buffers are on watch, erase it. + cset = set() # condition objects + page_addr = base + pageSize = MemoryAddresses.pageSize + while page_addr < limit: + if self.has_page_breakpoint(pid, page_addr): + bp = self.get_page_breakpoint(pid, page_addr) + condition = bp.get_condition() + if condition not in cset: + if not isinstance(condition, _BufferWatchCondition): + # this shouldn't happen unless you tinkered with it + # or defined your own page breakpoints manually. + continue + cset.add(condition) + condition.remove_last_match(address, size) + if condition.count() == 0: + try: + self.erase_page_breakpoint(pid, bp.get_address()) + except WindowsError: + pass + page_addr = page_addr + pageSize + + def __clear_buffer_watch(self, bw): + """ + Used by L{dont_watch_buffer} and L{dont_stalk_buffer}. + + @type bw: L{BufferWatch} + @param bw: Buffer watch identifier. + """ + + # Get the PID and the start and end addresses of the buffer. + pid = bw.pid + start = bw.start + end = bw.end + + # Get the base address and size in pages required for the buffer. + base = MemoryAddresses.align_address_to_page_start(start) + limit = MemoryAddresses.align_address_to_page_end(end) + pages = MemoryAddresses.get_buffer_size_in_pages(start, end - start) + + # For each page, get the breakpoint and it's condition object. + # For each condition, remove the buffer. + # For each breakpoint, if no buffers are on watch, erase it. + cset = set() # condition objects + page_addr = base + pageSize = MemoryAddresses.pageSize + while page_addr < limit: + if self.has_page_breakpoint(pid, page_addr): + bp = self.get_page_breakpoint(pid, page_addr) + condition = bp.get_condition() + if condition not in cset: + if not isinstance(condition, _BufferWatchCondition): + # this shouldn't happen unless you tinkered with it + # or defined your own page breakpoints manually. + continue + cset.add(condition) + condition.remove(bw) + if condition.count() == 0: + try: + self.erase_page_breakpoint(pid, bp.get_address()) + except WindowsError: + msg = "Cannot remove page breakpoint at address %s" + msg = msg % HexDump.address( bp.get_address() ) + warnings.warn(msg, BreakpointWarning) + page_addr = page_addr + pageSize + + def watch_buffer(self, pid, address, size, action = None): + """ + Sets a page breakpoint and notifies when the given buffer is accessed. + + @see: L{dont_watch_variable} + + @type pid: int + @param pid: Process global ID. + + @type address: int + @param address: Memory address of buffer to watch. + + @type size: int + @param size: Size in bytes of buffer to watch. + + @type action: function + @param action: (Optional) Action callback function. + + See L{define_page_breakpoint} for more details. + + @rtype: L{BufferWatch} + @return: Buffer watch identifier. + """ + self.__set_buffer_watch(pid, address, size, action, False) + + def stalk_buffer(self, pid, address, size, action = None): + """ + Sets a one-shot page breakpoint and notifies + when the given buffer is accessed. + + @see: L{dont_watch_variable} + + @type pid: int + @param pid: Process global ID. + + @type address: int + @param address: Memory address of buffer to watch. + + @type size: int + @param size: Size in bytes of buffer to watch. 
+ + @type action: function + @param action: (Optional) Action callback function. + + See L{define_page_breakpoint} for more details. + + @rtype: L{BufferWatch} + @return: Buffer watch identifier. + """ + self.__set_buffer_watch(pid, address, size, action, True) + + def dont_watch_buffer(self, bw, *argv, **argd): + """ + Clears a page breakpoint set by L{watch_buffer}. + + @type bw: L{BufferWatch} + @param bw: + Buffer watch identifier returned by L{watch_buffer}. + """ + + # The sane way to do it. + if not (argv or argd): + self.__clear_buffer_watch(bw) + + # Backwards compatibility with WinAppDbg 1.4. + else: + argv = list(argv) + argv.insert(0, bw) + if 'pid' in argd: + argv.insert(0, argd.pop('pid')) + if 'address' in argd: + argv.insert(1, argd.pop('address')) + if 'size' in argd: + argv.insert(2, argd.pop('size')) + if argd: + raise TypeError("Wrong arguments for dont_watch_buffer()") + try: + pid, address, size = argv + except ValueError: + raise TypeError("Wrong arguments for dont_watch_buffer()") + self.__clear_buffer_watch_old_method(pid, address, size) + + def dont_stalk_buffer(self, bw, *argv, **argd): + """ + Clears a page breakpoint set by L{stalk_buffer}. + + @type bw: L{BufferWatch} + @param bw: + Buffer watch identifier returned by L{stalk_buffer}. + """ + self.dont_watch_buffer(bw, *argv, **argd) + +#------------------------------------------------------------------------------ + + # Tracing + +# XXX TODO +# Add "action" parameter to tracing mode + + def __start_tracing(self, thread): + """ + @type thread: L{Thread} + @param thread: Thread to start tracing. + """ + tid = thread.get_tid() + if not tid in self.__tracing: + thread.set_tf() + self.__tracing.add(tid) + + def __stop_tracing(self, thread): + """ + @type thread: L{Thread} + @param thread: Thread to stop tracing. + """ + tid = thread.get_tid() + if tid in self.__tracing: + self.__tracing.remove(tid) + if thread.is_alive(): + thread.clear_tf() + + def is_tracing(self, tid): + """ + @type tid: int + @param tid: Thread global ID. + + @rtype: bool + @return: C{True} if the thread is being traced, C{False} otherwise. + """ + return tid in self.__tracing + + def get_traced_tids(self): + """ + Retrieves the list of global IDs of all threads being traced. + + @rtype: list( int... ) + @return: List of thread global IDs. + """ + tids = list(self.__tracing) + tids.sort() + return tids + + def start_tracing(self, tid): + """ + Start tracing mode in the given thread. + + @type tid: int + @param tid: Global ID of thread to start tracing. + """ + if not self.is_tracing(tid): + thread = self.system.get_thread(tid) + self.__start_tracing(thread) + + def stop_tracing(self, tid): + """ + Stop tracing mode in the given thread. + + @type tid: int + @param tid: Global ID of thread to stop tracing. + """ + if self.is_tracing(tid): + thread = self.system.get_thread(tid) + self.__stop_tracing(thread) + + def start_tracing_process(self, pid): + """ + Start tracing mode for all threads in the given process. + + @type pid: int + @param pid: Global ID of process to start tracing. + """ + for thread in self.system.get_process(pid).iter_threads(): + self.__start_tracing(thread) + + def stop_tracing_process(self, pid): + """ + Stop tracing mode for all threads in the given process. + + @type pid: int + @param pid: Global ID of process to stop tracing. + """ + for thread in self.system.get_process(pid).iter_threads(): + self.__stop_tracing(thread) + + def start_tracing_all(self): + """ + Start tracing mode for all threads in all debugees. 
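+
+        @note: Traced threads raise a single step event for every instruction
+            executed, typically handled by the C{single_step} method of the
+            event handler, so tracing every thread in every debugee can be
+            very slow.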
+ """ + for pid in self.get_debugee_pids(): + self.start_tracing_process(pid) + + def stop_tracing_all(self): + """ + Stop tracing mode for all threads in all debugees. + """ + for pid in self.get_debugee_pids(): + self.stop_tracing_process(pid) + +#------------------------------------------------------------------------------ + + # Break on LastError values (only available since Windows Server 2003) + + def break_on_error(self, pid, errorCode): + """ + Sets or clears the system breakpoint for a given Win32 error code. + + Use L{Process.is_system_defined_breakpoint} to tell if a breakpoint + exception was caused by a system breakpoint or by the application + itself (for example because of a failed assertion in the code). + + @note: This functionality is only available since Windows Server 2003. + In 2003 it only breaks on error values set externally to the + kernel32.dll library, but this was fixed in Windows Vista. + + @warn: This method will fail if the debug symbols for ntdll (kernel32 + in Windows 2003) are not present. For more information see: + L{System.fix_symbol_store_path}. + + @see: U{http://www.nynaeve.net/?p=147} + + @type pid: int + @param pid: Process ID. + + @type errorCode: int + @param errorCode: Win32 error code to stop on. Set to C{0} or + C{ERROR_SUCCESS} to clear the breakpoint instead. + + @raise NotImplementedError: + The functionality is not supported in this system. + + @raise WindowsError: + An error occurred while processing this request. + """ + aProcess = self.system.get_process(pid) + address = aProcess.get_break_on_error_ptr() + if not address: + raise NotImplementedError( + "The functionality is not supported in this system.") + aProcess.write_dword(address, errorCode) + + def dont_break_on_error(self, pid): + """ + Alias to L{break_on_error}C{(pid, ERROR_SUCCESS)}. + + @type pid: int + @param pid: Process ID. + + @raise NotImplementedError: + The functionality is not supported in this system. + + @raise WindowsError: + An error occurred while processing this request. + """ + self.break_on_error(pid, 0) + +#------------------------------------------------------------------------------ + + # Simplified symbol resolving, useful for hooking functions + + def resolve_exported_function(self, pid, modName, procName): + """ + Resolves the exported DLL function for the given process. + + @type pid: int + @param pid: Process global ID. + + @type modName: str + @param modName: Name of the module that exports the function. + + @type procName: str + @param procName: Name of the exported function to resolve. + + @rtype: int, None + @return: On success, the address of the exported function. + On failure, returns C{None}. + """ + aProcess = self.system.get_process(pid) + aModule = aProcess.get_module_by_name(modName) + if not aModule: + aProcess.scan_modules() + aModule = aProcess.get_module_by_name(modName) + if aModule: + address = aModule.resolve(procName) + return address + return None + + def resolve_label(self, pid, label): + """ + Resolves a label for the given process. + + @type pid: int + @param pid: Process global ID. + + @type label: str + @param label: Label to resolve. + + @rtype: int + @return: Memory address pointed to by the label. + + @raise ValueError: The label is malformed or impossible to resolve. + @raise RuntimeError: Cannot resolve the module or function. 
+ """ + return self.get_process(pid).resolve_label(label) diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/compat.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/compat.py new file mode 100644 index 00000000..ad64901c --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/compat.py @@ -0,0 +1,183 @@ +# Partial copy of https://bitbucket.org/gutworth/six/src/8e634686c53a35092dd705172440a9231c90ddd1/six.py?at=default +# With some differences to take into account that the iterXXX version may be defined in user code. + +# Original __author__ = "Benjamin Peterson " +# Base __version__ = "1.7.3" + +# Copyright (c) 2010-2014 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +import sys +import types + + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). 
+ class X(object): + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +if PY3: + xrange = range + unicode = str + bytes = bytes + def iterkeys(d, **kw): + if hasattr(d, 'iterkeys'): + return iter(d.iterkeys(**kw)) + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + if hasattr(d, 'itervalues'): + return iter(d.itervalues(**kw)) + return iter(d.values(**kw)) + + def iteritems(d, **kw): + if hasattr(d, 'iteritems'): + return iter(d.iteritems(**kw)) + return iter(d.items(**kw)) + + def iterlists(d, **kw): + if hasattr(d, 'iterlists'): + return iter(d.iterlists(**kw)) + return iter(d.lists(**kw)) + + def keys(d, **kw): + return list(iterkeys(d, **kw)) +else: + unicode = unicode + xrange = xrange + bytes = str + def keys(d, **kw): + return d.keys(**kw) + + def iterkeys(d, **kw): + return iter(d.iterkeys(**kw)) + + def itervalues(d, **kw): + return iter(d.itervalues(**kw)) + + def iteritems(d, **kw): + return iter(d.iteritems(**kw)) + + def iterlists(d, **kw): + return iter(d.iterlists(**kw)) + +if PY3: + import builtins + exec_ = getattr(builtins, "exec") + + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if PY3: + import operator + def b(s): + if isinstance(s, str): + return s.encode("latin-1") + assert isinstance(s, bytes) + return s + def u(s): + return s + unichr = chr + if sys.version_info[1] <= 1: + def int2byte(i): + return bytes((i,)) + else: + # This is about 2x faster than the implementation above on 3.2+ + int2byte = operator.methodcaller("to_bytes", 1, "big") + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO +else: + def b(s): + return s + # Workaround for standalone backslash + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + def byte2int(bs): + return ord(bs[0]) + def indexbytes(buf, i): + return ord(buf[i]) + def iterbytes(buf): + return (ord(byte) for byte in buf) + import StringIO + StringIO = BytesIO = StringIO.StringIO \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/crash.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/crash.py new file mode 100644 index 00000000..a53172e5 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/crash.py @@ -0,0 +1,1853 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Crash dump support. + +@group Crash reporting: + Crash, CrashDictionary + +@group Warnings: + CrashWarning + +@group Deprecated classes: + CrashContainer, CrashTable, CrashTableMSSQL, + VolatileCrashContainer, DummyCrashContainer +""" + +__revision__ = "$Id$" + +__all__ = [ + + # Object that represents a crash in the debugee. + 'Crash', + + # Crash storage. + 'CrashDictionary', + + # Warnings. + 'CrashWarning', + + # Backwards compatibility with WinAppDbg 1.4 and before. + 'CrashContainer', + 'CrashTable', + 'CrashTableMSSQL', + 'VolatileCrashContainer', + 'DummyCrashContainer', +] + +from winappdbg import win32 +from winappdbg import compat +from winappdbg.system import System +from winappdbg.textio import HexDump, CrashDump +from winappdbg.util import StaticClass, MemoryAddresses, PathOperations + +import sys +import os +import time +import zlib +import warnings + +# lazy imports +sql = None +anydbm = None + +#============================================================================== + +# Secure alternative to pickle, use it if present. +try: + import cerealizer + pickle = cerealizer + + # There is no optimization function for cerealized objects. + def optimize(picklestring): + return picklestring + + # There is no HIGHEST_PROTOCOL in cerealizer. + HIGHEST_PROTOCOL = 0 + + # Note: it's important NOT to provide backwards compatibility, otherwise + # it'd be just the same as not having this! + # + # To disable this security upgrade simply uncomment the following line: + # + # raise ImportError("Fallback to pickle for backwards compatibility") + +# If cerealizer is not present fallback to the insecure pickle module. +except ImportError: + + # Faster implementation of the pickle module as a C extension. + try: + import cPickle as pickle + + # If all fails fallback to the classic pickle module. + except ImportError: + import pickle + + # Fetch the highest protocol version. + HIGHEST_PROTOCOL = pickle.HIGHEST_PROTOCOL + + # Try to use the pickle optimizer if found. + try: + from pickletools import optimize + except ImportError: + def optimize(picklestring): + return picklestring + +class Marshaller (StaticClass): + """ + Custom pickler for L{Crash} objects. Optimizes the pickled data when using + the standard C{pickle} (or C{cPickle}) module. The pickled data is then + compressed using zlib. 
+ """ + + @staticmethod + def dumps(obj, protocol=HIGHEST_PROTOCOL): + return zlib.compress(optimize(pickle.dumps(obj)), 9) + + @staticmethod + def loads(data): + return pickle.loads(zlib.decompress(data)) + +#============================================================================== + +class CrashWarning (Warning): + """ + An error occurred while gathering crash data. + Some data may be incomplete or missing. + """ + +#============================================================================== + +# Crash object. Must be serializable. +class Crash (object): + """ + Represents a crash, bug, or another interesting event in the debugee. + + @group Basic information: + timeStamp, signature, eventCode, eventName, pid, tid, arch, os, bits, + registers, labelPC, pc, sp, fp + + @group Optional information: + debugString, + modFileName, + lpBaseOfDll, + exceptionCode, + exceptionName, + exceptionDescription, + exceptionAddress, + exceptionLabel, + firstChance, + faultType, + faultAddress, + faultLabel, + isOurBreakpoint, + isSystemBreakpoint, + stackTrace, + stackTracePC, + stackTraceLabels, + stackTracePretty + + @group Extra information: + commandLine, + environment, + environmentData, + registersPeek, + stackRange, + stackFrame, + stackPeek, + faultCode, + faultMem, + faultPeek, + faultDisasm, + memoryMap + + @group Report: + briefReport, fullReport, notesReport, environmentReport, isExploitable + + @group Notes: + addNote, getNotes, iterNotes, hasNotes, clearNotes, notes + + @group Miscellaneous: + fetch_extra_data + + @type timeStamp: float + @ivar timeStamp: Timestamp as returned by time.time(). + + @type signature: object + @ivar signature: Approximately unique signature for the Crash object. + + This signature can be used as an heuristic to determine if two crashes + were caused by the same software error. Ideally it should be treated as + as opaque serializable object that can be tested for equality. + + @type notes: list( str ) + @ivar notes: List of strings, each string is a note. + + @type eventCode: int + @ivar eventCode: Event code as defined by the Win32 API. + + @type eventName: str + @ivar eventName: Event code user-friendly name. + + @type pid: int + @ivar pid: Process global ID. + + @type tid: int + @ivar tid: Thread global ID. + + @type arch: str + @ivar arch: Processor architecture. + + @type os: str + @ivar os: Operating system version. + + May indicate a 64 bit version even if L{arch} and L{bits} indicate 32 + bits. This means the crash occurred inside a WOW64 process. + + @type bits: int + @ivar bits: C{32} or C{64} bits. + + @type commandLine: None or str + @ivar commandLine: Command line for the target process. + + C{None} if unapplicable or unable to retrieve. + + @type environmentData: None or list of str + @ivar environmentData: Environment data for the target process. + + C{None} if unapplicable or unable to retrieve. + + @type environment: None or dict( str S{->} str ) + @ivar environment: Environment variables for the target process. + + C{None} if unapplicable or unable to retrieve. + + @type registers: dict( str S{->} int ) + @ivar registers: Dictionary mapping register names to their values. + + @type registersPeek: None or dict( str S{->} str ) + @ivar registersPeek: Dictionary mapping register names to the data they point to. + + C{None} if unapplicable or unable to retrieve. + + @type labelPC: None or str + @ivar labelPC: Label pointing to the program counter. + + C{None} or invalid if unapplicable or unable to retrieve. 
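Marshaller simply layers zlib compression on top of an (optionally optimized) pickle, so any picklable object round-trips through it. A minimal sketch of that round trip, assuming the vendored package is importable as winappdbg:

from winappdbg.crash import Marshaller

# Hedged example: serialize a plain dict the same way Crash objects are stored.
sample = {'pid': 1234, 'exceptionCode': 0xC0000005, 'notes': ['first chance']}
blob = Marshaller.dumps(sample)     # optimize(pickle.dumps(...)) compressed with zlib level 9
restored = Marshaller.loads(blob)   # zlib.decompress(...) followed by pickle.loads(...)
assert restored == sample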
+ + @type debugString: None or str + @ivar debugString: Debug string sent by the debugee. + + C{None} if unapplicable or unable to retrieve. + + @type exceptionCode: None or int + @ivar exceptionCode: Exception code as defined by the Win32 API. + + C{None} if unapplicable or unable to retrieve. + + @type exceptionName: None or str + @ivar exceptionName: Exception code user-friendly name. + + C{None} if unapplicable or unable to retrieve. + + @type exceptionDescription: None or str + @ivar exceptionDescription: Exception description. + + C{None} if unapplicable or unable to retrieve. + + @type exceptionAddress: None or int + @ivar exceptionAddress: Memory address where the exception occured. + + C{None} if unapplicable or unable to retrieve. + + @type exceptionLabel: None or str + @ivar exceptionLabel: Label pointing to the exception address. + + C{None} or invalid if unapplicable or unable to retrieve. + + @type faultType: None or int + @ivar faultType: Access violation type. + Only applicable to memory faults. + Should be one of the following constants: + + - L{win32.ACCESS_VIOLATION_TYPE_READ} + - L{win32.ACCESS_VIOLATION_TYPE_WRITE} + - L{win32.ACCESS_VIOLATION_TYPE_DEP} + + C{None} if unapplicable or unable to retrieve. + + @type faultAddress: None or int + @ivar faultAddress: Access violation memory address. + Only applicable to memory faults. + + C{None} if unapplicable or unable to retrieve. + + @type faultLabel: None or str + @ivar faultLabel: Label pointing to the access violation memory address. + Only applicable to memory faults. + + C{None} if unapplicable or unable to retrieve. + + @type firstChance: None or bool + @ivar firstChance: + C{True} for first chance exceptions, C{False} for second chance. + + C{None} if unapplicable or unable to retrieve. + + @type isOurBreakpoint: bool + @ivar isOurBreakpoint: + C{True} for breakpoints defined by the L{Debug} class, + C{False} otherwise. + + C{None} if unapplicable. + + @type isSystemBreakpoint: bool + @ivar isSystemBreakpoint: + C{True} for known system-defined breakpoints, + C{False} otherwise. + + C{None} if unapplicable. + + @type modFileName: None or str + @ivar modFileName: File name of module where the program counter points to. + + C{None} or invalid if unapplicable or unable to retrieve. + + @type lpBaseOfDll: None or int + @ivar lpBaseOfDll: Base of module where the program counter points to. + + C{None} if unapplicable or unable to retrieve. + + @type stackTrace: None or tuple of tuple( int, int, str ) + @ivar stackTrace: + Stack trace of the current thread as a tuple of + ( frame pointer, return address, module filename ). + + C{None} or empty if unapplicable or unable to retrieve. + + @type stackTracePretty: None or tuple of tuple( int, str ) + @ivar stackTracePretty: + Stack trace of the current thread as a tuple of + ( frame pointer, return location ). + + C{None} or empty if unapplicable or unable to retrieve. + + @type stackTracePC: None or tuple( int... ) + @ivar stackTracePC: Tuple of return addresses in the stack trace. + + C{None} or empty if unapplicable or unable to retrieve. + + @type stackTraceLabels: None or tuple( str... ) + @ivar stackTraceLabels: + Tuple of labels pointing to the return addresses in the stack trace. + + C{None} or empty if unapplicable or unable to retrieve. + + @type stackRange: tuple( int, int ) + @ivar stackRange: + Stack beginning and end pointers, in memory addresses order. + + C{None} if unapplicable or unable to retrieve. 
+ + @type stackFrame: None or str + @ivar stackFrame: Data pointed to by the stack pointer. + + C{None} or empty if unapplicable or unable to retrieve. + + @type stackPeek: None or dict( int S{->} str ) + @ivar stackPeek: Dictionary mapping stack offsets to the data they point to. + + C{None} or empty if unapplicable or unable to retrieve. + + @type faultCode: None or str + @ivar faultCode: Data pointed to by the program counter. + + C{None} or empty if unapplicable or unable to retrieve. + + @type faultMem: None or str + @ivar faultMem: Data pointed to by the exception address. + + C{None} or empty if unapplicable or unable to retrieve. + + @type faultPeek: None or dict( intS{->} str ) + @ivar faultPeek: Dictionary mapping guessed pointers at L{faultMem} to the data they point to. + + C{None} or empty if unapplicable or unable to retrieve. + + @type faultDisasm: None or tuple of tuple( long, int, str, str ) + @ivar faultDisasm: Dissassembly around the program counter. + + C{None} or empty if unapplicable or unable to retrieve. + + @type memoryMap: None or list of L{win32.MemoryBasicInformation} objects. + @ivar memoryMap: Memory snapshot of the program. May contain the actual + data from the entire process memory if requested. + See L{fetch_extra_data} for more details. + + C{None} or empty if unapplicable or unable to retrieve. + + @type _rowid: int + @ivar _rowid: Row ID in the database. Internally used by the DAO layer. + Only present in crash dumps retrieved from the database. Do not rely + on this property to be present in future versions of WinAppDbg. + """ + + def __init__(self, event): + """ + @type event: L{Event} + @param event: Event object for crash. + """ + + # First of all, take the timestamp. + self.timeStamp = time.time() + + # Notes are initially empty. + self.notes = list() + + # Get the process and thread, but dont't store them in the DB. + process = event.get_process() + thread = event.get_thread() + + # Determine the architecture. + self.os = System.os + self.arch = process.get_arch() + self.bits = process.get_bits() + + # The following properties are always retrieved for all events. + self.eventCode = event.get_event_code() + self.eventName = event.get_event_name() + self.pid = event.get_pid() + self.tid = event.get_tid() + self.registers = dict(thread.get_context()) + self.labelPC = process.get_label_at_address(self.pc) + + # The following properties are only retrieved for some events. + self.commandLine = None + self.environment = None + self.environmentData = None + self.registersPeek = None + self.debugString = None + self.modFileName = None + self.lpBaseOfDll = None + self.exceptionCode = None + self.exceptionName = None + self.exceptionDescription = None + self.exceptionAddress = None + self.exceptionLabel = None + self.firstChance = None + self.faultType = None + self.faultAddress = None + self.faultLabel = None + self.isOurBreakpoint = None + self.isSystemBreakpoint = None + self.stackTrace = None + self.stackTracePC = None + self.stackTraceLabels = None + self.stackTracePretty = None + self.stackRange = None + self.stackFrame = None + self.stackPeek = None + self.faultCode = None + self.faultMem = None + self.faultPeek = None + self.faultDisasm = None + self.memoryMap = None + + # Get information for debug string events. + if self.eventCode == win32.OUTPUT_DEBUG_STRING_EVENT: + self.debugString = event.get_debug_string() + + # Get information for module load and unload events. 
+ # For create and exit process events, get the information + # for the main module. + elif self.eventCode in (win32.CREATE_PROCESS_DEBUG_EVENT, + win32.EXIT_PROCESS_DEBUG_EVENT, + win32.LOAD_DLL_DEBUG_EVENT, + win32.UNLOAD_DLL_DEBUG_EVENT): + aModule = event.get_module() + self.modFileName = event.get_filename() + if not self.modFileName: + self.modFileName = aModule.get_filename() + self.lpBaseOfDll = event.get_module_base() + if not self.lpBaseOfDll: + self.lpBaseOfDll = aModule.get_base() + + # Get some information for exception events. + # To get the remaining information call fetch_extra_data(). + elif self.eventCode == win32.EXCEPTION_DEBUG_EVENT: + + # Exception information. + self.exceptionCode = event.get_exception_code() + self.exceptionName = event.get_exception_name() + self.exceptionDescription = event.get_exception_description() + self.exceptionAddress = event.get_exception_address() + self.firstChance = event.is_first_chance() + self.exceptionLabel = process.get_label_at_address( + self.exceptionAddress) + if self.exceptionCode in (win32.EXCEPTION_ACCESS_VIOLATION, + win32.EXCEPTION_GUARD_PAGE, + win32.EXCEPTION_IN_PAGE_ERROR): + self.faultType = event.get_fault_type() + self.faultAddress = event.get_fault_address() + self.faultLabel = process.get_label_at_address( + self.faultAddress) + elif self.exceptionCode in (win32.EXCEPTION_BREAKPOINT, + win32.EXCEPTION_SINGLE_STEP): + self.isOurBreakpoint = hasattr(event, 'breakpoint') \ + and event.breakpoint + self.isSystemBreakpoint = \ + process.is_system_defined_breakpoint(self.exceptionAddress) + + # Stack trace. + try: + self.stackTracePretty = thread.get_stack_trace_with_labels() + except Exception: + e = sys.exc_info()[1] + warnings.warn( + "Cannot get stack trace with labels, reason: %s" % str(e), + CrashWarning) + try: + self.stackTrace = thread.get_stack_trace() + stackTracePC = [ ra for (_,ra,_) in self.stackTrace ] + self.stackTracePC = tuple(stackTracePC) + stackTraceLabels = [ process.get_label_at_address(ra) \ + for ra in self.stackTracePC ] + self.stackTraceLabels = tuple(stackTraceLabels) + except Exception: + e = sys.exc_info()[1] + warnings.warn("Cannot get stack trace, reason: %s" % str(e), + CrashWarning) + + def fetch_extra_data(self, event, takeMemorySnapshot = 0): + """ + Fetch extra data from the L{Event} object. + + @note: Since this method may take a little longer to run, it's best to + call it only after you've determined the crash is interesting and + you want to save it. + + @type event: L{Event} + @param event: Event object for crash. + + @type takeMemorySnapshot: int + @param takeMemorySnapshot: + Memory snapshot behavior: + - C{0} to take no memory information (default). + - C{1} to take only the memory map. + See L{Process.get_memory_map}. + - C{2} to take a full memory snapshot. + See L{Process.take_memory_snapshot}. + - C{3} to take a live memory snapshot. + See L{Process.generate_memory_snapshot}. + """ + + # Get the process and thread, we'll use them below. + process = event.get_process() + thread = event.get_thread() + + # Get the command line for the target process. + try: + self.commandLine = process.get_command_line() + except Exception: + e = sys.exc_info()[1] + warnings.warn("Cannot get command line, reason: %s" % str(e), + CrashWarning) + + # Get the environment variables for the target process. 
+ try: + self.environmentData = process.get_environment_data() + self.environment = process.parse_environment_data( + self.environmentData) + except Exception: + e = sys.exc_info()[1] + warnings.warn("Cannot get environment, reason: %s" % str(e), + CrashWarning) + + # Data pointed to by registers. + self.registersPeek = thread.peek_pointers_in_registers() + + # Module where execution is taking place. + aModule = process.get_module_at_address(self.pc) + if aModule is not None: + self.modFileName = aModule.get_filename() + self.lpBaseOfDll = aModule.get_base() + + # Contents of the stack frame. + try: + self.stackRange = thread.get_stack_range() + except Exception: + e = sys.exc_info()[1] + warnings.warn("Cannot get stack range, reason: %s" % str(e), + CrashWarning) + try: + self.stackFrame = thread.get_stack_frame() + stackFrame = self.stackFrame + except Exception: + self.stackFrame = thread.peek_stack_data() + stackFrame = self.stackFrame[:64] + if stackFrame: + self.stackPeek = process.peek_pointers_in_data(stackFrame) + + # Code being executed. + self.faultCode = thread.peek_code_bytes() + try: + self.faultDisasm = thread.disassemble_around_pc(32) + except Exception: + e = sys.exc_info()[1] + warnings.warn("Cannot disassemble, reason: %s" % str(e), + CrashWarning) + + # For memory related exceptions, get the memory contents + # of the location that caused the exception to be raised. + if self.eventCode == win32.EXCEPTION_DEBUG_EVENT: + if self.pc != self.exceptionAddress and self.exceptionCode in ( + win32.EXCEPTION_ACCESS_VIOLATION, + win32.EXCEPTION_ARRAY_BOUNDS_EXCEEDED, + win32.EXCEPTION_DATATYPE_MISALIGNMENT, + win32.EXCEPTION_IN_PAGE_ERROR, + win32.EXCEPTION_STACK_OVERFLOW, + win32.EXCEPTION_GUARD_PAGE, + ): + self.faultMem = process.peek(self.exceptionAddress, 64) + if self.faultMem: + self.faultPeek = process.peek_pointers_in_data( + self.faultMem) + + # TODO: maybe add names and versions of DLLs and EXE? + + # Take a snapshot of the process memory. Additionally get the + # memory contents if requested. + if takeMemorySnapshot == 1: + self.memoryMap = process.get_memory_map() + mappedFilenames = process.get_mapped_filenames(self.memoryMap) + for mbi in self.memoryMap: + mbi.filename = mappedFilenames.get(mbi.BaseAddress, None) + mbi.content = None + elif takeMemorySnapshot == 2: + self.memoryMap = process.take_memory_snapshot() + elif takeMemorySnapshot == 3: + self.memoryMap = process.generate_memory_snapshot() + + @property + def pc(self): + """ + Value of the program counter register. + + @rtype: int + """ + try: + return self.registers['Eip'] # i386 + except KeyError: + return self.registers['Rip'] # amd64 + + @property + def sp(self): + """ + Value of the stack pointer register. + + @rtype: int + """ + try: + return self.registers['Esp'] # i386 + except KeyError: + return self.registers['Rsp'] # amd64 + + @property + def fp(self): + """ + Value of the frame pointer register. + + @rtype: int + """ + try: + return self.registers['Ebp'] # i386 + except KeyError: + return self.registers['Rbp'] # amd64 + + def __str__(self): + return self.fullReport() + + def key(self): + """ + Alias of L{signature}. Deprecated since WinAppDbg 1.5. 
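The constructor and fetch_extra_data() split data collection into a cheap phase (always safe to run) and an expensive phase (only worth running for crashes you intend to keep), and the pc/sp/fp properties hide the i386 versus amd64 register names. A hedged sketch from inside an event callback; the callback name and the event object are assumptions:

from winappdbg.crash import Crash

def handle_exception(event):              # assumed callback receiving a winappdbg Event
    crash = Crash(event)                  # cheap phase: registers, labels, stack trace
    crash.fetch_extra_data(event, takeMemorySnapshot=1)   # 1 = memory map only, no contents
    print(hex(crash.pc), hex(crash.sp), hex(crash.fp))    # architecture-neutral register access
    return crash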
+ """ + warnings.warn("Crash.key() method was deprecated in WinAppDbg 1.5", + DeprecationWarning) + return self.signature + + @property + def signature(self): + if self.labelPC: + pc = self.labelPC + else: + pc = self.pc + if self.stackTraceLabels: + trace = self.stackTraceLabels + else: + trace = self.stackTracePC + return ( + self.arch, + self.eventCode, + self.exceptionCode, + pc, + trace, + self.debugString, + ) + # TODO + # add the name and version of the binary where the crash happened? + + def isExploitable(self): + """ + Guess how likely is it that the bug causing the crash can be leveraged + into an exploitable vulnerability. + + @note: Don't take this as an equivalent of a real exploitability + analysis, that can only be done by a human being! This is only + a guideline, useful for example to sort crashes - placing the most + interesting ones at the top. + + @see: The heuristics are similar to those of the B{!exploitable} + extension for I{WinDBG}, which can be downloaded from here: + + U{http://www.codeplex.com/msecdbg} + + @rtype: tuple( str, str, str ) + @return: The first element of the tuple is the result of the analysis, + being one of the following: + + - Not an exception + - Not exploitable + - Not likely exploitable + - Unknown + - Probably exploitable + - Exploitable + + The second element of the tuple is a code to identify the matched + heuristic rule. + + The third element of the tuple is a description string of the + reason behind the result. + """ + + # Terminal rules + + if self.eventCode != win32.EXCEPTION_DEBUG_EVENT: + return ("Not an exception", "NotAnException", "The event is not an exception.") + + if self.stackRange and self.pc is not None and self.stackRange[0] <= self.pc < self.stackRange[1]: + return ("Exploitable", "StackCodeExecution", "Code execution from the stack is considered exploitable.") + + # This rule is NOT from !exploitable + if self.stackRange and self.sp is not None and not (self.stackRange[0] <= self.sp < self.stackRange[1]): + return ("Exploitable", "StackPointerCorruption", "Stack pointer corruption is considered exploitable.") + + if self.exceptionCode == win32.EXCEPTION_ILLEGAL_INSTRUCTION: + return ("Exploitable", "IllegalInstruction", "An illegal instruction exception indicates that the attacker controls execution flow.") + + if self.exceptionCode == win32.EXCEPTION_PRIV_INSTRUCTION: + return ("Exploitable", "PrivilegedInstruction", "A privileged instruction exception indicates that the attacker controls execution flow.") + + if self.exceptionCode == win32.EXCEPTION_GUARD_PAGE: + return ("Exploitable", "GuardPage", "A guard page violation indicates a stack overflow has occured, and the stack of another thread was reached (possibly the overflow length is not controlled by the attacker).") + + if self.exceptionCode == win32.STATUS_STACK_BUFFER_OVERRUN: + return ("Exploitable", "GSViolation", "An overrun of a protected stack buffer has been detected. This is considered exploitable, and must be fixed.") + + if self.exceptionCode == win32.STATUS_HEAP_CORRUPTION: + return ("Exploitable", "HeapCorruption", "Heap Corruption has been detected. 
This is considered exploitable, and must be fixed.") + + if self.exceptionCode == win32.EXCEPTION_ACCESS_VIOLATION: + nearNull = self.faultAddress is None or MemoryAddresses.align_address_to_page_start(self.faultAddress) == 0 + controlFlow = self.__is_control_flow() + blockDataMove = self.__is_block_data_move() + if self.faultType == win32.EXCEPTION_EXECUTE_FAULT: + if nearNull: + return ("Probably exploitable", "DEPViolation", "User mode DEP access violations are probably exploitable if near NULL.") + else: + return ("Exploitable", "DEPViolation", "User mode DEP access violations are exploitable.") + elif self.faultType == win32.EXCEPTION_WRITE_FAULT: + if nearNull: + return ("Probably exploitable", "WriteAV", "User mode write access violations that are near NULL are probably exploitable.") + else: + return ("Exploitable", "WriteAV", "User mode write access violations that are not near NULL are exploitable.") + elif self.faultType == win32.EXCEPTION_READ_FAULT: + if self.faultAddress == self.pc: + if nearNull: + return ("Probably exploitable", "ReadAVonIP", "Access violations at the instruction pointer are probably exploitable if near NULL.") + else: + return ("Exploitable", "ReadAVonIP", "Access violations at the instruction pointer are exploitable if not near NULL.") + if controlFlow: + if nearNull: + return ("Probably exploitable", "ReadAVonControlFlow", "Access violations near null in control flow instructions are considered probably exploitable.") + else: + return ("Exploitable", "ReadAVonControlFlow", "Access violations not near null in control flow instructions are considered exploitable.") + if blockDataMove: + return ("Probably exploitable", "ReadAVonBlockMove", "This is a read access violation in a block data move, and is therefore classified as probably exploitable.") + + # Rule: Tainted information used to control branch addresses is considered probably exploitable + # Rule: Tainted information used to control the target of a later write is probably exploitable + + # Non terminal rules + + # XXX TODO add rule to check if code is in writeable memory (probably exploitable) + + # XXX TODO maybe we should be returning a list of tuples instead? + + result = ("Unknown", "Unknown", "Exploitability unknown.") + + if self.exceptionCode == win32.EXCEPTION_ACCESS_VIOLATION: + if self.faultType == win32.EXCEPTION_READ_FAULT: + if nearNull: + result = ("Not likely exploitable", "ReadAVNearNull", "This is a user mode read access violation near null, and is probably not exploitable.") + + elif self.exceptionCode == win32.EXCEPTION_INT_DIVIDE_BY_ZERO: + result = ("Not likely exploitable", "DivideByZero", "This is an integer divide by zero, and is probably not exploitable.") + + elif self.exceptionCode == win32.EXCEPTION_FLT_DIVIDE_BY_ZERO: + result = ("Not likely exploitable", "DivideByZero", "This is a floating point divide by zero, and is probably not exploitable.") + + elif self.exceptionCode in (win32.EXCEPTION_BREAKPOINT, win32.STATUS_WX86_BREAKPOINT): + result = ("Unknown", "Breakpoint", "While a breakpoint itself is probably not exploitable, it may also be an indication that an attacker is testing a target. 
In either case breakpoints should not exist in production code.") + + # Rule: If the stack contains unknown symbols in user mode, call that out + # Rule: Tainted information used to control the source of a later block move unknown, but called out explicitly + # Rule: Tainted information used as an argument to a function is an unknown risk, but called out explicitly + # Rule: Tainted information used to control branch selection is an unknown risk, but called out explicitly + + return result + + def __is_control_flow(self): + """ + Private method to tell if the instruction pointed to by the program + counter is a control flow instruction. + + Currently only works for x86 and amd64 architectures. + """ + jump_instructions = ( + 'jmp', 'jecxz', 'jcxz', + 'ja', 'jnbe', 'jae', 'jnb', 'jb', 'jnae', 'jbe', 'jna', 'jc', 'je', + 'jz', 'jnc', 'jne', 'jnz', 'jnp', 'jpo', 'jp', 'jpe', 'jg', 'jnle', + 'jge', 'jnl', 'jl', 'jnge', 'jle', 'jng', 'jno', 'jns', 'jo', 'js' + ) + call_instructions = ( 'call', 'ret', 'retn' ) + loop_instructions = ( 'loop', 'loopz', 'loopnz', 'loope', 'loopne' ) + control_flow_instructions = call_instructions + loop_instructions + \ + jump_instructions + isControlFlow = False + instruction = None + if self.pc is not None and self.faultDisasm: + for disasm in self.faultDisasm: + if disasm[0] == self.pc: + instruction = disasm[2].lower().strip() + break + if instruction: + for x in control_flow_instructions: + if x in instruction: + isControlFlow = True + break + return isControlFlow + + def __is_block_data_move(self): + """ + Private method to tell if the instruction pointed to by the program + counter is a block data move instruction. + + Currently only works for x86 and amd64 architectures. + """ + block_data_move_instructions = ('movs', 'stos', 'lods') + isBlockDataMove = False + instruction = None + if self.pc is not None and self.faultDisasm: + for disasm in self.faultDisasm: + if disasm[0] == self.pc: + instruction = disasm[2].lower().strip() + break + if instruction: + for x in block_data_move_instructions: + if x in instruction: + isBlockDataMove = True + break + return isBlockDataMove + + def briefReport(self): + """ + @rtype: str + @return: Short description of the event. 
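Because isExploitable() returns a (verdict, rule, description) tuple, a batch of crashes can be triaged by verdict. A small illustrative sketch; the ranking order and the my_crashes collection are assumptions:

# Rank verdicts from most to least interesting (order chosen for illustration only).
ORDER = ["Exploitable", "Probably exploitable", "Unknown",
         "Not likely exploitable", "Not exploitable", "Not an exception"]

def triage(crashes):
    return sorted(crashes, key=lambda c: ORDER.index(c.isExploitable()[0]))

for crash in triage(my_crashes):          # my_crashes: iterable of populated Crash objects
    verdict, rule, reason = crash.isExploitable()
    print("%-22s %-20s %s" % (verdict, rule, reason))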
+ """ + if self.exceptionCode is not None: + if self.exceptionCode == win32.EXCEPTION_BREAKPOINT: + if self.isOurBreakpoint: + what = "Breakpoint hit" + elif self.isSystemBreakpoint: + what = "System breakpoint hit" + else: + what = "Assertion failed" + elif self.exceptionDescription: + what = self.exceptionDescription + elif self.exceptionName: + what = self.exceptionName + else: + what = "Exception %s" % \ + HexDump.integer(self.exceptionCode, self.bits) + if self.firstChance: + chance = 'first' + else: + chance = 'second' + if self.exceptionLabel: + where = self.exceptionLabel + elif self.exceptionAddress: + where = HexDump.address(self.exceptionAddress, self.bits) + elif self.labelPC: + where = self.labelPC + else: + where = HexDump.address(self.pc, self.bits) + msg = "%s (%s chance) at %s" % (what, chance, where) + elif self.debugString is not None: + if self.labelPC: + where = self.labelPC + else: + where = HexDump.address(self.pc, self.bits) + msg = "Debug string from %s: %r" % (where, self.debugString) + else: + if self.labelPC: + where = self.labelPC + else: + where = HexDump.address(self.pc, self.bits) + msg = "%s (%s) at %s" % ( + self.eventName, + HexDump.integer(self.eventCode, self.bits), + where + ) + return msg + + def fullReport(self, bShowNotes = True): + """ + @type bShowNotes: bool + @param bShowNotes: C{True} to show the user notes, C{False} otherwise. + + @rtype: str + @return: Long description of the event. + """ + msg = self.briefReport() + msg += '\n' + + if self.bits == 32: + width = 16 + else: + width = 8 + + if self.eventCode == win32.EXCEPTION_DEBUG_EVENT: + (exploitability, expcode, expdescription) = self.isExploitable() + msg += '\nSecurity risk level: %s\n' % exploitability + msg += ' %s\n' % expdescription + + if bShowNotes and self.notes: + msg += '\nNotes:\n' + msg += self.notesReport() + + if self.commandLine: + msg += '\nCommand line: %s\n' % self.commandLine + + if self.environment: + msg += '\nEnvironment:\n' + msg += self.environmentReport() + + if not self.labelPC: + base = HexDump.address(self.lpBaseOfDll, self.bits) + if self.modFileName: + fn = PathOperations.pathname_to_filename(self.modFileName) + msg += '\nRunning in %s (%s)\n' % (fn, base) + else: + msg += '\nRunning in module at %s\n' % base + + if self.registers: + msg += '\nRegisters:\n' + msg += CrashDump.dump_registers(self.registers) + if self.registersPeek: + msg += '\n' + msg += CrashDump.dump_registers_peek(self.registers, + self.registersPeek, + width = width) + + if self.faultDisasm: + msg += '\nCode disassembly:\n' + msg += CrashDump.dump_code(self.faultDisasm, self.pc, + bits = self.bits) + + if self.stackTrace: + msg += '\nStack trace:\n' + if self.stackTracePretty: + msg += CrashDump.dump_stack_trace_with_labels( + self.stackTracePretty, + bits = self.bits) + else: + msg += CrashDump.dump_stack_trace(self.stackTrace, + bits = self.bits) + + if self.stackFrame: + if self.stackPeek: + msg += '\nStack pointers:\n' + msg += CrashDump.dump_stack_peek(self.stackPeek, width = width) + msg += '\nStack dump:\n' + msg += HexDump.hexblock(self.stackFrame, self.sp, + bits = self.bits, width = width) + + if self.faultCode and not self.modFileName: + msg += '\nCode dump:\n' + msg += HexDump.hexblock(self.faultCode, self.pc, + bits = self.bits, width = width) + + if self.faultMem: + if self.faultPeek: + msg += '\nException address pointers:\n' + msg += CrashDump.dump_data_peek(self.faultPeek, + self.exceptionAddress, + bits = self.bits, + width = width) + msg += '\nException address dump:\n' 
+ msg += HexDump.hexblock(self.faultMem, self.exceptionAddress, + bits = self.bits, width = width) + + if self.memoryMap: + msg += '\nMemory map:\n' + mappedFileNames = dict() + for mbi in self.memoryMap: + if hasattr(mbi, 'filename') and mbi.filename: + mappedFileNames[mbi.BaseAddress] = mbi.filename + msg += CrashDump.dump_memory_map(self.memoryMap, mappedFileNames, + bits = self.bits) + + if not msg.endswith('\n\n'): + if not msg.endswith('\n'): + msg += '\n' + msg += '\n' + return msg + + def environmentReport(self): + """ + @rtype: str + @return: The process environment variables, + merged and formatted for a report. + """ + msg = '' + if self.environment: + for key, value in compat.iteritems(self.environment): + msg += ' %s=%s\n' % (key, value) + return msg + + def notesReport(self): + """ + @rtype: str + @return: All notes, merged and formatted for a report. + """ + msg = '' + if self.notes: + for n in self.notes: + n = n.strip('\n') + if '\n' in n: + n = n.strip('\n') + msg += ' * %s\n' % n.pop(0) + for x in n: + msg += ' %s\n' % x + else: + msg += ' * %s\n' % n + return msg + + def addNote(self, msg): + """ + Add a note to the crash event. + + @type msg: str + @param msg: Note text. + """ + self.notes.append(msg) + + def clearNotes(self): + """ + Clear the notes of this crash event. + """ + self.notes = list() + + def getNotes(self): + """ + Get the list of notes of this crash event. + + @rtype: list( str ) + @return: List of notes. + """ + return self.notes + + def iterNotes(self): + """ + Iterate the notes of this crash event. + + @rtype: listiterator + @return: Iterator of the list of notes. + """ + return self.notes.__iter__() + + def hasNotes(self): + """ + @rtype: bool + @return: C{True} if there are notes for this crash event. + """ + return bool( self.notes ) + +#============================================================================== + +class CrashContainer (object): + """ + Old crash dump persistencer using a DBM database. + Doesn't support duplicate crashes. + + @warning: + DBM database support is provided for backwards compatibility with older + versions of WinAppDbg. New applications should not use this class. + Also, DBM databases in Python suffer from multiple problems that can + easily be avoided by switching to a SQL database. + + @see: If you really must use a DBM database, try the standard C{shelve} + module instead: U{http://docs.python.org/library/shelve.html} + + @group Marshalling configuration: + optimizeKeys, optimizeValues, compressKeys, compressValues, escapeKeys, + escapeValues, binaryKeys, binaryValues + + @type optimizeKeys: bool + @cvar optimizeKeys: Ignored by the current implementation. + + Up to WinAppDbg 1.4 this setting caused the database keys to be + optimized when pickled with the standard C{pickle} module. + + But with a DBM database backend that causes inconsistencies, since the + same key can be serialized into multiple optimized pickles, thus losing + uniqueness. + + @type optimizeValues: bool + @cvar optimizeValues: C{True} to optimize the marshalling of keys, C{False} + otherwise. Only used with the C{pickle} module, ignored when using the + more secure C{cerealizer} module. + + @type compressKeys: bool + @cvar compressKeys: C{True} to compress keys when marshalling, C{False} + to leave them uncompressed. + + @type compressValues: bool + @cvar compressValues: C{True} to compress values when marshalling, C{False} + to leave them uncompressed. 
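The reporting helpers only include the sections that were actually collected, and analyst notes added with addNote() are folded into the full report. A hedged sketch of producing a report; crash is assumed to be a populated Crash instance:

crash.addNote("found while fuzzing the file parser")   # single free-form note
print(crash.briefReport())                             # one-line summary
with open("crash_report.txt", "w") as report_file:     # file name is a placeholder
    report_file.write(crash.fullReport(bShowNotes=True))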
+ + @type escapeKeys: bool + @cvar escapeKeys: C{True} to escape keys when marshalling, C{False} + to leave them uncompressed. + + @type escapeValues: bool + @cvar escapeValues: C{True} to escape values when marshalling, C{False} + to leave them uncompressed. + + @type binaryKeys: bool + @cvar binaryKeys: C{True} to marshall keys to binary format (the Python + C{buffer} type), C{False} to use text marshalled keys (C{str} type). + + @type binaryValues: bool + @cvar binaryValues: C{True} to marshall values to binary format (the Python + C{buffer} type), C{False} to use text marshalled values (C{str} type). + """ + + optimizeKeys = False + optimizeValues = True + compressKeys = False + compressValues = True + escapeKeys = False + escapeValues = False + binaryKeys = False + binaryValues = False + + def __init__(self, filename = None, allowRepeatedKeys = False): + """ + @type filename: str + @param filename: (Optional) File name for crash database. + If no filename is specified, the container is volatile. + + Volatile containers are stored only in memory and + destroyed when they go out of scope. + + @type allowRepeatedKeys: bool + @param allowRepeatedKeys: + Currently not supported, always use C{False}. + """ + if allowRepeatedKeys: + raise NotImplementedError() + self.__filename = filename + if filename: + global anydbm + if not anydbm: + import anydbm + self.__db = anydbm.open(filename, 'c') + self.__keys = dict([ (self.unmarshall_key(mk), mk) + for mk in self.__db.keys() ]) + else: + self.__db = dict() + self.__keys = dict() + + def remove_key(self, key): + """ + Removes the given key from the set of known keys. + + @type key: L{Crash} key. + @param key: Key to remove. + """ + del self.__keys[key] + + def marshall_key(self, key): + """ + Marshalls a Crash key to be used in the database. + + @see: L{__init__} + + @type key: L{Crash} key. + @param key: Key to convert. + + @rtype: str or buffer + @return: Converted key. + """ + if key in self.__keys: + return self.__keys[key] + skey = pickle.dumps(key, protocol = 0) + if self.compressKeys: + skey = zlib.compress(skey, zlib.Z_BEST_COMPRESSION) + if self.escapeKeys: + skey = skey.encode('hex') + if self.binaryKeys: + skey = buffer(skey) + self.__keys[key] = skey + return skey + + def unmarshall_key(self, key): + """ + Unmarshalls a Crash key read from the database. + + @type key: str or buffer + @param key: Key to convert. + + @rtype: L{Crash} key. + @return: Converted key. + """ + key = str(key) + if self.escapeKeys: + key = key.decode('hex') + if self.compressKeys: + key = zlib.decompress(key) + key = pickle.loads(key) + return key + + def marshall_value(self, value, storeMemoryMap = False): + """ + Marshalls a Crash object to be used in the database. + By default the C{memoryMap} member is B{NOT} stored here. + + @warning: Setting the C{storeMemoryMap} argument to C{True} can lead to + a severe performance penalty! + + @type value: L{Crash} + @param value: Object to convert. + + @type storeMemoryMap: bool + @param storeMemoryMap: C{True} to store the memory map, C{False} + otherwise. + + @rtype: str + @return: Converted object. 
+ """ + if hasattr(value, 'memoryMap'): + crash = value + memoryMap = crash.memoryMap + try: + crash.memoryMap = None + if storeMemoryMap and memoryMap is not None: + # convert the generator to a list + crash.memoryMap = list(memoryMap) + if self.optimizeValues: + value = pickle.dumps(crash, protocol = HIGHEST_PROTOCOL) + value = optimize(value) + else: + value = pickle.dumps(crash, protocol = 0) + finally: + crash.memoryMap = memoryMap + del memoryMap + del crash + if self.compressValues: + value = zlib.compress(value, zlib.Z_BEST_COMPRESSION) + if self.escapeValues: + value = value.encode('hex') + if self.binaryValues: + value = buffer(value) + return value + + def unmarshall_value(self, value): + """ + Unmarshalls a Crash object read from the database. + + @type value: str + @param value: Object to convert. + + @rtype: L{Crash} + @return: Converted object. + """ + value = str(value) + if self.escapeValues: + value = value.decode('hex') + if self.compressValues: + value = zlib.decompress(value) + value = pickle.loads(value) + return value + + # The interface is meant to be similar to a Python set. + # However it may not be necessary to implement all of the set methods. + # Other methods like get, has_key, iterkeys and itervalues + # are dictionary-like. + + def __len__(self): + """ + @rtype: int + @return: Count of known keys. + """ + return len(self.__keys) + + def __bool__(self): + """ + @rtype: bool + @return: C{False} if there are no known keys. + """ + return bool(self.__keys) + + def __contains__(self, crash): + """ + @type crash: L{Crash} + @param crash: Crash object. + + @rtype: bool + @return: + C{True} if a Crash object with the same key is in the container. + """ + return self.has_key( crash.key() ) + + def has_key(self, key): + """ + @type key: L{Crash} key. + @param key: Key to find. + + @rtype: bool + @return: C{True} if the key is present in the set of known keys. + """ + return key in self.__keys + + def iterkeys(self): + """ + @rtype: iterator + @return: Iterator of known L{Crash} keys. + """ + return compat.iterkeys(self.__keys) + + class __CrashContainerIterator (object): + """ + Iterator of Crash objects. Returned by L{CrashContainer.__iter__}. + """ + + def __init__(self, container): + """ + @type container: L{CrashContainer} + @param container: Crash set to iterate. + """ + # It's important to keep a reference to the CrashContainer, + # rather than it's underlying database. + # Otherwise the destructor of CrashContainer may close the + # database while we're still iterating it. + # + # TODO: lock the database when iterating it. + # + self.__container = container + self.__keys_iter = compat.iterkeys(container) + + def next(self): + """ + @rtype: L{Crash} + @return: A B{copy} of a Crash object in the L{CrashContainer}. + @raise StopIteration: No more items left. + """ + key = self.__keys_iter.next() + return self.__container.get(key) + + def __del__(self): + "Class destructor. Closes the database when this object is destroyed." + try: + if self.__filename: + self.__db.close() + except: + pass + + def __iter__(self): + """ + @see: L{itervalues} + @rtype: iterator + @return: Iterator of the contained L{Crash} objects. + """ + return self.itervalues() + + def itervalues(self): + """ + @rtype: iterator + @return: Iterator of the contained L{Crash} objects. + + @warning: A B{copy} of each object is returned, + so any changes made to them will be lost. + + To preserve changes do the following: + 1. Keep a reference to the object. + 2. Delete the object from the set. + 3. 
Modify the object and add it again. + """ + return self.__CrashContainerIterator(self) + + def add(self, crash): + """ + Adds a new crash to the container. + If the crash appears to be already known, it's ignored. + + @see: L{Crash.key} + + @type crash: L{Crash} + @param crash: Crash object to add. + """ + if crash not in self: + key = crash.key() + skey = self.marshall_key(key) + data = self.marshall_value(crash, storeMemoryMap = True) + self.__db[skey] = data + + def __delitem__(self, key): + """ + Removes a crash from the container. + + @type key: L{Crash} unique key. + @param key: Key of the crash to get. + """ + skey = self.marshall_key(key) + del self.__db[skey] + self.remove_key(key) + + def remove(self, crash): + """ + Removes a crash from the container. + + @type crash: L{Crash} + @param crash: Crash object to remove. + """ + del self[ crash.key() ] + + def get(self, key): + """ + Retrieves a crash from the container. + + @type key: L{Crash} unique key. + @param key: Key of the crash to get. + + @rtype: L{Crash} object. + @return: Crash matching the given key. + + @see: L{iterkeys} + @warning: A B{copy} of each object is returned, + so any changes made to them will be lost. + + To preserve changes do the following: + 1. Keep a reference to the object. + 2. Delete the object from the set. + 3. Modify the object and add it again. + """ + skey = self.marshall_key(key) + data = self.__db[skey] + crash = self.unmarshall_value(data) + return crash + + def __getitem__(self, key): + """ + Retrieves a crash from the container. + + @type key: L{Crash} unique key. + @param key: Key of the crash to get. + + @rtype: L{Crash} object. + @return: Crash matching the given key. + + @see: L{iterkeys} + @warning: A B{copy} of each object is returned, + so any changes made to them will be lost. + + To preserve changes do the following: + 1. Keep a reference to the object. + 2. Delete the object from the set. + 3. Modify the object and add it again. + """ + return self.get(key) + +#============================================================================== + +class CrashDictionary(object): + """ + Dictionary-like persistence interface for L{Crash} objects. + + Currently the only implementation is through L{sql.CrashDAO}. + """ + + def __init__(self, url, creator = None, allowRepeatedKeys = True): + """ + @type url: str + @param url: Connection URL of the crash database. + See L{sql.CrashDAO.__init__} for more details. + + @type creator: callable + @param creator: (Optional) Callback function that creates the SQL + database connection. + + Normally it's not necessary to use this argument. However in some + odd cases you may need to customize the database connection, for + example when using the integrated authentication in MSSQL. + + @type allowRepeatedKeys: bool + @param allowRepeatedKeys: + If C{True} all L{Crash} objects are stored. + + If C{False} any L{Crash} object with the same signature as a + previously existing object will be ignored. + """ + global sql + if sql is None: + from winappdbg import sql + self._allowRepeatedKeys = allowRepeatedKeys + self._dao = sql.CrashDAO(url, creator) + + def add(self, crash): + """ + Adds a new crash to the container. + + @note: + When the C{allowRepeatedKeys} parameter of the constructor + is set to C{False}, duplicated crashes are ignored. + + @see: L{Crash.key} + + @type crash: L{Crash} + @param crash: Crash object to add. + """ + self._dao.add(crash, self._allowRepeatedKeys) + + def get(self, key): + """ + Retrieves a crash from the container. 
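CrashContainer above behaves like a persistent set keyed by Crash.key(): adding an already-known crash is a no-op, and get() hands back copies rather than live objects. A minimal sketch against a throwaway DBM file; the file name and the crash variable are assumptions:

from winappdbg.crash import CrashContainer

container = CrashContainer("crashes.dbm")   # omit the filename for a volatile in-memory set
container.add(crash)                        # ignored if a crash with the same key is stored
for key in container.iterkeys():
    stored = container.get(key)             # a copy; re-add it after modifying
    print(stored.timeStamp, stored.briefReport())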
+ + @type key: L{Crash} signature. + @param key: Heuristic signature of the crash to get. + + @rtype: L{Crash} object. + @return: Crash matching the given signature. If more than one is found, + retrieve the newest one. + + @see: L{iterkeys} + @warning: A B{copy} of each object is returned, + so any changes made to them will be lost. + + To preserve changes do the following: + 1. Keep a reference to the object. + 2. Delete the object from the set. + 3. Modify the object and add it again. + """ + found = self._dao.find(signature=key, limit=1, order=-1) + if not found: + raise KeyError(key) + return found[0] + + def __iter__(self): + """ + @rtype: iterator + @return: Iterator of the contained L{Crash} objects. + """ + offset = 0 + limit = 10 + while 1: + found = self._dao.find(offset=offset, limit=limit) + if not found: + break + offset += len(found) + for crash in found: + yield crash + + def itervalues(self): + """ + @rtype: iterator + @return: Iterator of the contained L{Crash} objects. + """ + return self.__iter__() + + def iterkeys(self): + """ + @rtype: iterator + @return: Iterator of the contained L{Crash} heuristic signatures. + """ + for crash in self: + yield crash.signature # FIXME this gives repeated results! + + def __contains__(self, crash): + """ + @type crash: L{Crash} + @param crash: Crash object. + + @rtype: bool + @return: C{True} if the Crash object is in the container. + """ + return self._dao.count(signature=crash.signature) > 0 + + def has_key(self, key): + """ + @type key: L{Crash} signature. + @param key: Heuristic signature of the crash to get. + + @rtype: bool + @return: C{True} if a matching L{Crash} object is in the container. + """ + return self._dao.count(signature=key) > 0 + + def __len__(self): + """ + @rtype: int + @return: Count of L{Crash} elements in the container. + """ + return self._dao.count() + + def __bool__(self): + """ + @rtype: bool + @return: C{False} if the container is empty. + """ + return bool( len(self) ) + +class CrashTable(CrashDictionary): + """ + Old crash dump persistencer using a SQLite database. + + @warning: + Superceded by L{CrashDictionary} since WinAppDbg 1.5. + New applications should not use this class. + """ + + def __init__(self, location = None, allowRepeatedKeys = True): + """ + @type location: str + @param location: (Optional) Location of the crash database. + If the location is a filename, it's an SQLite database file. + + If no location is specified, the container is volatile. + Volatile containers are stored only in memory and + destroyed when they go out of scope. + + @type allowRepeatedKeys: bool + @param allowRepeatedKeys: + If C{True} all L{Crash} objects are stored. + + If C{False} any L{Crash} object with the same signature as a + previously existing object will be ignored. + """ + warnings.warn( + "The %s class is deprecated since WinAppDbg 1.5." % self.__class__, + DeprecationWarning) + if location: + url = "sqlite:///%s" % location + else: + url = "sqlite://" + super(CrashTable, self).__init__(url, allowRepeatedKeys) + +class CrashTableMSSQL (CrashDictionary): + """ + Old crash dump persistencer using a Microsoft SQL Server database. + + @warning: + Superceded by L{CrashDictionary} since WinAppDbg 1.5. + New applications should not use this class. + """ + + def __init__(self, location = None, allowRepeatedKeys = True): + """ + @type location: str + @param location: Location of the crash database. + It must be an ODBC connection string. 
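CrashDictionary persists crashes through sql.CrashDAO, so the constructor only needs a database URL (the optional SQL back end must be installed). A hedged sketch using a local SQLite file, the same URL scheme CrashTable builds internally; the file name and the crash variable are assumptions:

from winappdbg.crash import CrashDictionary

db = CrashDictionary("sqlite:///crashes.db")   # in-memory alternative: "sqlite://"
db.add(crash)                                  # duplicates kept unless allowRepeatedKeys=False
print(len(db), "crashes stored")
for stored in db:                              # fetched from the DAO in pages of 10
    print(stored.signature)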
+ + @type allowRepeatedKeys: bool + @param allowRepeatedKeys: + If C{True} all L{Crash} objects are stored. + + If C{False} any L{Crash} object with the same signature as a + previously existing object will be ignored. + """ + warnings.warn( + "The %s class is deprecated since WinAppDbg 1.5." % self.__class__, + DeprecationWarning) + import urllib + url = "mssql+pyodbc:///?odbc_connect=" + urllib.quote_plus(location) + super(CrashTableMSSQL, self).__init__(url, allowRepeatedKeys) + +class VolatileCrashContainer (CrashTable): + """ + Old in-memory crash dump storage. + + @warning: + Superceded by L{CrashDictionary} since WinAppDbg 1.5. + New applications should not use this class. + """ + + def __init__(self, allowRepeatedKeys = True): + """ + Volatile containers are stored only in memory and + destroyed when they go out of scope. + + @type allowRepeatedKeys: bool + @param allowRepeatedKeys: + If C{True} all L{Crash} objects are stored. + + If C{False} any L{Crash} object with the same key as a + previously existing object will be ignored. + """ + super(VolatileCrashContainer, self).__init__( + allowRepeatedKeys=allowRepeatedKeys) + +class DummyCrashContainer(object): + """ + Fakes a database of volatile Crash objects, + trying to mimic part of it's interface, but + doesn't actually store anything. + + Normally applications don't need to use this. + + @see: L{CrashDictionary} + """ + + def __init__(self, allowRepeatedKeys = True): + """ + Fake containers don't store L{Crash} objects, but they implement the + interface properly. + + @type allowRepeatedKeys: bool + @param allowRepeatedKeys: + Mimics the duplicate filter behavior found in real containers. + """ + self.__keys = set() + self.__count = 0 + self.__allowRepeatedKeys = allowRepeatedKeys + + def __contains__(self, crash): + """ + @type crash: L{Crash} + @param crash: Crash object. + + @rtype: bool + @return: C{True} if the Crash object is in the container. + """ + return crash.signature in self.__keys + + def __len__(self): + """ + @rtype: int + @return: Count of L{Crash} elements in the container. + """ + if self.__allowRepeatedKeys: + return self.__count + return len( self.__keys ) + + def __bool__(self): + """ + @rtype: bool + @return: C{False} if the container is empty. + """ + return bool( len(self) ) + + def add(self, crash): + """ + Adds a new crash to the container. + + @note: + When the C{allowRepeatedKeys} parameter of the constructor + is set to C{False}, duplicated crashes are ignored. + + @see: L{Crash.key} + + @type crash: L{Crash} + @param crash: Crash object to add. + """ + self.__keys.add( crash.signature ) + self.__count += 1 + + def get(self, key): + """ + This method is not supported. + """ + raise NotImplementedError() + + def has_key(self, key): + """ + @type key: L{Crash} signature. + @param key: Heuristic signature of the crash to get. + + @rtype: bool + @return: C{True} if a matching L{Crash} object is in the container. + """ + return self.__keys.has_key( key ) + + def iterkeys(self): + """ + @rtype: iterator + @return: Iterator of the contained L{Crash} object keys. + + @see: L{get} + @warning: A B{copy} of each object is returned, + so any changes made to them will be lost. + + To preserve changes do the following: + 1. Keep a reference to the object. + 2. Delete the object from the set. + 3. Modify the object and add it again. + """ + return iter(self.__keys) + +#============================================================================== +# Register the Crash class with the secure serializer. 
+ +try: + cerealizer.register(Crash) + cerealizer.register(win32.MemoryBasicInformation) +except NameError: + pass diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/debug.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/debug.py new file mode 100644 index 00000000..8364a5b8 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/debug.py @@ -0,0 +1,1543 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Debugging. + +@group Debugging: + Debug + +@group Warnings: + MixedBitsWarning +""" + +__revision__ = "$Id$" + +__all__ = [ 'Debug', 'MixedBitsWarning' ] + +import sys +from winappdbg import win32 +from winappdbg.system import System +from winappdbg.process import Process +from winappdbg.thread import Thread +from winappdbg.module import Module +from winappdbg.window import Window +from winappdbg.breakpoint import _BreakpointContainer, CodeBreakpoint +from winappdbg.event import Event, EventHandler, EventDispatcher, EventFactory +from winappdbg.interactive import ConsoleDebugger + +import warnings +##import traceback + +#============================================================================== + +# If you set this warning to be considered as an error, you can stop the +# debugger from attaching to 64-bit processes from a 32-bit Python VM and +# visceversa. +class MixedBitsWarning (RuntimeWarning): + """ + This warning is issued when mixing 32 and 64 bit processes. + """ + +#============================================================================== + +# TODO +# * Add memory read and write operations, similar to those in the Process +# class, but hiding the presence of the code breakpoints. +# * Add a method to get the memory map of a process, but hiding the presence +# of the page breakpoints. +# * Maybe the previous two features should be implemented at the Process class +# instead, but how to communicate with the Debug object without creating +# circular references? 
Perhaps the "overrides" could be set using private +# members (so users won't see them), but then there's the problem of the +# users being able to access the snapshot (i.e. clear it), which is why it's +# not such a great idea to use the snapshot to store data that really belongs +# to the Debug class. + +class Debug (EventDispatcher, _BreakpointContainer): + """ + The main debugger class. + + @group Debugging: + interactive, attach, detach, detach_from_all, execv, execl, + kill, kill_all, + get_debugee_count, get_debugee_pids, + is_debugee, is_debugee_attached, is_debugee_started, + in_hostile_mode, + add_existing_session + + @group Debugging loop: + loop, stop, next, wait, dispatch, cont + + @undocumented: force_garbage_collection + + @type system: L{System} + @ivar system: A System snapshot that is automatically updated for + processes being debugged. Processes not being debugged in this snapshot + may be outdated. + """ + + # Automatically set to True the first time a Debug object is instanced. + _debug_static_init = False + + def __init__(self, eventHandler = None, bKillOnExit = False, + bHostileCode = False): + """ + Debugger object. + + @type eventHandler: L{EventHandler} + @param eventHandler: + (Optional, recommended) Custom event handler object. + + @type bKillOnExit: bool + @param bKillOnExit: (Optional) Kill on exit mode. + If C{True} debugged processes are killed when the debugger is + stopped. If C{False} when the debugger stops it detaches from all + debugged processes and leaves them running (default). + + @type bHostileCode: bool + @param bHostileCode: (Optional) Hostile code mode. + Set to C{True} to take some basic precautions against anti-debug + tricks. Disabled by default. + + @warn: When hostile mode is enabled, some things may not work as + expected! This is because the anti-anti debug tricks may disrupt + the behavior of the Win32 debugging APIs or WinAppDbg itself. + + @note: The L{eventHandler} parameter may be any callable Python object + (for example a function, or an instance method). + However you'll probably find it more convenient to use an instance + of a subclass of L{EventHandler} here. + + @raise WindowsError: Raises an exception on error. + """ + EventDispatcher.__init__(self, eventHandler) + _BreakpointContainer.__init__(self) + + self.system = System() + self.lastEvent = None + self.__firstDebugee = True + self.__bKillOnExit = bKillOnExit + self.__bHostileCode = bHostileCode + self.__breakOnEP = set() # set of pids + self.__attachedDebugees = set() # set of pids + self.__startedDebugees = set() # set of pids + + if not self._debug_static_init: + self._debug_static_init = True + + # Request debug privileges for the current process. + # Only do this once, and only after instancing a Debug object, + # so passive debuggers don't get detected because of this. + self.system.request_debug_privileges(bIgnoreExceptions = False) + + # Try to fix the symbol store path if it wasn't set. + # But don't enable symbol downloading by default, since it may + # degrade performance severely. + self.system.fix_symbol_store_path(remote = False, force = False) + +## # It's hard not to create circular references, +## # and if we have a destructor, we can end up leaking everything. +## # It's best to code the debugging loop properly to always +## # stop the debugger before going out of scope. +## def __del__(self): +## self.stop() + + def __enter__(self): + """ + Compatibility with the "C{with}" Python statement. 
+ """ + return self + + def __exit__(self, type, value, traceback): + """ + Compatibility with the "C{with}" Python statement. + """ + self.stop() + + def __len__(self): + """ + @rtype: int + @return: Number of processes being debugged. + """ + return self.get_debugee_count() + + # TODO: maybe custom __bool__ to break out of loop() ? + # it already does work (because of __len__) but it'd be + # useful to do it from the event handler anyway + +#------------------------------------------------------------------------------ + + def __setSystemKillOnExitMode(self): + # Make sure the default system behavior on detaching from processes + # versus killing them matches our preferences. This only affects the + # scenario where the Python VM dies unexpectedly without running all + # the finally clauses, or the user failed to either instance the Debug + # object inside a with block or call the stop() method before quitting. + if self.__firstDebugee: + try: + System.set_kill_on_exit_mode(self.__bKillOnExit) + self.__firstDebugee = False + except Exception: + pass + + def attach(self, dwProcessId): + """ + Attaches to an existing process for debugging. + + @see: L{detach}, L{execv}, L{execl} + + @type dwProcessId: int + @param dwProcessId: Global ID of a process to attach to. + + @rtype: L{Process} + @return: A new Process object. Normally you don't need to use it now, + it's best to interact with the process from the event handler. + + @raise WindowsError: Raises an exception on error. + Depending on the circumstances, the debugger may or may not have + attached to the target process. + """ + + # Get the Process object from the snapshot, + # if missing create a new one. + try: + aProcess = self.system.get_process(dwProcessId) + except KeyError: + aProcess = Process(dwProcessId) + + # Warn when mixing 32 and 64 bits. + # This also allows the user to stop attaching altogether, + # depending on how the warnings are configured. + if System.bits != aProcess.get_bits(): + msg = "Mixture of 32 and 64 bits is considered experimental." \ + " Use at your own risk!" + warnings.warn(msg, MixedBitsWarning) + + # Attach to the process. + win32.DebugActiveProcess(dwProcessId) + + # Add the new PID to the set of debugees. + self.__attachedDebugees.add(dwProcessId) + + # Match the system kill-on-exit flag to our own. + self.__setSystemKillOnExitMode() + + # If the Process object was not in the snapshot, add it now. + if not self.system.has_process(dwProcessId): + self.system._add_process(aProcess) + + # Scan the process threads and loaded modules. + # This is prefered because the thread and library events do not + # properly give some information, like the filename for each module. + aProcess.scan_threads() + aProcess.scan_modules() + + # Return the Process object, like the execv() and execl() methods. + return aProcess + + def execv(self, argv, **kwargs): + """ + Starts a new process for debugging. + + This method uses a list of arguments. To use a command line string + instead, use L{execl}. + + @see: L{attach}, L{detach} + + @type argv: list( str... ) + @param argv: List of command line arguments to pass to the debugee. + The first element must be the debugee executable filename. + + @type bBreakOnEntryPoint: bool + @keyword bBreakOnEntryPoint: C{True} to automatically set a breakpoint + at the program entry point. + + @type bConsole: bool + @keyword bConsole: True to inherit the console of the debugger. + Defaults to C{False}. 
+ + @type bFollow: bool + @keyword bFollow: C{True} to automatically attach to child processes. + Defaults to C{False}. + + @type bInheritHandles: bool + @keyword bInheritHandles: C{True} if the new process should inherit + it's parent process' handles. Defaults to C{False}. + + @type bSuspended: bool + @keyword bSuspended: C{True} to suspend the main thread before any code + is executed in the debugee. Defaults to C{False}. + + @keyword dwParentProcessId: C{None} or C{0} if the debugger process + should be the parent process (default), or a process ID to + forcefully set as the debugee's parent (only available for Windows + Vista and above). + + In hostile mode, the default is not the debugger process but the + process ID for "explorer.exe". + + @type iTrustLevel: int or None + @keyword iTrustLevel: Trust level. + Must be one of the following values: + - 0: B{No trust}. May not access certain resources, such as + cryptographic keys and credentials. Only available since + Windows XP and 2003, desktop editions. This is the default + in hostile mode. + - 1: B{Normal trust}. Run with the same privileges as a normal + user, that is, one that doesn't have the I{Administrator} or + I{Power User} user rights. Only available since Windows XP + and 2003, desktop editions. + - 2: B{Full trust}. Run with the exact same privileges as the + current user. This is the default in normal mode. + + @type bAllowElevation: bool + @keyword bAllowElevation: C{True} to allow the child process to keep + UAC elevation, if the debugger itself is running elevated. C{False} + to ensure the child process doesn't run with elevation. Defaults to + C{True}. + + This flag is only meaningful on Windows Vista and above, and if the + debugger itself is running with elevation. It can be used to make + sure the child processes don't run elevated as well. + + This flag DOES NOT force an elevation prompt when the debugger is + not running with elevation. + + Note that running the debugger with elevation (or the Python + interpreter at all for that matter) is not normally required. + You should only need to if the target program requires elevation + to work properly (for example if you try to debug an installer). + + @rtype: L{Process} + @return: A new Process object. Normally you don't need to use it now, + it's best to interact with the process from the event handler. + + @raise WindowsError: Raises an exception on error. + """ + if type(argv) in (str, compat.unicode): + raise TypeError("Debug.execv expects a list, not a string") + lpCmdLine = self.system.argv_to_cmdline(argv) + return self.execl(lpCmdLine, **kwargs) + + def execl(self, lpCmdLine, **kwargs): + """ + Starts a new process for debugging. + + This method uses a command line string. To use a list of arguments + instead, use L{execv}. + + @see: L{attach}, L{detach} + + @type lpCmdLine: str + @param lpCmdLine: Command line string to execute. + The first token must be the debugee executable filename. + Tokens with spaces must be enclosed in double quotes. + Tokens including double quote characters must be escaped with a + backslash. + + @type bBreakOnEntryPoint: bool + @keyword bBreakOnEntryPoint: C{True} to automatically set a breakpoint + at the program entry point. Defaults to C{False}. + + @type bConsole: bool + @keyword bConsole: True to inherit the console of the debugger. + Defaults to C{False}. + + @type bFollow: bool + @keyword bFollow: C{True} to automatically attach to child processes. + Defaults to C{False}. 
+ + @type bInheritHandles: bool + @keyword bInheritHandles: C{True} if the new process should inherit + it's parent process' handles. Defaults to C{False}. + + @type bSuspended: bool + @keyword bSuspended: C{True} to suspend the main thread before any code + is executed in the debugee. Defaults to C{False}. + + @type dwParentProcessId: int or None + @keyword dwParentProcessId: C{None} or C{0} if the debugger process + should be the parent process (default), or a process ID to + forcefully set as the debugee's parent (only available for Windows + Vista and above). + + In hostile mode, the default is not the debugger process but the + process ID for "explorer.exe". + + @type iTrustLevel: int + @keyword iTrustLevel: Trust level. + Must be one of the following values: + - 0: B{No trust}. May not access certain resources, such as + cryptographic keys and credentials. Only available since + Windows XP and 2003, desktop editions. This is the default + in hostile mode. + - 1: B{Normal trust}. Run with the same privileges as a normal + user, that is, one that doesn't have the I{Administrator} or + I{Power User} user rights. Only available since Windows XP + and 2003, desktop editions. + - 2: B{Full trust}. Run with the exact same privileges as the + current user. This is the default in normal mode. + + @type bAllowElevation: bool + @keyword bAllowElevation: C{True} to allow the child process to keep + UAC elevation, if the debugger itself is running elevated. C{False} + to ensure the child process doesn't run with elevation. Defaults to + C{True} in normal mode and C{False} in hostile mode. + + This flag is only meaningful on Windows Vista and above, and if the + debugger itself is running with elevation. It can be used to make + sure the child processes don't run elevated as well. + + This flag DOES NOT force an elevation prompt when the debugger is + not running with elevation. + + Note that running the debugger with elevation (or the Python + interpreter at all for that matter) is not normally required. + You should only need to if the target program requires elevation + to work properly (for example if you try to debug an installer). + + @rtype: L{Process} + @return: A new Process object. Normally you don't need to use it now, + it's best to interact with the process from the event handler. + + @raise WindowsError: Raises an exception on error. + """ + if type(lpCmdLine) not in (str, compat.unicode): + warnings.warn("Debug.execl expects a string") + + # Set the "debug" flag to True. + kwargs['bDebug'] = True + + # Pop the "break on entry point" flag. + bBreakOnEntryPoint = kwargs.pop('bBreakOnEntryPoint', False) + + # Set the default trust level if requested. + if 'iTrustLevel' not in kwargs: + if self.__bHostileCode: + kwargs['iTrustLevel'] = 0 + else: + kwargs['iTrustLevel'] = 2 + + # Set the default UAC elevation flag if requested. + if 'bAllowElevation' not in kwargs: + kwargs['bAllowElevation'] = not self.__bHostileCode + + # In hostile mode the default parent process is explorer.exe. + # Only supported for Windows Vista and above. 
+ if self.__bHostileCode and not kwargs.get('dwParentProcessId', None): + try: + vista_and_above = self.__vista_and_above + except AttributeError: + osi = win32.OSVERSIONINFOEXW() + osi.dwMajorVersion = 6 + osi.dwMinorVersion = 0 + osi.dwPlatformId = win32.VER_PLATFORM_WIN32_NT + mask = 0 + mask = win32.VerSetConditionMask(mask, + win32.VER_MAJORVERSION, + win32.VER_GREATER_EQUAL) + mask = win32.VerSetConditionMask(mask, + win32.VER_MAJORVERSION, + win32.VER_GREATER_EQUAL) + mask = win32.VerSetConditionMask(mask, + win32.VER_PLATFORMID, + win32.VER_EQUAL) + vista_and_above = win32.VerifyVersionInfoW(osi, + win32.VER_MAJORVERSION | \ + win32.VER_MINORVERSION | \ + win32.VER_PLATFORMID, + mask) + self.__vista_and_above = vista_and_above + if vista_and_above: + dwParentProcessId = self.system.get_explorer_pid() + if dwParentProcessId: + kwargs['dwParentProcessId'] = dwParentProcessId + else: + msg = ("Failed to find \"explorer.exe\"!" + " Using the debugger as parent process.") + warnings.warn(msg, RuntimeWarning) + + # Start the new process. + aProcess = None + try: + aProcess = self.system.start_process(lpCmdLine, **kwargs) + dwProcessId = aProcess.get_pid() + + # Match the system kill-on-exit flag to our own. + self.__setSystemKillOnExitMode() + + # Warn when mixing 32 and 64 bits. + # This also allows the user to stop attaching altogether, + # depending on how the warnings are configured. + if System.bits != aProcess.get_bits(): + msg = "Mixture of 32 and 64 bits is considered experimental." \ + " Use at your own risk!" + warnings.warn(msg, MixedBitsWarning) + + # Add the new PID to the set of debugees. + self.__startedDebugees.add(dwProcessId) + + # Add the new PID to the set of "break on EP" debugees if needed. + if bBreakOnEntryPoint: + self.__breakOnEP.add(dwProcessId) + + # Return the Process object. + return aProcess + + # On error kill the new process and raise an exception. + except: + if aProcess is not None: + try: + try: + self.__startedDebugees.remove(aProcess.get_pid()) + except KeyError: + pass + finally: + try: + try: + self.__breakOnEP.remove(aProcess.get_pid()) + except KeyError: + pass + finally: + try: + aProcess.kill() + except Exception: + pass + raise + + def add_existing_session(self, dwProcessId, bStarted = False): + """ + Use this method only when for some reason the debugger's been attached + to the target outside of WinAppDbg (for example when integrating with + other tools). + + You don't normally need to call this method. Most users should call + L{attach}, L{execv} or L{execl} instead. + + @type dwProcessId: int + @param dwProcessId: Global process ID. + + @type bStarted: bool + @param bStarted: C{True} if the process was started by the debugger, + or C{False} if the process was attached to instead. + + @raise WindowsError: The target process does not exist, is not attached + to the debugger anymore. + """ + + # Register the process object with the snapshot. + if not self.system.has_process(dwProcessId): + aProcess = Process(dwProcessId) + self.system._add_process(aProcess) + else: + aProcess = self.system.get_process(dwProcessId) + + # Test for debug privileges on the target process. + # Raises WindowsException on error. + aProcess.get_handle() + + # Register the process ID with the debugger. + if bStarted: + self.__attachedDebugees.add(dwProcessId) + else: + self.__startedDebugees.add(dwProcessId) + + # Match the system kill-on-exit flag to our own. + self.__setSystemKillOnExitMode() + + # Scan the process threads and loaded modules. 
+ # This is prefered because the thread and library events do not + # properly give some information, like the filename for each module. + aProcess.scan_threads() + aProcess.scan_modules() + + def __cleanup_process(self, dwProcessId, bIgnoreExceptions = False): + """ + Perform the necessary cleanup of a process about to be killed or + detached from. + + This private method is called by L{kill} and L{detach}. + + @type dwProcessId: int + @param dwProcessId: Global ID of a process to kill. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when killing the process. + + @raise WindowsError: Raises an exception on error, unless + C{bIgnoreExceptions} is C{True}. + """ + # If the process is being debugged... + if self.is_debugee(dwProcessId): + + # Make sure a Process object exists or the following calls fail. + if not self.system.has_process(dwProcessId): + aProcess = Process(dwProcessId) + try: + aProcess.get_handle() + except WindowsError: + pass # fails later on with more specific reason + self.system._add_process(aProcess) + + # Erase all breakpoints in the process. + try: + self.erase_process_breakpoints(dwProcessId) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Stop tracing all threads in the process. + try: + self.stop_tracing_process(dwProcessId) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # The process is no longer a debugee. + try: + if dwProcessId in self.__attachedDebugees: + self.__attachedDebugees.remove(dwProcessId) + if dwProcessId in self.__startedDebugees: + self.__startedDebugees.remove(dwProcessId) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Clear and remove the process from the snapshot. + # If the user wants to do something with it after detaching + # a new Process instance should be created. + try: + if self.system.has_process(dwProcessId): + try: + self.system.get_process(dwProcessId).clear() + finally: + self.system._del_process(dwProcessId) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # If the last debugging event is related to this process, forget it. + try: + if self.lastEvent and self.lastEvent.get_pid() == dwProcessId: + self.lastEvent = None + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + def kill(self, dwProcessId, bIgnoreExceptions = False): + """ + Kills a process currently being debugged. + + @see: L{detach} + + @type dwProcessId: int + @param dwProcessId: Global ID of a process to kill. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when killing the process. + + @raise WindowsError: Raises an exception on error, unless + C{bIgnoreExceptions} is C{True}. + """ + + # Keep a reference to the process. We'll need it later. + try: + aProcess = self.system.get_process(dwProcessId) + except KeyError: + aProcess = Process(dwProcessId) + + # Cleanup all data referring to the process. + self.__cleanup_process(dwProcessId, + bIgnoreExceptions = bIgnoreExceptions) + + # Kill the process. 
+ try: + try: + if self.is_debugee(dwProcessId): + try: + if aProcess.is_alive(): + aProcess.suspend() + finally: + self.detach(dwProcessId, + bIgnoreExceptions = bIgnoreExceptions) + finally: + aProcess.kill() + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Cleanup what remains of the process data. + try: + aProcess.clear() + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + def kill_all(self, bIgnoreExceptions = False): + """ + Kills from all processes currently being debugged. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when killing each process. C{False} to stop and raise an + exception when encountering an error. + + @raise WindowsError: Raises an exception on error, unless + C{bIgnoreExceptions} is C{True}. + """ + for pid in self.get_debugee_pids(): + self.kill(pid, bIgnoreExceptions = bIgnoreExceptions) + + def detach(self, dwProcessId, bIgnoreExceptions = False): + """ + Detaches from a process currently being debugged. + + @note: On Windows 2000 and below the process is killed. + + @see: L{attach}, L{detach_from_all} + + @type dwProcessId: int + @param dwProcessId: Global ID of a process to detach from. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when detaching. C{False} to stop and raise an exception when + encountering an error. + + @raise WindowsError: Raises an exception on error, unless + C{bIgnoreExceptions} is C{True}. + """ + + # Keep a reference to the process. We'll need it later. + try: + aProcess = self.system.get_process(dwProcessId) + except KeyError: + aProcess = Process(dwProcessId) + + # Determine if there is support for detaching. + # This check should only fail on Windows 2000 and older. + try: + win32.DebugActiveProcessStop + can_detach = True + except AttributeError: + can_detach = False + + # Continue the last event before detaching. + # XXX not sure about this... + try: + if can_detach and self.lastEvent and \ + self.lastEvent.get_pid() == dwProcessId: + self.cont(self.lastEvent) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Cleanup all data referring to the process. + self.__cleanup_process(dwProcessId, + bIgnoreExceptions = bIgnoreExceptions) + + try: + # Detach from the process. + # On Windows 2000 and before, kill the process. + if can_detach: + try: + win32.DebugActiveProcessStop(dwProcessId) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + else: + try: + aProcess.kill() + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + finally: + + # Cleanup what remains of the process data. + aProcess.clear() + + def detach_from_all(self, bIgnoreExceptions = False): + """ + Detaches from all processes currently being debugged. + + @note: To better handle last debugging event, call L{stop} instead. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when detaching. + + @raise WindowsError: Raises an exception on error, unless + C{bIgnoreExceptions} is C{True}. 
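+
+        Example (a sketch; as noted above, calling L{stop} is usually the
+        better way to shut down)::
+            import sys
+            debug = Debug()
+            try:
+                debug.execv(sys.argv[1:])
+                debug.loop()
+            finally:
+                debug.detach_from_all(bIgnoreExceptions = True)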
+ """ + for pid in self.get_debugee_pids(): + self.detach(pid, bIgnoreExceptions = bIgnoreExceptions) + +#------------------------------------------------------------------------------ + + def wait(self, dwMilliseconds = None): + """ + Waits for the next debug event. + + @see: L{cont}, L{dispatch}, L{loop} + + @type dwMilliseconds: int + @param dwMilliseconds: (Optional) Timeout in milliseconds. + Use C{INFINITE} or C{None} for no timeout. + + @rtype: L{Event} + @return: An event that occured in one of the debugees. + + @raise WindowsError: Raises an exception on error. + If no target processes are left to debug, + the error code is L{win32.ERROR_INVALID_HANDLE}. + """ + + # Wait for the next debug event. + raw = win32.WaitForDebugEvent(dwMilliseconds) + event = EventFactory.get(self, raw) + + # Remember it. + self.lastEvent = event + + # Return it. + return event + + def dispatch(self, event = None): + """ + Calls the debug event notify callbacks. + + @see: L{cont}, L{loop}, L{wait} + + @type event: L{Event} + @param event: (Optional) Event object returned by L{wait}. + + @raise WindowsError: Raises an exception on error. + """ + + # If no event object was given, use the last event. + if event is None: + event = self.lastEvent + + # Ignore dummy events. + if not event: + return + + # Determine the default behaviour for this event. + # XXX HACK + # Some undocumented flags are used, but as far as I know in those + # versions of Windows that don't support them they should behave + # like DGB_CONTINUE. + + code = event.get_event_code() + if code == win32.EXCEPTION_DEBUG_EVENT: + + # At this point, by default some exception types are swallowed by + # the debugger, because we don't know yet if it was caused by the + # debugger itself or the debugged process. + # + # Later on (see breakpoint.py) if we determined the exception was + # not caused directly by the debugger itself, we set the default + # back to passing the exception to the debugee. + # + # The "invalid handle" exception is also swallowed by the debugger + # because it's not normally generated by the debugee. But in + # hostile mode we want to pass it to the debugee, as it may be the + # result of an anti-debug trick. In that case it's best to disable + # bad handles detection with Microsoft's gflags.exe utility. See: + # http://msdn.microsoft.com/en-us/library/windows/hardware/ff549557(v=vs.85).aspx + + exc_code = event.get_exception_code() + if exc_code in ( + win32.EXCEPTION_BREAKPOINT, + win32.EXCEPTION_WX86_BREAKPOINT, + win32.EXCEPTION_SINGLE_STEP, + win32.EXCEPTION_GUARD_PAGE, + ): + event.continueStatus = win32.DBG_CONTINUE + elif exc_code == win32.EXCEPTION_INVALID_HANDLE: + if self.__bHostileCode: + event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + else: + event.continueStatus = win32.DBG_CONTINUE + else: + event.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + + elif code == win32.RIP_EVENT and \ + event.get_rip_type() == win32.SLE_ERROR: + + # RIP events that signal fatal events should kill the process. + event.continueStatus = win32.DBG_TERMINATE_PROCESS + + else: + + # Other events need this continue code. + # Sometimes other codes can be used and are ignored, sometimes not. + # For example, when using the DBG_EXCEPTION_NOT_HANDLED code, + # debug strings are sent twice (!) + event.continueStatus = win32.DBG_CONTINUE + + # Dispatch the debug event. + return EventDispatcher.dispatch(self, event) + + def cont(self, event = None): + """ + Resumes execution after processing a debug event. 
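+
+        Example (a sketch of the manual event loop that L{next} and L{loop}
+        implement for you)::
+            import sys
+            debug = Debug()
+            debug.execv(sys.argv[1:])
+            while debug:
+                event = debug.wait()
+                try:
+                    debug.dispatch(event)
+                finally:
+                    debug.cont(event)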
+ + @see: dispatch(), loop(), wait() + + @type event: L{Event} + @param event: (Optional) Event object returned by L{wait}. + + @raise WindowsError: Raises an exception on error. + """ + + # If no event object was given, use the last event. + if event is None: + event = self.lastEvent + + # Ignore dummy events. + if not event: + return + + # Get the event continue status information. + dwProcessId = event.get_pid() + dwThreadId = event.get_tid() + dwContinueStatus = event.continueStatus + + # Check if the process is still being debugged. + if self.is_debugee(dwProcessId): + + # Try to flush the instruction cache. + try: + if self.system.has_process(dwProcessId): + aProcess = self.system.get_process(dwProcessId) + else: + aProcess = Process(dwProcessId) + aProcess.flush_instruction_cache() + except WindowsError: + pass + + # XXX TODO + # + # Try to execute the UnhandledExceptionFilter for second chance + # exceptions, at least when in hostile mode (in normal mode it + # would be breaking compatibility, as users may actually expect + # second chance exceptions to be raised again). + # + # Reportedly in Windows 7 (maybe in Vista too) this seems to be + # happening already. In XP and below the UnhandledExceptionFilter + # was never called for processes being debugged. + + # Continue execution of the debugee. + win32.ContinueDebugEvent(dwProcessId, dwThreadId, dwContinueStatus) + + # If the event is the last event, forget it. + if event == self.lastEvent: + self.lastEvent = None + + def stop(self, bIgnoreExceptions = True): + """ + Stops debugging all processes. + + If the kill on exit mode is on, debugged processes are killed when the + debugger is stopped. Otherwise when the debugger stops it detaches from + all debugged processes and leaves them running (default). For more + details see: L{__init__} + + @note: This method is better than L{detach_from_all} because it can + gracefully handle the last debugging event before detaching. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when detaching. + """ + + # Determine if we have a last debug event that we need to continue. + try: + event = self.lastEvent + has_event = bool(event) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + has_event = False + + # If we do... + if has_event: + + # Disable all breakpoints in the process before resuming execution. + try: + pid = event.get_pid() + self.disable_process_breakpoints(pid) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Disable all breakpoints in the thread before resuming execution. + try: + tid = event.get_tid() + self.disable_thread_breakpoints(tid) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Resume execution. + try: + event.continueDebugEvent = win32.DBG_CONTINUE + self.cont(event) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Detach from or kill all debuggees. + try: + if self.__bKillOnExit: + self.kill_all(bIgnoreExceptions) + else: + self.detach_from_all(bIgnoreExceptions) + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Cleanup the process snapshots. 
+ try: + self.system.clear() + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + + # Close all Win32 handles the Python garbage collector failed to close. + self.force_garbage_collection(bIgnoreExceptions) + + def next(self): + """ + Handles the next debug event. + + @see: L{cont}, L{dispatch}, L{wait}, L{stop} + + @raise WindowsError: Raises an exception on error. + + If the wait operation causes an error, debugging is stopped + (meaning all debugees are either killed or detached from). + + If the event dispatching causes an error, the event is still + continued before returning. This may happen, for example, if the + event handler raises an exception nobody catches. + """ + try: + event = self.wait() + except Exception: + self.stop() + raise + try: + self.dispatch() + finally: + self.cont() + + def loop(self): + """ + Simple debugging loop. + + This debugging loop is meant to be useful for most simple scripts. + It iterates as long as there is at least one debugee, or an exception + is raised. Multiple calls are allowed. + + This is a trivial example script:: + import sys + debug = Debug() + try: + debug.execv( sys.argv [ 1 : ] ) + debug.loop() + finally: + debug.stop() + + @see: L{next}, L{stop} + + U{http://msdn.microsoft.com/en-us/library/ms681675(VS.85).aspx} + + @raise WindowsError: Raises an exception on error. + + If the wait operation causes an error, debugging is stopped + (meaning all debugees are either killed or detached from). + + If the event dispatching causes an error, the event is still + continued before returning. This may happen, for example, if the + event handler raises an exception nobody catches. + """ + while self: + self.next() + + def get_debugee_count(self): + """ + @rtype: int + @return: Number of processes being debugged. + """ + return len(self.__attachedDebugees) + len(self.__startedDebugees) + + def get_debugee_pids(self): + """ + @rtype: list( int... ) + @return: Global IDs of processes being debugged. + """ + return list(self.__attachedDebugees) + list(self.__startedDebugees) + + def is_debugee(self, dwProcessId): + """ + Determine if the debugger is debugging the given process. + + @see: L{is_debugee_attached}, L{is_debugee_started} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @rtype: bool + @return: C{True} if the given process is being debugged + by this L{Debug} instance. + """ + return self.is_debugee_attached(dwProcessId) or \ + self.is_debugee_started(dwProcessId) + + def is_debugee_started(self, dwProcessId): + """ + Determine if the given process was started by the debugger. + + @see: L{is_debugee}, L{is_debugee_attached} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @rtype: bool + @return: C{True} if the given process was started for debugging by this + L{Debug} instance. + """ + return dwProcessId in self.__startedDebugees + + def is_debugee_attached(self, dwProcessId): + """ + Determine if the debugger is attached to the given process. + + @see: L{is_debugee}, L{is_debugee_started} + + @type dwProcessId: int + @param dwProcessId: Process global ID. + + @rtype: bool + @return: C{True} if the given process is attached to this + L{Debug} instance. + """ + return dwProcessId in self.__attachedDebugees + + def in_hostile_mode(self): + """ + Determine if we're in hostile mode (anti-anti-debug). + + @rtype: bool + @return: C{True} if this C{Debug} instance was started in hostile mode, + C{False} otherwise. 
+ """ + return self.__bHostileCode + +#------------------------------------------------------------------------------ + + def interactive(self, bConfirmQuit = True, bShowBanner = True): + """ + Start an interactive debugging session. + + @type bConfirmQuit: bool + @param bConfirmQuit: Set to C{True} to ask the user for confirmation + before closing the session, C{False} otherwise. + + @type bShowBanner: bool + @param bShowBanner: Set to C{True} to show a banner before entering + the session and after leaving it, C{False} otherwise. + + @warn: This will temporarily disable the user-defined event handler! + + This method returns when the user closes the session. + """ + print('') + print("-" * 79) + print("Interactive debugging session started.") + print("Use the \"help\" command to list all available commands.") + print("Use the \"quit\" command to close this session.") + print("-" * 79) + if self.lastEvent is None: + print('') + console = ConsoleDebugger() + console.confirm_quit = bConfirmQuit + console.load_history() + try: + console.start_using_debugger(self) + console.loop() + finally: + console.stop_using_debugger() + console.save_history() + print('') + print("-" * 79) + print("Interactive debugging session closed.") + print("-" * 79) + print('') + +#------------------------------------------------------------------------------ + + @staticmethod + def force_garbage_collection(bIgnoreExceptions = True): + """ + Close all Win32 handles the Python garbage collector failed to close. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when detaching. + """ + try: + import gc + gc.collect() + bRecollect = False + for obj in list(gc.garbage): + try: + if isinstance(obj, win32.Handle): + obj.close() + elif isinstance(obj, Event): + obj.debug = None + elif isinstance(obj, Process): + obj.clear() + elif isinstance(obj, Thread): + obj.set_process(None) + obj.clear() + elif isinstance(obj, Module): + obj.set_process(None) + elif isinstance(obj, Window): + obj.set_process(None) + else: + continue + gc.garbage.remove(obj) + del obj + bRecollect = True + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + if bRecollect: + gc.collect() + except Exception: + if not bIgnoreExceptions: + raise + e = sys.exc_info()[1] + warnings.warn(str(e), RuntimeWarning) + +#------------------------------------------------------------------------------ + + def _notify_create_process(self, event): + """ + Notify the creation of a new process. + + @warning: This method is meant to be used internally by the debugger. + + @type event: L{CreateProcessEvent} + @param event: Create process event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + dwProcessId = event.get_pid() + if dwProcessId not in self.__attachedDebugees: + if dwProcessId not in self.__startedDebugees: + self.__startedDebugees.add(dwProcessId) + + retval = self.system._notify_create_process(event) + + # Set a breakpoint on the program's entry point if requested. + # Try not to use the Event object's entry point value, as in some cases + # it may be wrong. See: http://pferrie.host22.com/misc/lowlevel3.htm + if dwProcessId in self.__breakOnEP: + try: + lpEntryPoint = event.get_process().get_entry_point() + except Exception: + lpEntryPoint = event.get_start_address() + + # It'd be best to use a hardware breakpoint instead, at least in + # hostile mode. 
But since the main thread's context gets smashed + # by the loader, I haven't found a way to make it work yet. + self.break_at(dwProcessId, lpEntryPoint) + + # Defeat isDebuggerPresent by patching PEB->BeingDebugged. + # When we do this, some debugging APIs cease to work as expected. + # For example, the system breakpoint isn't hit when we attach. + # For that reason we need to define a code breakpoint at the + # code location where a new thread is spawned by the debugging + # APIs, ntdll!DbgUiRemoteBreakin. + if self.__bHostileCode: + aProcess = event.get_process() + try: + hProcess = aProcess.get_handle(win32.PROCESS_QUERY_INFORMATION) + pbi = win32.NtQueryInformationProcess( + hProcess, win32.ProcessBasicInformation) + ptr = pbi.PebBaseAddress + 2 + if aProcess.peek(ptr, 1) == '\x01': + aProcess.poke(ptr, '\x00') + except WindowsError: + e = sys.exc_info()[1] + warnings.warn( + "Cannot patch PEB->BeingDebugged, reason: %s" % e.strerror) + + return retval + + def _notify_create_thread(self, event): + """ + Notify the creation of a new thread. + + @warning: This method is meant to be used internally by the debugger. + + @type event: L{CreateThreadEvent} + @param event: Create thread event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + return event.get_process()._notify_create_thread(event) + + def _notify_load_dll(self, event): + """ + Notify the load of a new module. + + @warning: This method is meant to be used internally by the debugger. + + @type event: L{LoadDLLEvent} + @param event: Load DLL event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + + # Pass the event to the breakpoint container. + bCallHandler = _BreakpointContainer._notify_load_dll(self, event) + + # Get the process where the DLL was loaded. + aProcess = event.get_process() + + # Pass the event to the process. + bCallHandler = aProcess._notify_load_dll(event) and bCallHandler + + # Anti-anti-debugging tricks on ntdll.dll. + if self.__bHostileCode: + aModule = event.get_module() + if aModule.match_name('ntdll.dll'): + + # Since we've overwritten the PEB to hide + # ourselves, we no longer have the system + # breakpoint when attaching to the process. + # Set a breakpoint at ntdll!DbgUiRemoteBreakin + # instead (that's where the debug API spawns + # it's auxiliary threads). This also defeats + # a simple anti-debugging trick: the hostile + # process could have overwritten the int3 + # instruction at the system breakpoint. + self.break_at(aProcess.get_pid(), + aProcess.resolve_label('ntdll!DbgUiRemoteBreakin')) + + return bCallHandler + + def _notify_exit_process(self, event): + """ + Notify the termination of a process. + + @warning: This method is meant to be used internally by the debugger. + + @type event: L{ExitProcessEvent} + @param event: Exit process event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. 
+ """ + bCallHandler1 = _BreakpointContainer._notify_exit_process(self, event) + bCallHandler2 = self.system._notify_exit_process(event) + + try: + self.detach( event.get_pid() ) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror != win32.ERROR_INVALID_PARAMETER: + warnings.warn( + "Failed to detach from dead process, reason: %s" % str(e), + RuntimeWarning) + except Exception: + e = sys.exc_info()[1] + warnings.warn( + "Failed to detach from dead process, reason: %s" % str(e), + RuntimeWarning) + + return bCallHandler1 and bCallHandler2 + + def _notify_exit_thread(self, event): + """ + Notify the termination of a thread. + + @warning: This method is meant to be used internally by the debugger. + + @type event: L{ExitThreadEvent} + @param event: Exit thread event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + bCallHandler1 = _BreakpointContainer._notify_exit_thread(self, event) + bCallHandler2 = event.get_process()._notify_exit_thread(event) + return bCallHandler1 and bCallHandler2 + + def _notify_unload_dll(self, event): + """ + Notify the unload of a module. + + @warning: This method is meant to be used internally by the debugger. + + @type event: L{UnloadDLLEvent} + @param event: Unload DLL event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + bCallHandler1 = _BreakpointContainer._notify_unload_dll(self, event) + bCallHandler2 = event.get_process()._notify_unload_dll(event) + return bCallHandler1 and bCallHandler2 + + def _notify_rip(self, event): + """ + Notify of a RIP event. + + @warning: This method is meant to be used internally by the debugger. + + @type event: L{RIPEvent} + @param event: RIP event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + event.debug.detach( event.get_pid() ) + return True + + def _notify_debug_control_c(self, event): + """ + Notify of a Debug Ctrl-C exception. + + @warning: This method is meant to be used internally by the debugger. + + @note: This exception is only raised when a debugger is attached, and + applications are not supposed to handle it, so we need to handle it + ourselves or the application may crash. + + @see: U{http://msdn.microsoft.com/en-us/library/aa363082(VS.85).aspx} + + @type event: L{ExceptionEvent} + @param event: Debug Ctrl-C exception event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + if event.is_first_chance(): + event.continueStatus = win32.DBG_EXCEPTION_HANDLED + return True + + def _notify_ms_vc_exception(self, event): + """ + Notify of a Microsoft Visual C exception. + + @warning: This method is meant to be used internally by the debugger. + + @note: This allows the debugger to understand the + Microsoft Visual C thread naming convention. + + @see: U{http://msdn.microsoft.com/en-us/library/xcb2z8hs.aspx} + + @type event: L{ExceptionEvent} + @param event: Microsoft Visual C exception event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. 
+ """ + dwType = event.get_exception_information(0) + if dwType == 0x1000: + pszName = event.get_exception_information(1) + dwThreadId = event.get_exception_information(2) + dwFlags = event.get_exception_information(3) + + aProcess = event.get_process() + szName = aProcess.peek_string(pszName, fUnicode = False) + if szName: + + if dwThreadId == -1: + dwThreadId = event.get_tid() + + if aProcess.has_thread(dwThreadId): + aThread = aProcess.get_thread(dwThreadId) + else: + aThread = Thread(dwThreadId) + aProcess._add_thread(aThread) + +## if aThread.get_name() is None: +## aThread.set_name(szName) + aThread.set_name(szName) + + return True diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/disasm.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/disasm.py new file mode 100644 index 00000000..230e3314 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/disasm.py @@ -0,0 +1,722 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Binary code disassembly. + +@group Disassembler loader: + Disassembler, Engine + +@group Disassembler engines: + BeaEngine, CapstoneEngine, DistormEngine, + LibdisassembleEngine, PyDasmEngine +""" + +from __future__ import with_statement + +__revision__ = "$Id$" + +__all__ = [ + 'Disassembler', + 'Engine', + 'BeaEngine', + 'CapstoneEngine', + 'DistormEngine', + 'LibdisassembleEngine', + 'PyDasmEngine', +] + +from winappdbg.textio import HexDump +from winappdbg import win32 + +import ctypes +import warnings + +# lazy imports +BeaEnginePython = None +distorm3 = None +pydasm = None +libdisassemble = None +capstone = None + +#============================================================================== + +class Engine (object): + """ + Base class for disassembly engine adaptors. + + @type name: str + @cvar name: Engine name to use with the L{Disassembler} class. + + @type desc: str + @cvar desc: User friendly name of the disassembler engine. + + @type url: str + @cvar url: Download URL. 
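+
+    Example (an illustrative sketch of how an engine is normally obtained and
+    used through the L{Disassembler} factory)::
+        from winappdbg import win32
+        from winappdbg.disasm import Disassembler
+        code   = "\x90\x90\xc3"                 # nop; nop; ret
+        engine = Disassembler(win32.ARCH_I386)
+        for address, size, disasm, hexdump in engine.decode(0x401000, code):
+            print(disasm)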
+ + @type supported: set(str) + @cvar supported: Set of supported processor architectures. + For more details see L{win32.version._get_arch}. + + @type arch: str + @ivar arch: Name of the processor architecture. + """ + + name = "" + desc = "" + url = "" + supported = set() + + def __init__(self, arch = None): + """ + @type arch: str + @param arch: Name of the processor architecture. + If not provided the current processor architecture is assumed. + For more details see L{win32.version._get_arch}. + + @raise NotImplementedError: This disassembler doesn't support the + requested processor architecture. + """ + self.arch = self._validate_arch(arch) + try: + self._import_dependencies() + except ImportError: + msg = "%s is not installed or can't be found. Download it from: %s" + msg = msg % (self.name, self.url) + raise NotImplementedError(msg) + + def _validate_arch(self, arch = None): + """ + @type arch: str + @param arch: Name of the processor architecture. + If not provided the current processor architecture is assumed. + For more details see L{win32.version._get_arch}. + + @rtype: str + @return: Name of the processor architecture. + If not provided the current processor architecture is assumed. + For more details see L{win32.version._get_arch}. + + @raise NotImplementedError: This disassembler doesn't support the + requested processor architecture. + """ + + # Use the default architecture if none specified. + if not arch: + arch = win32.arch + + # Validate the architecture. + if arch not in self.supported: + msg = "The %s engine cannot decode %s code." + msg = msg % (self.name, arch) + raise NotImplementedError(msg) + + # Return the architecture. + return arch + + def _import_dependencies(self): + """ + Loads the dependencies for this disassembler. + + @raise ImportError: This disassembler cannot find or load the + necessary dependencies to make it work. + """ + raise SyntaxError("Subclasses MUST implement this method!") + + def decode(self, address, code): + """ + @type address: int + @param address: Memory address where the code was read from. + + @type code: str + @param code: Machine code to disassemble. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + + @raise NotImplementedError: This disassembler could not be loaded. + This may be due to missing dependencies. + """ + raise NotImplementedError() + +#============================================================================== + +class BeaEngine (Engine): + """ + Integration with the BeaEngine disassembler by Beatrix. + + @see: U{https://sourceforge.net/projects/winappdbg/files/additional%20packages/BeaEngine/} + """ + + name = "BeaEngine" + desc = "BeaEngine disassembler by Beatrix" + url = "https://sourceforge.net/projects/winappdbg/files/additional%20packages/BeaEngine/" + + supported = set(( + win32.ARCH_I386, + win32.ARCH_AMD64, + )) + + def _import_dependencies(self): + + # Load the BeaEngine ctypes wrapper. + global BeaEnginePython + if BeaEnginePython is None: + import BeaEnginePython + + def decode(self, address, code): + addressof = ctypes.addressof + + # Instance the code buffer. + buffer = ctypes.create_string_buffer(code) + buffer_ptr = addressof(buffer) + + # Instance the disassembler structure. 
+ Instruction = BeaEnginePython.DISASM() + Instruction.VirtualAddr = address + Instruction.EIP = buffer_ptr + Instruction.SecurityBlock = buffer_ptr + len(code) + if self.arch == win32.ARCH_I386: + Instruction.Archi = 0 + else: + Instruction.Archi = 0x40 + Instruction.Options = ( BeaEnginePython.Tabulation + + BeaEnginePython.NasmSyntax + + BeaEnginePython.SuffixedNumeral + + BeaEnginePython.ShowSegmentRegs ) + + # Prepare for looping over each instruction. + result = [] + Disasm = BeaEnginePython.Disasm + InstructionPtr = addressof(Instruction) + hexdump = HexDump.hexadecimal + append = result.append + OUT_OF_BLOCK = BeaEnginePython.OUT_OF_BLOCK + UNKNOWN_OPCODE = BeaEnginePython.UNKNOWN_OPCODE + + # For each decoded instruction... + while True: + + # Calculate the current offset into the buffer. + offset = Instruction.EIP - buffer_ptr + + # If we've gone past the buffer, break the loop. + if offset >= len(code): + break + + # Decode the current instruction. + InstrLength = Disasm(InstructionPtr) + + # If BeaEngine detects we've gone past the buffer, break the loop. + if InstrLength == OUT_OF_BLOCK: + break + + # The instruction could not be decoded. + if InstrLength == UNKNOWN_OPCODE: + + # Output a single byte as a "db" instruction. + char = "%.2X" % ord(buffer[offset]) + result.append(( + Instruction.VirtualAddr, + 1, + "db %sh" % char, + char, + )) + Instruction.VirtualAddr += 1 + Instruction.EIP += 1 + + # The instruction was decoded but reading past the buffer's end. + # This can happen when the last instruction is a prefix without an + # opcode. For example: decode(0, '\x66') + elif offset + InstrLength > len(code): + + # Output each byte as a "db" instruction. + for char in buffer[ offset : offset + len(code) ]: + char = "%.2X" % ord(char) + result.append(( + Instruction.VirtualAddr, + 1, + "db %sh" % char, + char, + )) + Instruction.VirtualAddr += 1 + Instruction.EIP += 1 + + # The instruction was decoded correctly. + else: + + # Output the decoded instruction. + append(( + Instruction.VirtualAddr, + InstrLength, + Instruction.CompleteInstr.strip(), + hexdump(buffer.raw[offset:offset+InstrLength]), + )) + Instruction.VirtualAddr += InstrLength + Instruction.EIP += InstrLength + + # Return the list of decoded instructions. + return result + +#============================================================================== + +class DistormEngine (Engine): + """ + Integration with the diStorm disassembler by Gil Dabah. + + @see: U{https://code.google.com/p/distorm3} + """ + + name = "diStorm" + desc = "diStorm disassembler by Gil Dabah" + url = "https://code.google.com/p/distorm3" + + supported = set(( + win32.ARCH_I386, + win32.ARCH_AMD64, + )) + + def _import_dependencies(self): + + # Load the distorm bindings. + global distorm3 + if distorm3 is None: + try: + import distorm3 + except ImportError: + import distorm as distorm3 + + # Load the decoder function. + self.__decode = distorm3.Decode + + # Load the bits flag. + self.__flag = { + win32.ARCH_I386: distorm3.Decode32Bits, + win32.ARCH_AMD64: distorm3.Decode64Bits, + }[self.arch] + + def decode(self, address, code): + return self.__decode(address, code, self.__flag) + +#============================================================================== + +class PyDasmEngine (Engine): + """ + Integration with PyDasm: Python bindings to libdasm. 
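+
+    Example (a sketch of selecting this engine explicitly by name, assuming
+    pydasm is installed)::
+        from winappdbg import win32
+        from winappdbg.disasm import Disassembler
+        engine = Disassembler(arch = win32.ARCH_I386, engine = "PyDasm")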
+ + @see: U{https://code.google.com/p/libdasm/} + """ + + name = "PyDasm" + desc = "PyDasm: Python bindings to libdasm" + url = "https://code.google.com/p/libdasm/" + + supported = set(( + win32.ARCH_I386, + )) + + def _import_dependencies(self): + + # Load the libdasm bindings. + global pydasm + if pydasm is None: + import pydasm + + def decode(self, address, code): + + # Decode each instruction in the buffer. + result = [] + offset = 0 + while offset < len(code): + + # Try to decode the current instruction. + instruction = pydasm.get_instruction(code[offset:offset+32], + pydasm.MODE_32) + + # Get the memory address of the current instruction. + current = address + offset + + # Illegal opcode or opcode longer than remaining buffer. + if not instruction or instruction.length + offset > len(code): + hexdump = '%.2X' % ord(code[offset]) + disasm = 'db 0x%s' % hexdump + ilen = 1 + + # Correctly decoded instruction. + else: + disasm = pydasm.get_instruction_string(instruction, + pydasm.FORMAT_INTEL, + current) + ilen = instruction.length + hexdump = HexDump.hexadecimal(code[offset:offset+ilen]) + + # Add the decoded instruction to the list. + result.append(( + current, + ilen, + disasm, + hexdump, + )) + + # Move to the next instruction. + offset += ilen + + # Return the list of decoded instructions. + return result + +#============================================================================== + +class LibdisassembleEngine (Engine): + """ + Integration with Immunity libdisassemble. + + @see: U{http://www.immunitysec.com/resources-freesoftware.shtml} + """ + + name = "Libdisassemble" + desc = "Immunity libdisassemble" + url = "http://www.immunitysec.com/resources-freesoftware.shtml" + + supported = set(( + win32.ARCH_I386, + )) + + def _import_dependencies(self): + + # Load the libdisassemble module. + # Since it doesn't come with an installer or an __init__.py file + # users can only install it manually however they feel like it, + # so we'll have to do a bit of guessing to find it. + + global libdisassemble + if libdisassemble is None: + try: + + # If installed properly with __init__.py + import libdisassemble.disassemble as libdisassemble + + except ImportError: + + # If installed by just copying and pasting the files + import disassemble as libdisassemble + + def decode(self, address, code): + + # Decode each instruction in the buffer. + result = [] + offset = 0 + while offset < len(code): + + # Decode the current instruction. + opcode = libdisassemble.Opcode( code[offset:offset+32] ) + length = opcode.getSize() + disasm = opcode.printOpcode('INTEL') + hexdump = HexDump.hexadecimal( code[offset:offset+length] ) + + # Add the decoded instruction to the list. + result.append(( + address + offset, + length, + disasm, + hexdump, + )) + + # Move to the next instruction. + offset += length + + # Return the list of decoded instructions. + return result + +#============================================================================== + +class CapstoneEngine (Engine): + """ + Integration with the Capstone disassembler by Nguyen Anh Quynh. + + @see: U{http://www.capstone-engine.org/} + """ + + name = "Capstone" + desc = "Capstone disassembler by Nguyen Anh Quynh" + url = "http://www.capstone-engine.org/" + + supported = set(( + win32.ARCH_I386, + win32.ARCH_AMD64, + win32.ARCH_THUMB, + win32.ARCH_ARM, + win32.ARCH_ARM64, + )) + + def _import_dependencies(self): + + # Load the Capstone bindings. 
+ global capstone + if capstone is None: + import capstone + + # Load the constants for the requested architecture. + self.__constants = { + win32.ARCH_I386: + (capstone.CS_ARCH_X86, capstone.CS_MODE_32), + win32.ARCH_AMD64: + (capstone.CS_ARCH_X86, capstone.CS_MODE_64), + win32.ARCH_THUMB: + (capstone.CS_ARCH_ARM, capstone.CS_MODE_THUMB), + win32.ARCH_ARM: + (capstone.CS_ARCH_ARM, capstone.CS_MODE_ARM), + win32.ARCH_ARM64: + (capstone.CS_ARCH_ARM64, capstone.CS_MODE_ARM), + } + + # Test for the bug in early versions of Capstone. + # If found, warn the user about it. + try: + self.__bug = not isinstance( + capstone.cs_disasm_quick( + capstone.CS_ARCH_X86, capstone.CS_MODE_32, "\x90", 1)[0], + capstone.capstone.CsInsn) + except AttributeError: + self.__bug = False + if self.__bug: + warnings.warn( + "This version of the Capstone bindings is unstable," + " please upgrade to a newer one!", + RuntimeWarning, stacklevel=4) + + + def decode(self, address, code): + + # Get the constants for the requested architecture. + arch, mode = self.__constants[self.arch] + + # Get the decoder function outside the loop. + decoder = capstone.cs_disasm_quick + + # If the buggy version of the bindings are being used, we need to catch + # all exceptions broadly. If not, we only need to catch CsError. + if self.__bug: + CsError = Exception + else: + CsError = capstone.CsError + + # Create the variables for the instruction length, mnemonic and + # operands. That way they won't be created within the loop, + # minimizing the chances data might be overwritten. + # This only makes sense for the buggy vesion of the bindings, normally + # memory accesses are safe). + length = mnemonic = op_str = None + + # For each instruction... + result = [] + offset = 0 + while offset < len(code): + + # Disassemble a single instruction, because disassembling multiple + # instructions may cause excessive memory usage (Capstone allocates + # approximately 1K of metadata per each decoded instruction). + instr = None + try: + instr = decoder( + arch, mode, code[offset:offset+16], address+offset, 1)[0] + except IndexError: + pass # No instructions decoded. + except CsError: + pass # Any other error. + + # On success add the decoded instruction. + if instr is not None: + + # Get the instruction length, mnemonic and operands. + # Copy the values quickly before someone overwrites them, + # if using the buggy version of the bindings (otherwise it's + # irrelevant in which order we access the properties). + length = instr.size + mnemonic = instr.mnemonic + op_str = instr.op_str + + # Concatenate the mnemonic and the operands. + if op_str: + disasm = "%s %s" % (mnemonic, op_str) + else: + disasm = mnemonic + + # Get the instruction bytes as a hexadecimal dump. + hexdump = HexDump.hexadecimal( code[offset:offset+length] ) + + # On error add a "define constant" instruction. + # The exact instruction depends on the architecture. + else: + + # The number of bytes to skip depends on the architecture. + # On Intel processors we'll skip one byte, since we can't + # really know the instruction length. On the rest of the + # architectures we always know the instruction length. + if self.arch in (win32.ARCH_I386, win32.ARCH_AMD64): + length = 1 + else: + length = 4 + + # Get the skipped bytes as a hexadecimal dump. + skipped = code[offset:offset+length] + hexdump = HexDump.hexadecimal(skipped) + + # Build the "define constant" instruction. + # On Intel processors it's "db". + # On ARM processors it's "dcb". 
+ if self.arch in (win32.ARCH_I386, win32.ARCH_AMD64): + mnemonic = "db " + else: + mnemonic = "dcb " + bytes = [] + for b in skipped: + if b.isalpha(): + bytes.append("'%s'" % b) + else: + bytes.append("0x%x" % ord(b)) + op_str = ", ".join(bytes) + disasm = mnemonic + op_str + + # Add the decoded instruction to the list. + result.append(( + address + offset, + length, + disasm, + hexdump, + )) + + # Update the offset. + offset += length + + # Return the list of decoded instructions. + return result + +#============================================================================== + +# TODO: use a lock to access __decoder +# TODO: look in sys.modules for whichever disassembler is already loaded + +class Disassembler (object): + """ + Generic disassembler. Uses a set of adapters to decide which library to + load for which supported platform. + + @type engines: tuple( L{Engine} ) + @cvar engines: Set of supported engines. If you implement your own adapter + you can add its class here to make it available to L{Disassembler}. + Supported disassemblers are: + """ + + engines = ( + DistormEngine, # diStorm engine goes first for backwards compatibility + BeaEngine, + CapstoneEngine, + LibdisassembleEngine, + PyDasmEngine, + ) + + # Add the list of supported disassemblers to the docstring. + __doc__ += "\n" + for e in engines: + __doc__ += " - %s - %s (U{%s})\n" % (e.name, e.desc, e.url) + del e + + # Cache of already loaded disassemblers. + __decoder = {} + + def __new__(cls, arch = None, engine = None): + """ + Factory class. You can't really instance a L{Disassembler} object, + instead one of the adapter L{Engine} subclasses is returned. + + @type arch: str + @param arch: (Optional) Name of the processor architecture. + If not provided the current processor architecture is assumed. + For more details see L{win32.version._get_arch}. + + @type engine: str + @param engine: (Optional) Name of the disassembler engine. + If not provided a compatible one is loaded automatically. + See: L{Engine.name} + + @raise NotImplementedError: No compatible disassembler was found that + could decode machine code for the requested architecture. This may + be due to missing dependencies. + + @raise ValueError: An unknown engine name was supplied. + """ + + # Use the default architecture if none specified. + if not arch: + arch = win32.arch + + # Return a compatible engine if none specified. + if not engine: + found = False + for clazz in cls.engines: + try: + if arch in clazz.supported: + selected = (clazz.name, arch) + try: + decoder = cls.__decoder[selected] + except KeyError: + decoder = clazz(arch) + cls.__decoder[selected] = decoder + return decoder + except NotImplementedError: + pass + msg = "No disassembler engine available for %s code." % arch + raise NotImplementedError(msg) + + # Return the specified engine. + selected = (engine, arch) + try: + decoder = cls.__decoder[selected] + except KeyError: + found = False + engineLower = engine.lower() + for clazz in cls.engines: + if clazz.name.lower() == engineLower: + found = True + break + if not found: + msg = "Unsupported disassembler engine: %s" % engine + raise ValueError(msg) + if arch not in clazz.supported: + msg = "The %s engine cannot decode %s code." 
% selected + raise NotImplementedError(msg) + decoder = clazz(arch) + cls.__decoder[selected] = decoder + return decoder diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/event.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/event.py new file mode 100644 index 00000000..af64727b --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/event.py @@ -0,0 +1,1869 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Event handling module. + +@see: U{http://apps.sourceforge.net/trac/winappdbg/wiki/Debugging} + +@group Debugging: + EventHandler, EventSift + +@group Debug events: + EventFactory, + EventDispatcher, + Event, + NoEvent, + CreateProcessEvent, + CreateThreadEvent, + ExitProcessEvent, + ExitThreadEvent, + LoadDLLEvent, + UnloadDLLEvent, + OutputDebugStringEvent, + RIPEvent, + ExceptionEvent + +@group Warnings: + EventCallbackWarning +""" + +__revision__ = "$Id$" + +__all__ = [ + # Factory of Event objects and all of it's subclasses. + # Users should not need to instance Event objects directly. + 'EventFactory', + + # Event dispatcher used internally by the Debug class. + 'EventDispatcher', + + # Base classes for user-defined event handlers. + 'EventHandler', + 'EventSift', + + # Warning for uncaught exceptions on event callbacks. + 'EventCallbackWarning', + + # Dummy event object that can be used as a placeholder. + # It's never returned by the EventFactory. + 'NoEvent', + + # Base class for event objects. + 'Event', + + # Event objects. 
+ 'CreateProcessEvent', + 'CreateThreadEvent', + 'ExitProcessEvent', + 'ExitThreadEvent', + 'LoadDLLEvent', + 'UnloadDLLEvent', + 'OutputDebugStringEvent', + 'RIPEvent', + 'ExceptionEvent' + ] + +from winappdbg import win32 +from winappdbg import compat +from winappdbg.win32 import FileHandle, ProcessHandle, ThreadHandle +from winappdbg.breakpoint import ApiHook +from winappdbg.module import Module +from winappdbg.thread import Thread +from winappdbg.process import Process +from winappdbg.textio import HexDump +from winappdbg.util import StaticClass, PathOperations + +import sys +import ctypes +import warnings +import traceback + +#============================================================================== + +class EventCallbackWarning (RuntimeWarning): + """ + This warning is issued when an uncaught exception was raised by a + user-defined event handler. + """ + +#============================================================================== + +class Event (object): + """ + Event object. + + @type eventMethod: str + @cvar eventMethod: + Method name to call when using L{EventHandler} subclasses. + Used internally. + + @type eventName: str + @cvar eventName: + User-friendly name of the event. + + @type eventDescription: str + @cvar eventDescription: + User-friendly description of the event. + + @type debug: L{Debug} + @ivar debug: + Debug object that received the event. + + @type raw: L{DEBUG_EVENT} + @ivar raw: + Raw DEBUG_EVENT structure as used by the Win32 API. + + @type continueStatus: int + @ivar continueStatus: + Continue status to pass to L{win32.ContinueDebugEvent}. + """ + + eventMethod = 'unknown_event' + eventName = 'Unknown event' + eventDescription = 'A debug event of an unknown type has occured.' + + def __init__(self, debug, raw): + """ + @type debug: L{Debug} + @param debug: Debug object that received the event. + + @type raw: L{DEBUG_EVENT} + @param raw: Raw DEBUG_EVENT structure as used by the Win32 API. + """ + self.debug = debug + self.raw = raw + self.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + +## @property +## def debug(self): +## """ +## @rtype debug: L{Debug} +## @return debug: +## Debug object that received the event. +## """ +## return self.__debug() + + def get_event_name(self): + """ + @rtype: str + @return: User-friendly name of the event. + """ + return self.eventName + + def get_event_description(self): + """ + @rtype: str + @return: User-friendly description of the event. + """ + return self.eventDescription + + def get_event_code(self): + """ + @rtype: int + @return: Debug event code as defined in the Win32 API. + """ + return self.raw.dwDebugEventCode + +## # Compatibility with version 1.0 +## # XXX to be removed in version 1.4 +## def get_code(self): +## """ +## Alias of L{get_event_code} for backwards compatibility +## with WinAppDbg version 1.0. +## Will be phased out in the next version. +## +## @rtype: int +## @return: Debug event code as defined in the Win32 API. +## """ +## return self.get_event_code() + + def get_pid(self): + """ + @see: L{get_process} + + @rtype: int + @return: Process global ID where the event occured. + """ + return self.raw.dwProcessId + + def get_tid(self): + """ + @see: L{get_thread} + + @rtype: int + @return: Thread global ID where the event occured. + """ + return self.raw.dwThreadId + + def get_process(self): + """ + @see: L{get_pid} + + @rtype: L{Process} + @return: Process where the event occured. 
+ """ + pid = self.get_pid() + system = self.debug.system + if system.has_process(pid): + process = system.get_process(pid) + else: + # XXX HACK + # The process object was missing for some reason, so make a new one. + process = Process(pid) + system._add_process(process) +## process.scan_threads() # not needed + process.scan_modules() + return process + + def get_thread(self): + """ + @see: L{get_tid} + + @rtype: L{Thread} + @return: Thread where the event occured. + """ + tid = self.get_tid() + process = self.get_process() + if process.has_thread(tid): + thread = process.get_thread(tid) + else: + # XXX HACK + # The thread object was missing for some reason, so make a new one. + thread = Thread(tid) + process._add_thread(thread) + return thread + +#============================================================================== + +class NoEvent (Event): + """ + No event. + + Dummy L{Event} object that can be used as a placeholder when no debug + event has occured yet. It's never returned by the L{EventFactory}. + """ + + eventMethod = 'no_event' + eventName = 'No event' + eventDescription = 'No debug event has occured.' + + def __init__(self, debug, raw = None): + Event.__init__(self, debug, raw) + + def __len__(self): + """ + Always returns C{0}, so when evaluating the object as a boolean it's + always C{False}. This prevents L{Debug.cont} from trying to continue + a dummy event. + """ + return 0 + + def get_event_code(self): + return -1 + + def get_pid(self): + return -1 + + def get_tid(self): + return -1 + + def get_process(self): + return Process(self.get_pid()) + + def get_thread(self): + return Thread(self.get_tid()) + +#============================================================================== + +class ExceptionEvent (Event): + """ + Exception event. + + @type exceptionName: dict( int S{->} str ) + @cvar exceptionName: + Mapping of exception constants to their names. + + @type exceptionDescription: dict( int S{->} str ) + @cvar exceptionDescription: + Mapping of exception constants to user-friendly strings. + + @type breakpoint: L{Breakpoint} + @ivar breakpoint: + If the exception was caused by one of our breakpoints, this member + contains a reference to the breakpoint object. Otherwise it's not + defined. It should only be used from the condition or action callback + routines, instead of the event handler. + + @type hook: L{Hook} + @ivar hook: + If the exception was caused by a function hook, this member contains a + reference to the hook object. Otherwise it's not defined. It should + only be used from the hook callback routines, instead of the event + handler. + """ + + eventName = 'Exception event' + eventDescription = 'An exception was raised by the debugee.' 
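Note on the three private mappings that follow: they translate Win32 exception codes into handler method names, API constant names and user-friendly descriptions. As a minimal, hedged sketch of how the method-name mapping is consumed, an L{EventHandler} subclass simply defines a method named after the mapped value (the C{access_violation} name comes from C{__exceptionMethod}; the accessors used are the ones defined further down in this class, and the class name is illustrative only)::

    class MyHandler (EventHandler):

        def access_violation(self, event):
            # event is an ExceptionEvent for EXCEPTION_ACCESS_VIOLATION.
            print "Access violation at %s (first chance: %r)" % (
                hex(event.get_exception_address()),
                event.is_first_chance())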
+ + __exceptionMethod = { + win32.EXCEPTION_ACCESS_VIOLATION : 'access_violation', + win32.EXCEPTION_ARRAY_BOUNDS_EXCEEDED : 'array_bounds_exceeded', + win32.EXCEPTION_BREAKPOINT : 'breakpoint', + win32.EXCEPTION_DATATYPE_MISALIGNMENT : 'datatype_misalignment', + win32.EXCEPTION_FLT_DENORMAL_OPERAND : 'float_denormal_operand', + win32.EXCEPTION_FLT_DIVIDE_BY_ZERO : 'float_divide_by_zero', + win32.EXCEPTION_FLT_INEXACT_RESULT : 'float_inexact_result', + win32.EXCEPTION_FLT_INVALID_OPERATION : 'float_invalid_operation', + win32.EXCEPTION_FLT_OVERFLOW : 'float_overflow', + win32.EXCEPTION_FLT_STACK_CHECK : 'float_stack_check', + win32.EXCEPTION_FLT_UNDERFLOW : 'float_underflow', + win32.EXCEPTION_ILLEGAL_INSTRUCTION : 'illegal_instruction', + win32.EXCEPTION_IN_PAGE_ERROR : 'in_page_error', + win32.EXCEPTION_INT_DIVIDE_BY_ZERO : 'integer_divide_by_zero', + win32.EXCEPTION_INT_OVERFLOW : 'integer_overflow', + win32.EXCEPTION_INVALID_DISPOSITION : 'invalid_disposition', + win32.EXCEPTION_NONCONTINUABLE_EXCEPTION : 'noncontinuable_exception', + win32.EXCEPTION_PRIV_INSTRUCTION : 'privileged_instruction', + win32.EXCEPTION_SINGLE_STEP : 'single_step', + win32.EXCEPTION_STACK_OVERFLOW : 'stack_overflow', + win32.EXCEPTION_GUARD_PAGE : 'guard_page', + win32.EXCEPTION_INVALID_HANDLE : 'invalid_handle', + win32.EXCEPTION_POSSIBLE_DEADLOCK : 'possible_deadlock', + win32.EXCEPTION_WX86_BREAKPOINT : 'wow64_breakpoint', + win32.CONTROL_C_EXIT : 'control_c_exit', + win32.DBG_CONTROL_C : 'debug_control_c', + win32.MS_VC_EXCEPTION : 'ms_vc_exception', + } + + __exceptionName = { + win32.EXCEPTION_ACCESS_VIOLATION : 'EXCEPTION_ACCESS_VIOLATION', + win32.EXCEPTION_ARRAY_BOUNDS_EXCEEDED : 'EXCEPTION_ARRAY_BOUNDS_EXCEEDED', + win32.EXCEPTION_BREAKPOINT : 'EXCEPTION_BREAKPOINT', + win32.EXCEPTION_DATATYPE_MISALIGNMENT : 'EXCEPTION_DATATYPE_MISALIGNMENT', + win32.EXCEPTION_FLT_DENORMAL_OPERAND : 'EXCEPTION_FLT_DENORMAL_OPERAND', + win32.EXCEPTION_FLT_DIVIDE_BY_ZERO : 'EXCEPTION_FLT_DIVIDE_BY_ZERO', + win32.EXCEPTION_FLT_INEXACT_RESULT : 'EXCEPTION_FLT_INEXACT_RESULT', + win32.EXCEPTION_FLT_INVALID_OPERATION : 'EXCEPTION_FLT_INVALID_OPERATION', + win32.EXCEPTION_FLT_OVERFLOW : 'EXCEPTION_FLT_OVERFLOW', + win32.EXCEPTION_FLT_STACK_CHECK : 'EXCEPTION_FLT_STACK_CHECK', + win32.EXCEPTION_FLT_UNDERFLOW : 'EXCEPTION_FLT_UNDERFLOW', + win32.EXCEPTION_ILLEGAL_INSTRUCTION : 'EXCEPTION_ILLEGAL_INSTRUCTION', + win32.EXCEPTION_IN_PAGE_ERROR : 'EXCEPTION_IN_PAGE_ERROR', + win32.EXCEPTION_INT_DIVIDE_BY_ZERO : 'EXCEPTION_INT_DIVIDE_BY_ZERO', + win32.EXCEPTION_INT_OVERFLOW : 'EXCEPTION_INT_OVERFLOW', + win32.EXCEPTION_INVALID_DISPOSITION : 'EXCEPTION_INVALID_DISPOSITION', + win32.EXCEPTION_NONCONTINUABLE_EXCEPTION : 'EXCEPTION_NONCONTINUABLE_EXCEPTION', + win32.EXCEPTION_PRIV_INSTRUCTION : 'EXCEPTION_PRIV_INSTRUCTION', + win32.EXCEPTION_SINGLE_STEP : 'EXCEPTION_SINGLE_STEP', + win32.EXCEPTION_STACK_OVERFLOW : 'EXCEPTION_STACK_OVERFLOW', + win32.EXCEPTION_GUARD_PAGE : 'EXCEPTION_GUARD_PAGE', + win32.EXCEPTION_INVALID_HANDLE : 'EXCEPTION_INVALID_HANDLE', + win32.EXCEPTION_POSSIBLE_DEADLOCK : 'EXCEPTION_POSSIBLE_DEADLOCK', + win32.EXCEPTION_WX86_BREAKPOINT : 'EXCEPTION_WX86_BREAKPOINT', + win32.CONTROL_C_EXIT : 'CONTROL_C_EXIT', + win32.DBG_CONTROL_C : 'DBG_CONTROL_C', + win32.MS_VC_EXCEPTION : 'MS_VC_EXCEPTION', + } + + __exceptionDescription = { + win32.EXCEPTION_ACCESS_VIOLATION : 'Access violation', + win32.EXCEPTION_ARRAY_BOUNDS_EXCEEDED : 'Array bounds exceeded', + win32.EXCEPTION_BREAKPOINT : 'Breakpoint', + 
win32.EXCEPTION_DATATYPE_MISALIGNMENT : 'Datatype misalignment', + win32.EXCEPTION_FLT_DENORMAL_OPERAND : 'Float denormal operand', + win32.EXCEPTION_FLT_DIVIDE_BY_ZERO : 'Float divide by zero', + win32.EXCEPTION_FLT_INEXACT_RESULT : 'Float inexact result', + win32.EXCEPTION_FLT_INVALID_OPERATION : 'Float invalid operation', + win32.EXCEPTION_FLT_OVERFLOW : 'Float overflow', + win32.EXCEPTION_FLT_STACK_CHECK : 'Float stack check', + win32.EXCEPTION_FLT_UNDERFLOW : 'Float underflow', + win32.EXCEPTION_ILLEGAL_INSTRUCTION : 'Illegal instruction', + win32.EXCEPTION_IN_PAGE_ERROR : 'In-page error', + win32.EXCEPTION_INT_DIVIDE_BY_ZERO : 'Integer divide by zero', + win32.EXCEPTION_INT_OVERFLOW : 'Integer overflow', + win32.EXCEPTION_INVALID_DISPOSITION : 'Invalid disposition', + win32.EXCEPTION_NONCONTINUABLE_EXCEPTION : 'Noncontinuable exception', + win32.EXCEPTION_PRIV_INSTRUCTION : 'Privileged instruction', + win32.EXCEPTION_SINGLE_STEP : 'Single step event', + win32.EXCEPTION_STACK_OVERFLOW : 'Stack limits overflow', + win32.EXCEPTION_GUARD_PAGE : 'Guard page hit', + win32.EXCEPTION_INVALID_HANDLE : 'Invalid handle', + win32.EXCEPTION_POSSIBLE_DEADLOCK : 'Possible deadlock', + win32.EXCEPTION_WX86_BREAKPOINT : 'WOW64 breakpoint', + win32.CONTROL_C_EXIT : 'Control-C exit', + win32.DBG_CONTROL_C : 'Debug Control-C', + win32.MS_VC_EXCEPTION : 'Microsoft Visual C++ exception', + } + + @property + def eventMethod(self): + return self.__exceptionMethod.get( + self.get_exception_code(), 'unknown_exception') + + def get_exception_name(self): + """ + @rtype: str + @return: Name of the exception as defined by the Win32 API. + """ + code = self.get_exception_code() + unk = HexDump.integer(code) + return self.__exceptionName.get(code, unk) + + def get_exception_description(self): + """ + @rtype: str + @return: User-friendly name of the exception. + """ + code = self.get_exception_code() + description = self.__exceptionDescription.get(code, None) + if description is None: + try: + description = 'Exception code %s (%s)' + description = description % (HexDump.integer(code), + ctypes.FormatError(code)) + except OverflowError: + description = 'Exception code %s' % HexDump.integer(code) + return description + + def is_first_chance(self): + """ + @rtype: bool + @return: C{True} for first chance exceptions, C{False} for last chance. + """ + return self.raw.u.Exception.dwFirstChance != 0 + + def is_last_chance(self): + """ + @rtype: bool + @return: The opposite of L{is_first_chance}. + """ + return not self.is_first_chance() + + def is_noncontinuable(self): + """ + @see: U{http://msdn.microsoft.com/en-us/library/aa363082(VS.85).aspx} + + @rtype: bool + @return: C{True} if the exception is noncontinuable, + C{False} otherwise. + + Attempting to continue a noncontinuable exception results in an + EXCEPTION_NONCONTINUABLE_EXCEPTION exception to be raised. + """ + return bool( self.raw.u.Exception.ExceptionRecord.ExceptionFlags & \ + win32.EXCEPTION_NONCONTINUABLE ) + + def is_continuable(self): + """ + @rtype: bool + @return: The opposite of L{is_noncontinuable}. + """ + return not self.is_noncontinuable() + + def is_user_defined_exception(self): + """ + Determines if this is an user-defined exception. User-defined + exceptions may contain any exception code that is not system reserved. + + Often the exception code is also a valid Win32 error code, but that's + up to the debugged application. + + @rtype: bool + @return: C{True} if the exception is user-defined, C{False} otherwise. 
+ """ + return self.get_exception_code() & 0x10000000 == 0 + + def is_system_defined_exception(self): + """ + @rtype: bool + @return: The opposite of L{is_user_defined_exception}. + """ + return not self.is_user_defined_exception() + + def get_exception_code(self): + """ + @rtype: int + @return: Exception code as defined by the Win32 API. + """ + return self.raw.u.Exception.ExceptionRecord.ExceptionCode + + def get_exception_address(self): + """ + @rtype: int + @return: Memory address where the exception occured. + """ + address = self.raw.u.Exception.ExceptionRecord.ExceptionAddress + if address is None: + address = 0 + return address + + def get_exception_information(self, index): + """ + @type index: int + @param index: Index into the exception information block. + + @rtype: int + @return: Exception information DWORD. + """ + if index < 0 or index > win32.EXCEPTION_MAXIMUM_PARAMETERS: + raise IndexError("Array index out of range: %s" % repr(index)) + info = self.raw.u.Exception.ExceptionRecord.ExceptionInformation + value = info[index] + if value is None: + value = 0 + return value + + def get_exception_information_as_list(self): + """ + @rtype: list( int ) + @return: Exception information block. + """ + info = self.raw.u.Exception.ExceptionRecord.ExceptionInformation + data = list() + for index in compat.xrange(0, win32.EXCEPTION_MAXIMUM_PARAMETERS): + value = info[index] + if value is None: + value = 0 + data.append(value) + return data + + def get_fault_type(self): + """ + @rtype: int + @return: Access violation type. + Should be one of the following constants: + + - L{win32.EXCEPTION_READ_FAULT} + - L{win32.EXCEPTION_WRITE_FAULT} + - L{win32.EXCEPTION_EXECUTE_FAULT} + + @note: This method is only meaningful for access violation exceptions, + in-page memory error exceptions and guard page exceptions. + + @raise NotImplementedError: Wrong kind of exception. + """ + if self.get_exception_code() not in (win32.EXCEPTION_ACCESS_VIOLATION, + win32.EXCEPTION_IN_PAGE_ERROR, win32.EXCEPTION_GUARD_PAGE): + msg = "This method is not meaningful for %s." + raise NotImplementedError(msg % self.get_exception_name()) + return self.get_exception_information(0) + + def get_fault_address(self): + """ + @rtype: int + @return: Access violation memory address. + + @note: This method is only meaningful for access violation exceptions, + in-page memory error exceptions and guard page exceptions. + + @raise NotImplementedError: Wrong kind of exception. + """ + if self.get_exception_code() not in (win32.EXCEPTION_ACCESS_VIOLATION, + win32.EXCEPTION_IN_PAGE_ERROR, win32.EXCEPTION_GUARD_PAGE): + msg = "This method is not meaningful for %s." + raise NotImplementedError(msg % self.get_exception_name()) + return self.get_exception_information(1) + + def get_ntstatus_code(self): + """ + @rtype: int + @return: NTSTATUS status code that caused the exception. + + @note: This method is only meaningful for in-page memory error + exceptions. + + @raise NotImplementedError: Not an in-page memory error. + """ + if self.get_exception_code() != win32.EXCEPTION_IN_PAGE_ERROR: + msg = "This method is only meaningful "\ + "for in-page memory error exceptions." + raise NotImplementedError(msg) + return self.get_exception_information(2) + + def is_nested(self): + """ + @rtype: bool + @return: Returns C{True} if there are additional exception records + associated with this exception. This would mean the exception + is nested, that is, it was triggered while trying to handle + at least one previous exception. 
+ """ + return bool(self.raw.u.Exception.ExceptionRecord.ExceptionRecord) + + def get_raw_exception_record_list(self): + """ + Traverses the exception record linked list and builds a Python list. + + Nested exception records are received for nested exceptions. This + happens when an exception is raised in the debugee while trying to + handle a previous exception. + + @rtype: list( L{win32.EXCEPTION_RECORD} ) + @return: + List of raw exception record structures as used by the Win32 API. + + There is always at least one exception record, so the list is + never empty. All other methods of this class read from the first + exception record only, that is, the most recent exception. + """ + # The first EXCEPTION_RECORD is contained in EXCEPTION_DEBUG_INFO. + # The remaining EXCEPTION_RECORD structures are linked by pointers. + nested = list() + record = self.raw.u.Exception + while True: + record = record.ExceptionRecord + if not record: + break + nested.append(record) + return nested + + def get_nested_exceptions(self): + """ + Traverses the exception record linked list and builds a Python list. + + Nested exception records are received for nested exceptions. This + happens when an exception is raised in the debugee while trying to + handle a previous exception. + + @rtype: list( L{ExceptionEvent} ) + @return: + List of ExceptionEvent objects representing each exception record + found in this event. + + There is always at least one exception record, so the list is + never empty. All other methods of this class read from the first + exception record only, that is, the most recent exception. + """ + # The list always begins with ourselves. + # Just put a reference to "self" as the first element, + # and start looping from the second exception record. + nested = [ self ] + raw = self.raw + dwDebugEventCode = raw.dwDebugEventCode + dwProcessId = raw.dwProcessId + dwThreadId = raw.dwThreadId + dwFirstChance = raw.u.Exception.dwFirstChance + record = raw.u.Exception.ExceptionRecord + while True: + record = record.ExceptionRecord + if not record: + break + raw = win32.DEBUG_EVENT() + raw.dwDebugEventCode = dwDebugEventCode + raw.dwProcessId = dwProcessId + raw.dwThreadId = dwThreadId + raw.u.Exception.ExceptionRecord = record + raw.u.Exception.dwFirstChance = dwFirstChance + event = EventFactory.get(self.debug, raw) + nested.append(event) + return nested + +#============================================================================== + +class CreateThreadEvent (Event): + """ + Thread creation event. + """ + + eventMethod = 'create_thread' + eventName = 'Thread creation event' + eventDescription = 'A new thread has started.' + + def get_thread_handle(self): + """ + @rtype: L{ThreadHandle} + @return: Thread handle received from the system. + Returns C{None} if the handle is not available. + """ + # The handle doesn't need to be closed. + # See http://msdn.microsoft.com/en-us/library/ms681423(VS.85).aspx + hThread = self.raw.u.CreateThread.hThread + if hThread in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): + hThread = None + else: + hThread = ThreadHandle(hThread, False, win32.THREAD_ALL_ACCESS) + return hThread + + def get_teb(self): + """ + @rtype: int + @return: Pointer to the TEB. + """ + return self.raw.u.CreateThread.lpThreadLocalBase + + def get_start_address(self): + """ + @rtype: int + @return: Pointer to the first instruction to execute in this thread. + + Returns C{NULL} when the debugger attached to a process + and the thread already existed. 
+ + See U{http://msdn.microsoft.com/en-us/library/ms679295(VS.85).aspx} + """ + return self.raw.u.CreateThread.lpStartAddress + +#============================================================================== + +class CreateProcessEvent (Event): + """ + Process creation event. + """ + + eventMethod = 'create_process' + eventName = 'Process creation event' + eventDescription = 'A new process has started.' + + def get_file_handle(self): + """ + @rtype: L{FileHandle} or None + @return: File handle to the main module, received from the system. + Returns C{None} if the handle is not available. + """ + # This handle DOES need to be closed. + # Therefore we must cache it so it doesn't + # get closed after the first call. + try: + hFile = self.__hFile + except AttributeError: + hFile = self.raw.u.CreateProcessInfo.hFile + if hFile in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): + hFile = None + else: + hFile = FileHandle(hFile, True) + self.__hFile = hFile + return hFile + + def get_process_handle(self): + """ + @rtype: L{ProcessHandle} + @return: Process handle received from the system. + Returns C{None} if the handle is not available. + """ + # The handle doesn't need to be closed. + # See http://msdn.microsoft.com/en-us/library/ms681423(VS.85).aspx + hProcess = self.raw.u.CreateProcessInfo.hProcess + if hProcess in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): + hProcess = None + else: + hProcess = ProcessHandle(hProcess, False, win32.PROCESS_ALL_ACCESS) + return hProcess + + def get_thread_handle(self): + """ + @rtype: L{ThreadHandle} + @return: Thread handle received from the system. + Returns C{None} if the handle is not available. + """ + # The handle doesn't need to be closed. + # See http://msdn.microsoft.com/en-us/library/ms681423(VS.85).aspx + hThread = self.raw.u.CreateProcessInfo.hThread + if hThread in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): + hThread = None + else: + hThread = ThreadHandle(hThread, False, win32.THREAD_ALL_ACCESS) + return hThread + + def get_start_address(self): + """ + @rtype: int + @return: Pointer to the first instruction to execute in this process. + + Returns C{NULL} when the debugger attaches to a process. + + See U{http://msdn.microsoft.com/en-us/library/ms679295(VS.85).aspx} + """ + return self.raw.u.CreateProcessInfo.lpStartAddress + + def get_image_base(self): + """ + @rtype: int + @return: Base address of the main module. + @warn: This value is taken from the PE file + and may be incorrect because of ASLR! + """ + # TODO try to calculate the real value when ASLR is active. + return self.raw.u.CreateProcessInfo.lpBaseOfImage + + def get_teb(self): + """ + @rtype: int + @return: Pointer to the TEB. + """ + return self.raw.u.CreateProcessInfo.lpThreadLocalBase + + def get_debug_info(self): + """ + @rtype: str + @return: Debugging information. + """ + raw = self.raw.u.CreateProcessInfo + ptr = raw.lpBaseOfImage + raw.dwDebugInfoFileOffset + size = raw.nDebugInfoSize + data = self.get_process().peek(ptr, size) + if len(data) == size: + return data + return None + + def get_filename(self): + """ + @rtype: str, None + @return: This method does it's best to retrieve the filename to + the main module of the process. However, sometimes that's not + possible, and C{None} is returned instead. + """ + + # Try to get the filename from the file handle. 
+ szFilename = None + hFile = self.get_file_handle() + if hFile: + szFilename = hFile.get_filename() + if not szFilename: + + # Try to get it from CREATE_PROCESS_DEBUG_INFO.lpImageName + # It's NULL or *NULL most of the times, see MSDN: + # http://msdn.microsoft.com/en-us/library/ms679286(VS.85).aspx + aProcess = self.get_process() + lpRemoteFilenamePtr = self.raw.u.CreateProcessInfo.lpImageName + if lpRemoteFilenamePtr: + lpFilename = aProcess.peek_uint(lpRemoteFilenamePtr) + fUnicode = bool( self.raw.u.CreateProcessInfo.fUnicode ) + szFilename = aProcess.peek_string(lpFilename, fUnicode) + + # XXX TODO + # Sometimes the filename is relative (ntdll.dll, kernel32.dll). + # It could be converted to an absolute pathname (SearchPath). + + # Try to get it from Process.get_image_name(). + if not szFilename: + szFilename = aProcess.get_image_name() + + # Return the filename, or None on error. + return szFilename + + def get_module_base(self): + """ + @rtype: int + @return: Base address of the main module. + """ + return self.get_image_base() + + def get_module(self): + """ + @rtype: L{Module} + @return: Main module of the process. + """ + return self.get_process().get_module( self.get_module_base() ) + +#============================================================================== + +class ExitThreadEvent (Event): + """ + Thread termination event. + """ + + eventMethod = 'exit_thread' + eventName = 'Thread termination event' + eventDescription = 'A thread has finished executing.' + + def get_exit_code(self): + """ + @rtype: int + @return: Exit code of the thread. + """ + return self.raw.u.ExitThread.dwExitCode + +#============================================================================== + +class ExitProcessEvent (Event): + """ + Process termination event. + """ + + eventMethod = 'exit_process' + eventName = 'Process termination event' + eventDescription = 'A process has finished executing.' + + def get_exit_code(self): + """ + @rtype: int + @return: Exit code of the process. + """ + return self.raw.u.ExitProcess.dwExitCode + + def get_filename(self): + """ + @rtype: None or str + @return: Filename of the main module. + C{None} if the filename is unknown. + """ + return self.get_module().get_filename() + + def get_image_base(self): + """ + @rtype: int + @return: Base address of the main module. + """ + return self.get_module_base() + + def get_module_base(self): + """ + @rtype: int + @return: Base address of the main module. + """ + return self.get_module().get_base() + + def get_module(self): + """ + @rtype: L{Module} + @return: Main module of the process. + """ + return self.get_process().get_main_module() + +#============================================================================== + +class LoadDLLEvent (Event): + """ + Module load event. + """ + + eventMethod = 'load_dll' + eventName = 'Module load event' + eventDescription = 'A new DLL library was loaded by the debugee.' + + def get_module_base(self): + """ + @rtype: int + @return: Base address for the newly loaded DLL. + """ + return self.raw.u.LoadDll.lpBaseOfDll + + def get_module(self): + """ + @rtype: L{Module} + @return: Module object for the newly loaded DLL. + """ + lpBaseOfDll = self.get_module_base() + aProcess = self.get_process() + if aProcess.has_module(lpBaseOfDll): + aModule = aProcess.get_module(lpBaseOfDll) + else: + # XXX HACK + # For some reason the module object is missing, so make a new one. 
+ aModule = Module(lpBaseOfDll, + hFile = self.get_file_handle(), + fileName = self.get_filename(), + process = aProcess) + aProcess._add_module(aModule) + return aModule + + def get_file_handle(self): + """ + @rtype: L{FileHandle} or None + @return: File handle to the newly loaded DLL received from the system. + Returns C{None} if the handle is not available. + """ + # This handle DOES need to be closed. + # Therefore we must cache it so it doesn't + # get closed after the first call. + try: + hFile = self.__hFile + except AttributeError: + hFile = self.raw.u.LoadDll.hFile + if hFile in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): + hFile = None + else: + hFile = FileHandle(hFile, True) + self.__hFile = hFile + return hFile + + def get_filename(self): + """ + @rtype: str, None + @return: This method does it's best to retrieve the filename to + the newly loaded module. However, sometimes that's not + possible, and C{None} is returned instead. + """ + szFilename = None + + # Try to get it from LOAD_DLL_DEBUG_INFO.lpImageName + # It's NULL or *NULL most of the times, see MSDN: + # http://msdn.microsoft.com/en-us/library/ms679286(VS.85).aspx + aProcess = self.get_process() + lpRemoteFilenamePtr = self.raw.u.LoadDll.lpImageName + if lpRemoteFilenamePtr: + lpFilename = aProcess.peek_uint(lpRemoteFilenamePtr) + fUnicode = bool( self.raw.u.LoadDll.fUnicode ) + szFilename = aProcess.peek_string(lpFilename, fUnicode) + if not szFilename: + szFilename = None + + # Try to get the filename from the file handle. + if not szFilename: + hFile = self.get_file_handle() + if hFile: + szFilename = hFile.get_filename() + + # Return the filename, or None on error. + return szFilename + +#============================================================================== + +class UnloadDLLEvent (Event): + """ + Module unload event. + """ + + eventMethod = 'unload_dll' + eventName = 'Module unload event' + eventDescription = 'A DLL library was unloaded by the debugee.' + + def get_module_base(self): + """ + @rtype: int + @return: Base address for the recently unloaded DLL. + """ + return self.raw.u.UnloadDll.lpBaseOfDll + + def get_module(self): + """ + @rtype: L{Module} + @return: Module object for the recently unloaded DLL. + """ + lpBaseOfDll = self.get_module_base() + aProcess = self.get_process() + if aProcess.has_module(lpBaseOfDll): + aModule = aProcess.get_module(lpBaseOfDll) + else: + aModule = Module(lpBaseOfDll, process = aProcess) + aProcess._add_module(aModule) + return aModule + + def get_file_handle(self): + """ + @rtype: None or L{FileHandle} + @return: File handle to the recently unloaded DLL. + Returns C{None} if the handle is not available. + """ + hFile = self.get_module().hFile + if hFile in (0, win32.NULL, win32.INVALID_HANDLE_VALUE): + hFile = None + return hFile + + def get_filename(self): + """ + @rtype: None or str + @return: Filename of the recently unloaded DLL. + C{None} if the filename is unknown. + """ + return self.get_module().get_filename() + +#============================================================================== + +class OutputDebugStringEvent (Event): + """ + Debug string output event. + """ + + eventMethod = 'output_string' + eventName = 'Debug string output event' + eventDescription = 'The debugee sent a message to the debugger.' + + def get_debug_string(self): + """ + @rtype: str, compat.unicode + @return: String sent by the debugee. + It may be ANSI or Unicode and may end with a null character. 
+ """ + return self.get_process().peek_string( + self.raw.u.DebugString.lpDebugStringData, + bool( self.raw.u.DebugString.fUnicode ), + self.raw.u.DebugString.nDebugStringLength) + +#============================================================================== + +class RIPEvent (Event): + """ + RIP event. + """ + + eventMethod = 'rip' + eventName = 'RIP event' + eventDescription = 'An error has occured and the process ' \ + 'can no longer be debugged.' + + def get_rip_error(self): + """ + @rtype: int + @return: RIP error code as defined by the Win32 API. + """ + return self.raw.u.RipInfo.dwError + + def get_rip_type(self): + """ + @rtype: int + @return: RIP type code as defined by the Win32 API. + May be C{0} or one of the following: + - L{win32.SLE_ERROR} + - L{win32.SLE_MINORERROR} + - L{win32.SLE_WARNING} + """ + return self.raw.u.RipInfo.dwType + +#============================================================================== + +class EventFactory (StaticClass): + """ + Factory of L{Event} objects. + + @type baseEvent: L{Event} + @cvar baseEvent: + Base class for Event objects. + It's used for unknown event codes. + + @type eventClasses: dict( int S{->} L{Event} ) + @cvar eventClasses: + Dictionary that maps event codes to L{Event} subclasses. + """ + + baseEvent = Event + eventClasses = { + win32.EXCEPTION_DEBUG_EVENT : ExceptionEvent, # 1 + win32.CREATE_THREAD_DEBUG_EVENT : CreateThreadEvent, # 2 + win32.CREATE_PROCESS_DEBUG_EVENT : CreateProcessEvent, # 3 + win32.EXIT_THREAD_DEBUG_EVENT : ExitThreadEvent, # 4 + win32.EXIT_PROCESS_DEBUG_EVENT : ExitProcessEvent, # 5 + win32.LOAD_DLL_DEBUG_EVENT : LoadDLLEvent, # 6 + win32.UNLOAD_DLL_DEBUG_EVENT : UnloadDLLEvent, # 7 + win32.OUTPUT_DEBUG_STRING_EVENT : OutputDebugStringEvent, # 8 + win32.RIP_EVENT : RIPEvent, # 9 + } + + @classmethod + def get(cls, debug, raw): + """ + @type debug: L{Debug} + @param debug: Debug object that received the event. + + @type raw: L{DEBUG_EVENT} + @param raw: Raw DEBUG_EVENT structure as used by the Win32 API. + + @rtype: L{Event} + @returns: An Event object or one of it's subclasses, + depending on the event type. + """ + eventClass = cls.eventClasses.get(raw.dwDebugEventCode, cls.baseEvent) + return eventClass(debug, raw) + +#============================================================================== + +class EventHandler (object): + """ + Base class for debug event handlers. + + Your program should subclass it to implement it's own event handling. + + The constructor can be overriden as long as you call the superclass + constructor. The special method L{__call__} B{MUST NOT} be overriden. + + The signature for event handlers is the following:: + + def event_handler(self, event): + + Where B{event} is an L{Event} object. + + Each event handler is named after the event they handle. + This is the list of all valid event handler names: + + - I{event} + + Receives an L{Event} object or an object of any of it's subclasses, + and handles any event for which no handler was defined. + + - I{unknown_event} + + Receives an L{Event} object or an object of any of it's subclasses, + and handles any event unknown to the debugging engine. (This is not + likely to happen unless the Win32 debugging API is changed in future + versions of Windows). + + - I{exception} + + Receives an L{ExceptionEvent} object and handles any exception for + which no handler was defined. See above for exception handlers. 
+ + - I{unknown_exception} + + Receives an L{ExceptionEvent} object and handles any exception unknown + to the debugging engine. This usually happens for C++ exceptions, which + are not standardized and may change from one compiler to the next. + + Currently we have partial support for C++ exceptions thrown by Microsoft + compilers. + + Also see: U{RaiseException() + } + + - I{create_thread} + + Receives a L{CreateThreadEvent} object. + + - I{create_process} + + Receives a L{CreateProcessEvent} object. + + - I{exit_thread} + + Receives a L{ExitThreadEvent} object. + + - I{exit_process} + + Receives a L{ExitProcessEvent} object. + + - I{load_dll} + + Receives a L{LoadDLLEvent} object. + + - I{unload_dll} + + Receives an L{UnloadDLLEvent} object. + + - I{output_string} + + Receives an L{OutputDebugStringEvent} object. + + - I{rip} + + Receives a L{RIPEvent} object. + + This is the list of all valid exception handler names + (they all receive an L{ExceptionEvent} object): + + - I{access_violation} + - I{array_bounds_exceeded} + - I{breakpoint} + - I{control_c_exit} + - I{datatype_misalignment} + - I{debug_control_c} + - I{float_denormal_operand} + - I{float_divide_by_zero} + - I{float_inexact_result} + - I{float_invalid_operation} + - I{float_overflow} + - I{float_stack_check} + - I{float_underflow} + - I{guard_page} + - I{illegal_instruction} + - I{in_page_error} + - I{integer_divide_by_zero} + - I{integer_overflow} + - I{invalid_disposition} + - I{invalid_handle} + - I{ms_vc_exception} + - I{noncontinuable_exception} + - I{possible_deadlock} + - I{privileged_instruction} + - I{single_step} + - I{stack_overflow} + - I{wow64_breakpoint} + + + + @type apiHooks: dict( str S{->} list( tuple( str, int ) ) ) + @cvar apiHooks: + Dictionary that maps module names to lists of + tuples of ( procedure name, parameter count ). + + All procedures listed here will be hooked for calls from the debugee. + When this happens, the corresponding event handler can be notified both + when the procedure is entered and when it's left by the debugee. + + For example, let's hook the LoadLibraryEx() API call. + This would be the declaration of apiHooks:: + + from winappdbg import EventHandler + from winappdbg.win32 import * + + # (...) + + class MyEventHandler (EventHandler): + + apiHook = { + + "kernel32.dll" : ( + + # Procedure name Signature + ( "LoadLibraryEx", (PVOID, HANDLE, DWORD) ), + + # (more procedures can go here...) + ), + + # (more libraries can go here...) + } + + # (your method definitions go here...) + + Note that all pointer types are treated like void pointers, so your + callback won't get the string or structure pointed to by it, but the + remote memory address instead. This is so to prevent the ctypes library + from being "too helpful" and trying to dereference the pointer. To get + the actual data being pointed to, use one of the L{Process.read} + methods. + + Now, to intercept calls to LoadLibraryEx define a method like this in + your event handler class:: + + def pre_LoadLibraryEx(self, event, ra, lpFilename, hFile, dwFlags): + szFilename = event.get_process().peek_string(lpFilename) + + # (...) + + Note that the first parameter is always the L{Event} object, and the + second parameter is the return address. The third parameter and above + are the values passed to the hooked function. + + Finally, to intercept returns from calls to LoadLibraryEx define a + method like this:: + + def post_LoadLibraryEx(self, event, retval): + # (...) 
+ + The first parameter is the L{Event} object and the second is the + return value from the hooked function. + """ + +#------------------------------------------------------------------------------ + + # Default (empty) API hooks dictionary. + apiHooks = {} + + def __init__(self): + """ + Class constructor. Don't forget to call it when subclassing! + + Forgetting to call the superclass constructor is a common mistake when + you're new to Python. :) + + Example:: + class MyEventHandler (EventHandler): + + # Override the constructor to use an extra argument. + def __init__(self, myArgument): + + # Do something with the argument, like keeping it + # as an instance variable. + self.myVariable = myArgument + + # Call the superclass constructor. + super(MyEventHandler, self).__init__() + + # The rest of your code below... + """ + + # TODO + # All this does is set up the hooks. + # This code should be moved to the EventDispatcher class. + # Then the hooks can be set up at set_event_handler() instead, making + # this class even simpler. The downside here is deciding where to store + # the ApiHook objects. + + # Convert the tuples into instances of the ApiHook class. + # A new dictionary must be instanced, otherwise we could also be + # affecting all other instances of the EventHandler. + apiHooks = dict() + for lib, hooks in compat.iteritems(self.apiHooks): + hook_objs = [] + for proc, args in hooks: + if type(args) in (int, long): + h = ApiHook(self, lib, proc, paramCount = args) + else: + h = ApiHook(self, lib, proc, signature = args) + hook_objs.append(h) + apiHooks[lib] = hook_objs + self.__apiHooks = apiHooks + + def __get_hooks_for_dll(self, event): + """ + Get the requested API hooks for the current DLL. + + Used by L{__hook_dll} and L{__unhook_dll}. + """ + result = [] + if self.__apiHooks: + path = event.get_module().get_filename() + if path: + lib_name = PathOperations.pathname_to_filename(path).lower() + for hook_lib, hook_api_list in compat.iteritems(self.__apiHooks): + if hook_lib == lib_name: + result.extend(hook_api_list) + return result + + def __hook_dll(self, event): + """ + Hook the requested API calls (in self.apiHooks). + + This method is called automatically whenever a DLL is loaded. + """ + debug = event.debug + pid = event.get_pid() + for hook_api_stub in self.__get_hooks_for_dll(event): + hook_api_stub.hook(debug, pid) + + def __unhook_dll(self, event): + """ + Unhook the requested API calls (in self.apiHooks). + + This method is called automatically whenever a DLL is unloaded. + """ + debug = event.debug + pid = event.get_pid() + for hook_api_stub in self.__get_hooks_for_dll(event): + hook_api_stub.unhook(debug, pid) + + def __call__(self, event): + """ + Dispatch debug events. + + @warn: B{Don't override this method!} + + @type event: L{Event} + @param event: Event object. + """ + try: + code = event.get_event_code() + if code == win32.LOAD_DLL_DEBUG_EVENT: + self.__hook_dll(event) + elif code == win32.UNLOAD_DLL_DEBUG_EVENT: + self.__unhook_dll(event) + finally: + method = EventDispatcher.get_handler_method(self, event) + if method is not None: + return method(event) + +#============================================================================== + +# TODO +# * Make it more generic by adding a few more callbacks. +# That way it will be possible to make a thread sifter too. +# * This interface feels too much like an antipattern. +# When apiHooks is deprecated this will have to be reviewed. 
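The dispatch precedence implemented by L{EventDispatcher.get_handler_method} (defined later in this module) is: a method named after C{event.eventMethod} (for example C{load_dll} or C{breakpoint}) takes priority, then C{exception} for exception events, then the generic C{event} fallback. A minimal sketch of a handler that relies on that order; the class name and printed strings are illustrative only::

    class TracingHandler (EventHandler):

        def load_dll(self, event):
            # Most specific: only module load events reach this method.
            print "Loaded %s" % event.get_filename()

        def exception(self, event):
            # Any exception without a more specific handler lands here.
            print "Exception: %s" % event.get_exception_name()

        def event(self, event):
            # Generic fallback for every other debug event.
            print event.get_event_description()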
+ +class EventSift(EventHandler): + """ + Event handler that allows you to use customized event handlers for each + process you're attached to. + + This makes coding the event handlers much easier, because each instance + will only "know" about one process. So you can code your event handler as + if only one process was being debugged, but your debugger can attach to + multiple processes. + + Example:: + from winappdbg import Debug, EventHandler, EventSift + + # This class was written assuming only one process is attached. + # If you used it directly it would break when attaching to another + # process, or when a child process is spawned. + class MyEventHandler (EventHandler): + + def create_process(self, event): + self.first = True + self.name = event.get_process().get_filename() + print "Attached to %s" % self.name + + def breakpoint(self, event): + if self.first: + self.first = False + print "First breakpoint reached at %s" % self.name + + def exit_process(self, event): + print "Detached from %s" % self.name + + # Now when debugging we use the EventSift to be able to work with + # multiple processes while keeping our code simple. :) + if __name__ == "__main__": + handler = EventSift(MyEventHandler) + #handler = MyEventHandler() # try uncommenting this line... + with Debug(handler) as debug: + debug.execl("calc.exe") + debug.execl("notepad.exe") + debug.execl("charmap.exe") + debug.loop() + + Subclasses of C{EventSift} can prevent specific event types from + being forwarded by simply defining a method for it. That means your + subclass can handle some event types globally while letting other types + be handled on per-process basis. To forward events manually you can + call C{self.event(event)}. + + Example:: + class MySift (EventSift): + + # Don't forward this event. + def debug_control_c(self, event): + pass + + # Handle this event globally without forwarding it. + def output_string(self, event): + print "Debug string: %s" % event.get_debug_string() + + # Handle this event globally and then forward it. + def create_process(self, event): + print "New process created, PID: %d" % event.get_pid() + return self.event(event) + + # All other events will be forwarded. + + Note that overriding the C{event} method would cause no events to be + forwarded at all. To prevent this, call the superclass implementation. + + Example:: + + def we_want_to_forward_this_event(event): + "Use whatever logic you want here..." + # (...return True or False...) + + class MySift (EventSift): + + def event(self, event): + + # If the event matches some custom criteria... + if we_want_to_forward_this_event(event): + + # Forward it. + return super(MySift, self).event(event) + + # Otherwise, don't. + + @type cls: class + @ivar cls: + Event handler class. There will be one instance of this class + per debugged process in the L{forward} dictionary. + + @type argv: list + @ivar argv: + Positional arguments to pass to the constructor of L{cls}. + + @type argd: list + @ivar argd: + Keyword arguments to pass to the constructor of L{cls}. + + @type forward: dict + @ivar forward: + Dictionary that maps each debugged process ID to an instance of L{cls}. + """ + + def __init__(self, cls, *argv, **argd): + """ + Maintains an instance of your event handler for each process being + debugged, and forwards the events of each process to each corresponding + instance. + + @warn: If you subclass L{EventSift} and reimplement this method, + don't forget to call the superclass constructor! 
+ + @see: L{event} + + @type cls: class + @param cls: Event handler class. This must be the class itself, not an + instance! All additional arguments passed to the constructor of + the event forwarder will be passed on to the constructor of this + class as well. + """ + self.cls = cls + self.argv = argv + self.argd = argd + self.forward = dict() + super(EventSift, self).__init__() + + # XXX HORRIBLE HACK + # This makes apiHooks work in the inner handlers. + def __call__(self, event): + try: + eventCode = event.get_event_code() + if eventCode in (win32.LOAD_DLL_DEBUG_EVENT, + win32.LOAD_DLL_DEBUG_EVENT): + pid = event.get_pid() + handler = self.forward.get(pid, None) + if handler is None: + handler = self.cls(*self.argv, **self.argd) + self.forward[pid] = handler + if isinstance(handler, EventHandler): + if eventCode == win32.LOAD_DLL_DEBUG_EVENT: + handler.__EventHandler_hook_dll(event) + else: + handler.__EventHandler_unhook_dll(event) + finally: + return super(EventSift, self).__call__(event) + + def event(self, event): + """ + Forwards events to the corresponding instance of your event handler + for this process. + + If you subclass L{EventSift} and reimplement this method, no event + will be forwarded at all unless you call the superclass implementation. + + If your filtering is based on the event type, there's a much easier way + to do it: just implement a handler for it. + """ + eventCode = event.get_event_code() + pid = event.get_pid() + handler = self.forward.get(pid, None) + if handler is None: + handler = self.cls(*self.argv, **self.argd) + if eventCode != win32.EXIT_PROCESS_DEBUG_EVENT: + self.forward[pid] = handler + elif eventCode == win32.EXIT_PROCESS_DEBUG_EVENT: + del self.forward[pid] + return handler(event) + +#============================================================================== + +class EventDispatcher (object): + """ + Implements debug event dispatching capabilities. + + @group Debugging events: + get_event_handler, set_event_handler, get_handler_method + """ + + # Maps event code constants to the names of the pre-notify routines. + # These routines are called BEFORE the user-defined handlers. + # Unknown codes are ignored. + __preEventNotifyCallbackName = { + win32.CREATE_THREAD_DEBUG_EVENT : '_notify_create_thread', + win32.CREATE_PROCESS_DEBUG_EVENT : '_notify_create_process', + win32.LOAD_DLL_DEBUG_EVENT : '_notify_load_dll', + } + + # Maps event code constants to the names of the post-notify routines. + # These routines are called AFTER the user-defined handlers. + # Unknown codes are ignored. + __postEventNotifyCallbackName = { + win32.EXIT_THREAD_DEBUG_EVENT : '_notify_exit_thread', + win32.EXIT_PROCESS_DEBUG_EVENT : '_notify_exit_process', + win32.UNLOAD_DLL_DEBUG_EVENT : '_notify_unload_dll', + win32.RIP_EVENT : '_notify_rip', + } + + # Maps exception code constants to the names of the pre-notify routines. + # These routines are called BEFORE the user-defined handlers. + # Unknown codes are ignored. + __preExceptionNotifyCallbackName = { + win32.EXCEPTION_BREAKPOINT : '_notify_breakpoint', + win32.EXCEPTION_WX86_BREAKPOINT : '_notify_breakpoint', + win32.EXCEPTION_SINGLE_STEP : '_notify_single_step', + win32.EXCEPTION_GUARD_PAGE : '_notify_guard_page', + win32.DBG_CONTROL_C : '_notify_debug_control_c', + win32.MS_VC_EXCEPTION : '_notify_ms_vc_exception', + } + + # Maps exception code constants to the names of the post-notify routines. + # These routines are called AFTER the user-defined handlers. + # Unknown codes are ignored. 
+ __postExceptionNotifyCallbackName = { + } + + def __init__(self, eventHandler = None): + """ + Event dispatcher. + + @type eventHandler: L{EventHandler} + @param eventHandler: (Optional) User-defined event handler. + + @raise TypeError: The event handler is of an incorrect type. + + @note: The L{eventHandler} parameter may be any callable Python object + (for example a function, or an instance method). + However you'll probably find it more convenient to use an instance + of a subclass of L{EventHandler} here. + """ + self.set_event_handler(eventHandler) + + def get_event_handler(self): + """ + Get the event handler. + + @see: L{set_event_handler} + + @rtype: L{EventHandler} + @return: Current event handler object, or C{None}. + """ + return self.__eventHandler + + def set_event_handler(self, eventHandler): + """ + Set the event handler. + + @warn: This is normally not needed. Use with care! + + @type eventHandler: L{EventHandler} + @param eventHandler: New event handler object, or C{None}. + + @rtype: L{EventHandler} + @return: Previous event handler object, or C{None}. + + @raise TypeError: The event handler is of an incorrect type. + + @note: The L{eventHandler} parameter may be any callable Python object + (for example a function, or an instance method). + However you'll probably find it more convenient to use an instance + of a subclass of L{EventHandler} here. + """ + if eventHandler is not None and not callable(eventHandler): + raise TypeError("Event handler must be a callable object") + try: + wrong_type = issubclass(eventHandler, EventHandler) + except TypeError: + wrong_type = False + if wrong_type: + classname = str(eventHandler) + msg = "Event handler must be an instance of class %s" + msg += "rather than the %s class itself. (Missing parens?)" + msg = msg % (classname, classname) + raise TypeError(msg) + try: + previous = self.__eventHandler + except AttributeError: + previous = None + self.__eventHandler = eventHandler + return previous + + @staticmethod + def get_handler_method(eventHandler, event, fallback=None): + """ + Retrieves the appropriate callback method from an L{EventHandler} + instance for the given L{Event} object. + + @type eventHandler: L{EventHandler} + @param eventHandler: + Event handler object whose methods we are examining. + + @type event: L{Event} + @param event: Debugging event to be handled. + + @type fallback: callable + @param fallback: (Optional) If no suitable method is found in the + L{EventHandler} instance, return this value. + + @rtype: callable + @return: Bound method that will handle the debugging event. + Returns C{None} if no such method is defined. + """ + eventCode = event.get_event_code() + method = getattr(eventHandler, 'event', fallback) + if eventCode == win32.EXCEPTION_DEBUG_EVENT: + method = getattr(eventHandler, 'exception', method) + method = getattr(eventHandler, event.eventMethod, method) + return method + + def dispatch(self, event): + """ + Sends event notifications to the L{Debug} object and + the L{EventHandler} object provided by the user. + + The L{Debug} object will forward the notifications to it's contained + snapshot objects (L{System}, L{Process}, L{Thread} and L{Module}) when + appropriate. + + @warning: This method is called automatically from L{Debug.dispatch}. + + @see: L{Debug.cont}, L{Debug.loop}, L{Debug.wait} + + @type event: L{Event} + @param event: Event object passed to L{Debug.dispatch}. + + @raise WindowsError: Raises an exception on error. 
+ """ + returnValue = None + bCallHandler = True + pre_handler = None + post_handler = None + eventCode = event.get_event_code() + + # Get the pre and post notification methods for exceptions. + # If not found, the following steps take care of that. + if eventCode == win32.EXCEPTION_DEBUG_EVENT: + exceptionCode = event.get_exception_code() + pre_name = self.__preExceptionNotifyCallbackName.get( + exceptionCode, None) + post_name = self.__postExceptionNotifyCallbackName.get( + exceptionCode, None) + if pre_name is not None: + pre_handler = getattr(self, pre_name, None) + if post_name is not None: + post_handler = getattr(self, post_name, None) + + # Get the pre notification method for all other events. + # This includes the exception event if no notify method was found + # for this exception code. + if pre_handler is None: + pre_name = self.__preEventNotifyCallbackName.get(eventCode, None) + if pre_name is not None: + pre_handler = getattr(self, pre_name, pre_handler) + + # Get the post notification method for all other events. + # This includes the exception event if no notify method was found + # for this exception code. + if post_handler is None: + post_name = self.__postEventNotifyCallbackName.get(eventCode, None) + if post_name is not None: + post_handler = getattr(self, post_name, post_handler) + + # Call the pre-notify method only if it was defined. + # If an exception is raised don't call the other methods. + if pre_handler is not None: + bCallHandler = pre_handler(event) + + # Call the user-defined event handler only if the pre-notify + # method was not defined, or was and it returned True. + try: + if bCallHandler and self.__eventHandler is not None: + try: + returnValue = self.__eventHandler(event) + except Exception: + e = sys.exc_info()[1] + msg = ("Event handler pre-callback %r" + " raised an exception: %s") + msg = msg % (self.__eventHandler, traceback.format_exc(e)) + warnings.warn(msg, EventCallbackWarning) + returnValue = None + + # Call the post-notify method if defined, even if an exception is + # raised by the user-defined event handler. + finally: + if post_handler is not None: + post_handler(event) + + # Return the value from the call to the user-defined event handler. + # If not defined return None. + return returnValue diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/interactive.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/interactive.py new file mode 100644 index 00000000..f14883a2 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/interactive.py @@ -0,0 +1,2281 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Acknowledgements: +# Nicolas Economou, for his command line debugger on which this is inspired. +# http://tinyurl.com/nicolaseconomou + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Interactive debugging console. + +@group Debugging: + ConsoleDebugger + +@group Exceptions: + CmdError +""" + +from __future__ import with_statement + +__revision__ = "$Id$" + +__all__ = [ 'ConsoleDebugger', 'CmdError' ] + +# TODO document this module with docstrings. +# TODO command to set a last error breakpoint. +# TODO command to show available plugins. + +from winappdbg import win32 +from winappdbg import compat +from winappdbg.system import System +from winappdbg.util import PathOperations +from winappdbg.event import EventHandler, NoEvent +from winappdbg.textio import HexInput, HexOutput, HexDump, CrashDump, DebugLog + +import os +import sys +import code +import time +import warnings +import traceback + +# too many variables named "cmd" to have a module by the same name :P +from cmd import Cmd + +# lazy imports +readline = None + +#============================================================================== + +class DummyEvent (NoEvent): + "Dummy event object used internally by L{ConsoleDebugger}." + + def get_pid(self): + return self._pid + + def get_tid(self): + return self._tid + + def get_process(self): + return self._process + + def get_thread(self): + return self._thread + +#============================================================================== + +class CmdError (Exception): + """ + Exception raised when a command parsing error occurs. + Used internally by L{ConsoleDebugger}. + """ + +#============================================================================== + +class ConsoleDebugger (Cmd, EventHandler): + """ + Interactive console debugger. + + @see: L{Debug.interactive} + """ + +#------------------------------------------------------------------------------ +# Class variables + + # Exception to raise when an error occurs executing a command. + command_error_exception = CmdError + + # Milliseconds to wait for debug events in the main loop. + dwMilliseconds = 100 + + # History file name. + history_file = '.winappdbg_history' + + # Confirm before quitting? + confirm_quit = True + + # Valid plugin name characters. + valid_plugin_name_chars = 'ABCDEFGHIJKLMNOPQRSTUVWXY' \ + 'abcdefghijklmnopqrstuvwxy' \ + '012345678' \ + '_' + + # Names of the registers. 
+ segment_names = ( 'cs', 'ds', 'es', 'fs', 'gs' ) + + register_alias_64_to_32 = { + 'eax':'Rax', 'ebx':'Rbx', 'ecx':'Rcx', 'edx':'Rdx', + 'eip':'Rip', 'ebp':'Rbp', 'esp':'Rsp', 'esi':'Rsi', 'edi':'Rdi' + } + register_alias_64_to_16 = { 'ax':'Rax', 'bx':'Rbx', 'cx':'Rcx', 'dx':'Rdx' } + register_alias_64_to_8_low = { 'al':'Rax', 'bl':'Rbx', 'cl':'Rcx', 'dl':'Rdx' } + register_alias_64_to_8_high = { 'ah':'Rax', 'bh':'Rbx', 'ch':'Rcx', 'dh':'Rdx' } + register_alias_32_to_16 = { 'ax':'Eax', 'bx':'Ebx', 'cx':'Ecx', 'dx':'Edx' } + register_alias_32_to_8_low = { 'al':'Eax', 'bl':'Ebx', 'cl':'Ecx', 'dl':'Edx' } + register_alias_32_to_8_high = { 'ah':'Eax', 'bh':'Ebx', 'ch':'Ecx', 'dh':'Edx' } + + register_aliases_full_32 = list(segment_names) + register_aliases_full_32.extend(compat.iterkeys(register_alias_32_to_16)) + register_aliases_full_32.extend(compat.iterkeys(register_alias_32_to_8_low)) + register_aliases_full_32.extend(compat.iterkeys(register_alias_32_to_8_high)) + register_aliases_full_32 = tuple(register_aliases_full_32) + + register_aliases_full_64 = list(segment_names) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_32)) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_16)) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_8_low)) + register_aliases_full_64.extend(compat.iterkeys(register_alias_64_to_8_high)) + register_aliases_full_64 = tuple(register_aliases_full_64) + + # Names of the control flow instructions. + jump_instructions = ( + 'jmp', 'jecxz', 'jcxz', + 'ja', 'jnbe', 'jae', 'jnb', 'jb', 'jnae', 'jbe', 'jna', 'jc', 'je', + 'jz', 'jnc', 'jne', 'jnz', 'jnp', 'jpo', 'jp', 'jpe', 'jg', 'jnle', + 'jge', 'jnl', 'jl', 'jnge', 'jle', 'jng', 'jno', 'jns', 'jo', 'js' + ) + call_instructions = ( 'call', 'ret', 'retn' ) + loop_instructions = ( 'loop', 'loopz', 'loopnz', 'loope', 'loopne' ) + control_flow_instructions = call_instructions + loop_instructions + \ + jump_instructions + +#------------------------------------------------------------------------------ +# Instance variables + + def __init__(self): + """ + Interactive console debugger. + + @see: L{Debug.interactive} + """ + Cmd.__init__(self) + EventHandler.__init__(self) + + # Quit the debugger when True. + self.debuggerExit = False + + # Full path to the history file. + self.history_file_full_path = None + + # Last executed command. + self.__lastcmd = "" + +#------------------------------------------------------------------------------ +# Debugger + + # Use this Debug object. + def start_using_debugger(self, debug): + + # Clear the previous Debug object. + self.stop_using_debugger() + + # Keep the Debug object. + self.debug = debug + + # Set ourselves as the event handler for the debugger. + self.prevHandler = debug.set_event_handler(self) + + # Stop using the Debug object given by start_using_debugger(). + # Circular references must be removed, or the destructors never get called. + def stop_using_debugger(self): + if hasattr(self, 'debug'): + debug = self.debug + debug.set_event_handler(self.prevHandler) + del self.prevHandler + del self.debug + return debug + return None + + # Destroy the Debug object. 
+ def destroy_debugger(self, autodetach = True): + debug = self.stop_using_debugger() + if debug is not None: + if not autodetach: + debug.kill_all(bIgnoreExceptions=True) + debug.lastEvent = None + debug.stop() + del debug + + @property + def lastEvent(self): + return self.debug.lastEvent + + def set_fake_last_event(self, process): + if self.lastEvent is None: + self.debug.lastEvent = DummyEvent(self.debug) + self.debug.lastEvent._process = process + self.debug.lastEvent._thread = process.get_thread( + process.get_thread_ids()[0]) + self.debug.lastEvent._pid = process.get_pid() + self.debug.lastEvent._tid = self.lastEvent._thread.get_tid() + +#------------------------------------------------------------------------------ +# Input + +# TODO +# * try to guess breakpoints when insufficient data is given +# * child Cmd instances will have to be used for other prompts, for example +# when assembling or editing memory - it may also be a good idea to think +# if it's possible to make the main Cmd instance also a child, instead of +# the debugger itself - probably the same goes for the EventHandler, maybe +# it can be used as a contained object rather than a parent class. + + # Join a token list into an argument string. + def join_tokens(self, token_list): + return self.debug.system.argv_to_cmdline(token_list) + + # Split an argument string into a token list. + def split_tokens(self, arg, min_count = 0, max_count = None): + token_list = self.debug.system.cmdline_to_argv(arg) + if len(token_list) < min_count: + raise CmdError("missing parameters.") + if max_count and len(token_list) > max_count: + raise CmdError("too many parameters.") + return token_list + + # Token is a thread ID or name. + def input_thread(self, token): + targets = self.input_thread_list( [token] ) + if len(targets) == 0: + raise CmdError("missing thread name or ID") + if len(targets) > 1: + msg = "more than one thread with that name:\n" + for tid in targets: + msg += "\t%d\n" % tid + msg = msg[:-len("\n")] + raise CmdError(msg) + return targets[0] + + # Token list is a list of thread IDs or names. + def input_thread_list(self, token_list): + targets = set() + system = self.debug.system + for token in token_list: + try: + tid = self.input_integer(token) + if not system.has_thread(tid): + raise CmdError("thread not found (%d)" % tid) + targets.add(tid) + except ValueError: + found = set() + for process in system.iter_processes(): + found.update( system.find_threads_by_name(token) ) + if not found: + raise CmdError("thread not found (%s)" % token) + for thread in found: + targets.add( thread.get_tid() ) + targets = list(targets) + targets.sort() + return targets + + # Token is a process ID or name. + def input_process(self, token): + targets = self.input_process_list( [token] ) + if len(targets) == 0: + raise CmdError("missing process name or ID") + if len(targets) > 1: + msg = "more than one process with that name:\n" + for pid in targets: + msg += "\t%d\n" % pid + msg = msg[:-len("\n")] + raise CmdError(msg) + return targets[0] + + # Token list is a list of process IDs or names. 
+ def input_process_list(self, token_list): + targets = set() + system = self.debug.system + for token in token_list: + try: + pid = self.input_integer(token) + if not system.has_process(pid): + raise CmdError("process not found (%d)" % pid) + targets.add(pid) + except ValueError: + found = system.find_processes_by_filename(token) + if not found: + raise CmdError("process not found (%s)" % token) + for (process, _) in found: + targets.add( process.get_pid() ) + targets = list(targets) + targets.sort() + return targets + + # Token is a command line to execute. + def input_command_line(self, command_line): + argv = self.debug.system.cmdline_to_argv(command_line) + if not argv: + raise CmdError("missing command line to execute") + fname = argv[0] + if not os.path.exists(fname): + try: + fname, _ = win32.SearchPath(None, fname, '.exe') + except WindowsError: + raise CmdError("file not found: %s" % fname) + argv[0] = fname + command_line = self.debug.system.argv_to_cmdline(argv) + return command_line + + # Token is an integer. + # Only hexadecimal format is supported. + def input_hexadecimal_integer(self, token): + return int(token, 0x10) + + # Token is an integer. + # It can be in any supported format. + def input_integer(self, token): + return HexInput.integer(token) +## input_integer = input_hexadecimal_integer + + # Token is an address. + # The address can be a integer, a label or a register. + def input_address(self, token, pid = None, tid = None): + address = None + if self.is_register(token): + if tid is None: + if self.lastEvent is None or pid != self.lastEvent.get_pid(): + msg = "can't resolve register (%s) for unknown thread" + raise CmdError(msg % token) + tid = self.lastEvent.get_tid() + address = self.input_register(token, tid) + if address is None: + try: + address = self.input_hexadecimal_integer(token) + except ValueError: + if pid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + process = self.lastEvent.get_process() + elif self.lastEvent is not None and pid == self.lastEvent.get_pid(): + process = self.lastEvent.get_process() + else: + try: + process = self.debug.system.get_process(pid) + except KeyError: + raise CmdError("process not found (%d)" % pid) + try: + address = process.resolve_label(token) + except Exception: + raise CmdError("unknown address (%s)" % token) + return address + + # Token is an address range, or a single address. + # The addresses can be integers, labels or registers. + def input_address_range(self, token_list, pid = None, tid = None): + if len(token_list) == 2: + token_1, token_2 = token_list + address = self.input_address(token_1, pid, tid) + try: + size = self.input_integer(token_2) + except ValueError: + raise CmdError("bad address range: %s %s" % (token_1, token_2)) + elif len(token_list) == 1: + token = token_list[0] + if '-' in token: + try: + token_1, token_2 = token.split('-') + except Exception: + raise CmdError("bad address range: %s" % token) + address = self.input_address(token_1, pid, tid) + size = self.input_address(token_2, pid, tid) - address + else: + address = self.input_address(token, pid, tid) + size = None + return address, size + + # XXX TODO + # Support non-integer registers here. 
+ def is_register(self, token): + if win32.arch == 'i386': + if token in self.register_aliases_full_32: + return True + token = token.title() + for (name, typ) in win32.CONTEXT._fields_: + if name == token: + return win32.sizeof(typ) == win32.sizeof(win32.DWORD) + elif win32.arch == 'amd64': + if token in self.register_aliases_full_64: + return True + token = token.title() + for (name, typ) in win32.CONTEXT._fields_: + if name == token: + return win32.sizeof(typ) == win32.sizeof(win32.DWORD64) + return False + + # The token is a register name. + # Returns None if no register name is matched. + def input_register(self, token, tid = None): + if tid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + else: + thread = self.debug.system.get_thread(tid) + ctx = thread.get_context() + + token = token.lower() + title = token.title() + + if title in ctx: + return ctx.get(title) # eax -> Eax + + if ctx.arch == 'i386': + + if token in self.segment_names: + return ctx.get( 'Seg%s' % title ) # cs -> SegCs + + if token in self.register_alias_32_to_16: + return ctx.get( self.register_alias_32_to_16[token] ) & 0xFFFF + + if token in self.register_alias_32_to_8_low: + return ctx.get( self.register_alias_32_to_8_low[token] ) & 0xFF + + if token in self.register_alias_32_to_8_high: + return (ctx.get( self.register_alias_32_to_8_high[token] ) & 0xFF00) >> 8 + + elif ctx.arch == 'amd64': + + if token in self.segment_names: + return ctx.get( 'Seg%s' % title ) # cs -> SegCs + + if token in self.register_alias_64_to_32: + return ctx.get( self.register_alias_64_to_32[token] ) & 0xFFFFFFFF + + if token in self.register_alias_64_to_16: + return ctx.get( self.register_alias_64_to_16[token] ) & 0xFFFF + + if token in self.register_alias_64_to_8_low: + return ctx.get( self.register_alias_64_to_8_low[token] ) & 0xFF + + if token in self.register_alias_64_to_8_high: + return (ctx.get( self.register_alias_64_to_8_high[token] ) & 0xFF00) >> 8 + + return None + + # Token list contains an address or address range. + # The prefix is also parsed looking for process and thread IDs. + def input_full_address_range(self, token_list): + pid, tid = self.get_process_and_thread_ids_from_prefix() + address, size = self.input_address_range(token_list, pid, tid) + return pid, tid, address, size + + # Token list contains a breakpoint. + def input_breakpoint(self, token_list): + pid, tid, address, size = self.input_full_address_range(token_list) + if not self.debug.is_debugee(pid): + raise CmdError("target process is not being debugged") + return pid, tid, address, size + + # Token list contains a memory address, and optional size and process. + # Sets the results as the default for the next display command. + def input_display(self, token_list, default_size = 64): + pid, tid, address, size = self.input_full_address_range(token_list) + if not size: + size = default_size + next_address = HexOutput.integer(address + size) + self.default_display_target = next_address + return pid, tid, address, size + +#------------------------------------------------------------------------------ +# Output + + # Tell the user a module was loaded. + def print_module_load(self, event): + mod = event.get_module() + base = mod.get_base() + name = mod.get_filename() + if not name: + name = '' + msg = "Loaded module (%s) %s" + msg = msg % (HexDump.address(base), name) + print(msg) + + # Tell the user a module was unloaded. 
+ def print_module_unload(self, event): + mod = event.get_module() + base = mod.get_base() + name = mod.get_filename() + if not name: + name = '' + msg = "Unloaded module (%s) %s" + msg = msg % (HexDump.address(base), name) + print(msg) + + # Tell the user a process was started. + def print_process_start(self, event): + pid = event.get_pid() + start = event.get_start_address() + if start: + start = HexOutput.address(start) + print("Started process %d at %s" % (pid, start)) + else: + print("Attached to process %d" % pid) + + # Tell the user a thread was started. + def print_thread_start(self, event): + tid = event.get_tid() + start = event.get_start_address() + if start: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + start = event.get_process().get_label_at_address(start) + print("Started thread %d at %s" % (tid, start)) + else: + print("Attached to thread %d" % tid) + + # Tell the user a process has finished. + def print_process_end(self, event): + pid = event.get_pid() + code = event.get_exit_code() + print("Process %d terminated, exit code %d" % (pid, code)) + + # Tell the user a thread has finished. + def print_thread_end(self, event): + tid = event.get_tid() + code = event.get_exit_code() + print("Thread %d terminated, exit code %d" % (tid, code)) + + # Print(debug strings. + def print_debug_string(self, event): + tid = event.get_tid() + string = event.get_debug_string() + print("Thread %d says: %r" % (tid, string)) + + # Inform the user of any other debugging event. + def print_event(self, event): + code = HexDump.integer( event.get_event_code() ) + name = event.get_event_name() + desc = event.get_event_description() + if code in desc: + print('') + print("%s: %s" % (name, desc)) + else: + print('') + print("%s (%s): %s" % (name, code, desc)) + self.print_event_location(event) + + # Stop on exceptions and prompt for commands. + def print_exception(self, event): + address = HexDump.address( event.get_exception_address() ) + code = HexDump.integer( event.get_exception_code() ) + desc = event.get_exception_description() + if event.is_first_chance(): + chance = 'first' + else: + chance = 'second' + if code in desc: + msg = "%s at address %s (%s chance)" % (desc, address, chance) + else: + msg = "%s (%s) at address %s (%s chance)" % (desc, code, address, chance) + print('') + print(msg) + self.print_event_location(event) + + # Show the current location in the code. + def print_event_location(self, event): + process = event.get_process() + thread = event.get_thread() + self.print_current_location(process, thread) + + # Show the current location in the code. + def print_breakpoint_location(self, event): + process = event.get_process() + thread = event.get_thread() + pc = event.get_exception_address() + self.print_current_location(process, thread, pc) + + # Show the current location in any process and thread. 
+ def print_current_location(self, process = None, thread = None, pc = None): + if not process: + if self.lastEvent is None: + raise CmdError("no current process set") + process = self.lastEvent.get_process() + if not thread: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + thread.suspend() + try: + if pc is None: + pc = thread.get_pc() + ctx = thread.get_context() + finally: + thread.resume() + label = process.get_label_at_address(pc) + try: + disasm = process.disassemble(pc, 15) + except WindowsError: + disasm = None + except NotImplementedError: + disasm = None + print('') + print(CrashDump.dump_registers(ctx),) + print("%s:" % label) + if disasm: + print(CrashDump.dump_code_line(disasm[0], pc, bShowDump = True)) + else: + try: + data = process.peek(pc, 15) + except Exception: + data = None + if data: + print('%s: %s' % (HexDump.address(pc), HexDump.hexblock_byte(data))) + else: + print('%s: ???' % HexDump.address(pc)) + + # Display memory contents using a given method. + def print_memory_display(self, arg, method): + if not arg: + arg = self.default_display_target + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_display(token_list) + label = self.get_process(pid).get_label_at_address(address) + data = self.read_memory(address, size, pid) + if data: + print("%s:" % label) + print(method(data, address),) + +#------------------------------------------------------------------------------ +# Debugging + + # Get the process ID from the prefix or the last event. + def get_process_id_from_prefix(self): + if self.cmdprefix: + pid = self.input_process(self.cmdprefix) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + pid = self.lastEvent.get_pid() + return pid + + # Get the thread ID from the prefix or the last event. + def get_thread_id_from_prefix(self): + if self.cmdprefix: + tid = self.input_thread(self.cmdprefix) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + tid = self.lastEvent.get_tid() + return tid + + # Get the process from the prefix or the last event. + def get_process_from_prefix(self): + pid = self.get_process_id_from_prefix() + return self.get_process(pid) + + # Get the thread from the prefix or the last event. + def get_thread_from_prefix(self): + tid = self.get_thread_id_from_prefix() + return self.get_thread(tid) + + # Get the process and thread IDs from the prefix or the last event. + def get_process_and_thread_ids_from_prefix(self): + if self.cmdprefix: + try: + pid = self.input_process(self.cmdprefix) + tid = None + except CmdError: + try: + tid = self.input_thread(self.cmdprefix) + pid = self.debug.system.get_thread(tid).get_pid() + except CmdError: + msg = "unknown process or thread (%s)" % self.cmdprefix + raise CmdError(msg) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + pid = self.lastEvent.get_pid() + tid = self.lastEvent.get_tid() + return pid, tid + + # Get the process and thread from the prefix or the last event. + def get_process_and_thread_from_prefix(self): + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + thread = self.get_thread(tid) + return process, thread + + # Get the process object. 
+ def get_process(self, pid = None): + if pid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + process = self.lastEvent.get_process() + elif self.lastEvent is not None and pid == self.lastEvent.get_pid(): + process = self.lastEvent.get_process() + else: + try: + process = self.debug.system.get_process(pid) + except KeyError: + raise CmdError("process not found (%d)" % pid) + return process + + # Get the thread object. + def get_thread(self, tid = None): + if tid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + elif self.lastEvent is not None and tid == self.lastEvent.get_tid(): + thread = self.lastEvent.get_thread() + else: + try: + thread = self.debug.system.get_thread(tid) + except KeyError: + raise CmdError("thread not found (%d)" % tid) + return thread + + # Read the process memory. + def read_memory(self, address, size, pid = None): + process = self.get_process(pid) + try: + data = process.peek(address, size) + except WindowsError: + orig_address = HexOutput.integer(address) + next_address = HexOutput.integer(address + size) + msg = "error reading process %d, from %s to %s (%d bytes)" + msg = msg % (pid, orig_address, next_address, size) + raise CmdError(msg) + return data + + # Write the process memory. + def write_memory(self, address, data, pid = None): + process = self.get_process(pid) + try: + process.write(address, data) + except WindowsError: + size = len(data) + orig_address = HexOutput.integer(address) + next_address = HexOutput.integer(address + size) + msg = "error reading process %d, from %s to %s (%d bytes)" + msg = msg % (pid, orig_address, next_address, size) + raise CmdError(msg) + + # Change a register value. + def change_register(self, register, value, tid = None): + + # Get the thread. + if tid is None: + if self.lastEvent is None: + raise CmdError("no current process set") + thread = self.lastEvent.get_thread() + else: + try: + thread = self.debug.system.get_thread(tid) + except KeyError: + raise CmdError("thread not found (%d)" % tid) + + # Convert the value to integer type. + try: + value = self.input_integer(value) + except ValueError: + pid = thread.get_pid() + value = self.input_address(value, pid, tid) + + # Suspend the thread. + # The finally clause ensures the thread is resumed before returning. + thread.suspend() + try: + + # Get the current context. + ctx = thread.get_context() + + # Register name matching is case insensitive. + register = register.lower() + + # Integer 32 bits registers. + if register in self.register_names: + register = register.title() # eax -> Eax + + # Segment (16 bit) registers. + if register in self.segment_names: + register = 'Seg%s' % register.title() # cs -> SegCs + value = value & 0x0000FFFF + + # Integer 16 bits registers. + if register in self.register_alias_16: + register = self.register_alias_16[register] + previous = ctx.get(register) & 0xFFFF0000 + value = (value & 0x0000FFFF) | previous + + # Integer 8 bits registers (low part). + if register in self.register_alias_8_low: + register = self.register_alias_8_low[register] + previous = ctx.get(register) % 0xFFFFFF00 + value = (value & 0x000000FF) | previous + + # Integer 8 bits registers (high part). + if register in self.register_alias_8_high: + register = self.register_alias_8_high[register] + previous = ctx.get(register) % 0xFFFF00FF + value = ((value & 0x000000FF) << 8) | previous + + # Set the new context. 
+ ctx.__setitem__(register, value) + thread.set_context(ctx) + + # Resume the thread. + finally: + thread.resume() + + # Very crude way to find data within the process memory. + # TODO: Perhaps pfind.py can be integrated here instead. + def find_in_memory(self, query, process): + for mbi in process.get_memory_map(): + if mbi.State != win32.MEM_COMMIT or mbi.Protect & win32.PAGE_GUARD: + continue + address = mbi.BaseAddress + size = mbi.RegionSize + try: + data = process.read(address, size) + except WindowsError: + msg = "*** Warning: read error at address %s" + msg = msg % HexDump.address(address) + print(msg) + width = min(len(query), 16) + p = data.find(query) + while p >= 0: + q = p + len(query) + d = data[ p : min(q, p + width) ] + h = HexDump.hexline(d, width = width) + a = HexDump.address(address + p) + print("%s: %s" % (a, h)) + p = data.find(query, q) + + # Kill a process. + def kill_process(self, pid): + process = self.debug.system.get_process(pid) + try: + process.kill() + if self.debug.is_debugee(pid): + self.debug.detach(pid) + print("Killed process (%d)" % pid) + except Exception: + print("Error trying to kill process (%d)" % pid) + + # Kill a thread. + def kill_thread(self, tid): + thread = self.debug.system.get_thread(tid) + try: + thread.kill() + process = thread.get_process() + pid = process.get_pid() + if self.debug.is_debugee(pid) and not process.is_alive(): + self.debug.detach(pid) + print("Killed thread (%d)" % tid) + except Exception: + print("Error trying to kill thread (%d)" % tid) + +#------------------------------------------------------------------------------ +# Command prompt input + + # Prompt the user for commands. + def prompt_user(self): + while not self.debuggerExit: + try: + self.cmdloop() + break + except CmdError: + e = sys.exc_info()[1] + print("*** Error: %s" % str(e)) + except Exception: + traceback.print_exc() +## self.debuggerExit = True + + # Prompt the user for a YES/NO kind of question. + def ask_user(self, msg, prompt = "Are you sure? (y/N): "): + print(msg) + answer = raw_input(prompt) + answer = answer.strip()[:1].lower() + return answer == 'y' + + # Autocomplete the given command when not ambiguous. + # Convert it to lowercase (so commands are seen as case insensitive). + def autocomplete(self, cmd): + cmd = cmd.lower() + completed = self.completenames(cmd) + if len(completed) == 1: + cmd = completed[0] + return cmd + + # Get the help text for the given list of command methods. + # Note it's NOT a list of commands, but a list of actual method names. + # Each line of text is stripped and all lines are sorted. + # Repeated text lines are removed. + # Returns a single, possibly multiline, string. + def get_help(self, commands): + msg = set() + for name in commands: + if name != 'do_help': + try: + doc = getattr(self, name).__doc__.split('\n') + except Exception: + return ( "No help available when Python" + " is run with the -OO switch." ) + for x in doc: + x = x.strip() + if x: + msg.add(' %s' % x) + msg = list(msg) + msg.sort() + msg = '\n'.join(msg) + return msg + + # Parse the prefix and remove it from the command line. + def split_prefix(self, line): + prefix = None + if line.startswith('~'): + pos = line.find(' ') + if pos == 1: + pos = line.find(' ', pos + 1) + if not pos < 0: + prefix = line[ 1 : pos ].strip() + line = line[ pos : ].strip() + return prefix, line + +#------------------------------------------------------------------------------ +# Cmd() hacks + + # Header for help page. 
+ doc_header = 'Available commands (type help * or help )' + +## # Read and write directly to stdin and stdout. +## # This prevents the use of raw_input and print. +## use_rawinput = False + + @property + def prompt(self): + if self.lastEvent: + pid = self.lastEvent.get_pid() + tid = self.lastEvent.get_tid() + if self.debug.is_debugee(pid): +## return '~%d(%d)> ' % (tid, pid) + return '%d:%d> ' % (pid, tid) + return '> ' + + # Return a sorted list of method names. + # Only returns the methods that implement commands. + def get_names(self): + names = Cmd.get_names(self) + names = [ x for x in set(names) if x.startswith('do_') ] + names.sort() + return names + + # Automatically autocomplete commands, even if Tab wasn't pressed. + # The prefix is removed from the line and stored in self.cmdprefix. + # Also implement the commands that consist of a symbol character. + def parseline(self, line): + self.cmdprefix, line = self.split_prefix(line) + line = line.strip() + if line: + if line[0] == '.': + line = 'plugin ' + line[1:] + elif line[0] == '#': + line = 'python ' + line[1:] + cmd, arg, line = Cmd.parseline(self, line) + if cmd: + cmd = self.autocomplete(cmd) + return cmd, arg, line + +## # Don't repeat the last executed command. +## def emptyline(self): +## pass + + # Reset the defaults for some commands. + def preloop(self): + self.default_disasm_target = 'eip' + self.default_display_target = 'eip' + self.last_display_command = self.do_db + + # Put the prefix back in the command line. + def get_lastcmd(self): + return self.__lastcmd + def set_lastcmd(self, lastcmd): + if self.cmdprefix: + lastcmd = '~%s %s' % (self.cmdprefix, lastcmd) + self.__lastcmd = lastcmd + lastcmd = property(get_lastcmd, set_lastcmd) + + # Quit the command prompt if the debuggerExit flag is on. + def postcmd(self, stop, line): + return stop or self.debuggerExit + +#------------------------------------------------------------------------------ +# Commands + + # Each command contains a docstring with it's help text. + # The help text consist of independent text lines, + # where each line shows a command and it's parameters. + # Each command method has the help message for itself and all it's aliases. + # Only the docstring for the "help" command is shown as-is. + + # NOTE: Command methods MUST be all lowercase! + + # Extended help command. + def do_help(self, arg): + """ + ? - show the list of available commands + ? * - show help for all commands + ? [command...] - show help for the given command(s) + help - show the list of available commands + help * - show help for all commands + help [command...] - show help for the given command(s) + """ + if not arg: + Cmd.do_help(self, arg) + elif arg in ('?', 'help'): + # An easter egg :) + print(" Help! I need somebody...") + print(" Help! Not just anybody...") + print(" Help! You know, I need someone...") + print(" Heeelp!") + else: + if arg == '*': + commands = self.get_names() + commands = [ x for x in commands if x.startswith('do_') ] + else: + commands = set() + for x in arg.split(' '): + x = x.strip() + if x: + for n in self.completenames(x): + commands.add( 'do_%s' % n ) + commands = list(commands) + commands.sort() + print(self.get_help(commands)) + + def do_shell(self, arg): + """ + ! - spawn a system shell + shell - spawn a system shell + ! [arguments...] - execute a single shell command + shell [arguments...] - execute a single shell command + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + + # Try to use the environment to locate cmd.exe. 
+ # If not found, it's usually OK to just use the filename, + # since cmd.exe is one of those "magic" programs that + # can be automatically found by CreateProcess. + shell = os.getenv('ComSpec', 'cmd.exe') + + # When given a command, run it and return. + # When no command is given, spawn a shell. + if arg: + arg = '%s /c %s' % (shell, arg) + else: + arg = shell + process = self.debug.system.start_process(arg, bConsole = True) + process.wait() + + # This hack fixes a bug in Python, the interpreter console is closing the + # stdin pipe when calling the exit() function (Ctrl+Z seems to work fine). + class _PythonExit(object): + def __repr__(self): + return "Use exit() or Ctrl-Z plus Return to exit" + def __call__(self): + raise SystemExit() + _python_exit = _PythonExit() + + # Spawns a Python shell with some handy local variables and the winappdbg + # module already imported. Also the console banner is improved. + def _spawn_python_shell(self, arg): + import winappdbg + banner = ('Python %s on %s\nType "help", "copyright", ' + '"credits" or "license" for more information.\n') + platform = winappdbg.version.lower() + platform = 'WinAppDbg %s' % platform + banner = banner % (sys.version, platform) + local = {} + local.update(__builtins__) + local.update({ + '__name__' : '__console__', + '__doc__' : None, + 'exit' : self._python_exit, + 'self' : self, + 'arg' : arg, + 'winappdbg' : winappdbg, + }) + try: + code.interact(banner=banner, local=local) + except SystemExit: + # We need to catch it so it doesn't kill our program. + pass + + def do_python(self, arg): + """ + # - spawn a python interpreter + python - spawn a python interpreter + # - execute a single python statement + python - execute a single python statement + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + + # When given a Python statement, execute it directly. + if arg: + try: + compat.exec_(arg, globals(), locals()) + except Exception: + traceback.print_exc() + + # When no statement is given, spawn a Python interpreter. + else: + try: + self._spawn_python_shell(arg) + except Exception: + e = sys.exc_info()[1] + raise CmdError( + "unhandled exception when running Python console: %s" % e) + + # The plugins interface is quite simple. + # + # Just place a .py file with the plugin name in the "plugins" folder, + # for example "do_example.py" would implement the "example" command. + # + # The plugin must have a function named "do", which implements the + # command functionality exactly like the do_* methods of Cmd instances. + # + # The docstring for the "do" function will be parsed exactly like + # one of the debugger's commands - that is, each line is treated + # independently. + # + def do_plugin(self, arg): + """ + [~prefix] . 
[arguments] - run a plugin command + [~prefix] plugin [arguments] - run a plugin command + """ + pos = arg.find(' ') + if pos < 0: + name = arg + arg = '' + else: + name = arg[:pos] + arg = arg[pos:].strip() + if not name: + raise CmdError("missing plugin name") + for c in name: + if c not in self.valid_plugin_name_chars: + raise CmdError("invalid plugin name: %r" % name) + name = 'winappdbg.plugins.do_%s' % name + try: + plugin = __import__(name) + components = name.split('.') + for comp in components[1:]: + plugin = getattr(plugin, comp) + reload(plugin) + except ImportError: + raise CmdError("plugin not found: %s" % name) + try: + return plugin.do(self, arg) + except CmdError: + raise + except Exception: + e = sys.exc_info()[1] +## traceback.print_exc(e) # XXX DEBUG + raise CmdError("unhandled exception in plugin: %s" % e) + + def do_quit(self, arg): + """ + quit - close the debugging session + q - close the debugging session + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.confirm_quit: + count = self.debug.get_debugee_count() + if count > 0: + if count == 1: + msg = "There's a program still running." + else: + msg = "There are %s programs still running." % count + if not self.ask_user(msg): + return False + self.debuggerExit = True + return True + + do_q = do_quit + + def do_attach(self, arg): + """ + attach [target...] - attach to the given process(es) + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + targets = self.input_process_list( self.split_tokens(arg, 1) ) + if not targets: + print("Error: missing parameters") + else: + debug = self.debug + for pid in targets: + try: + debug.attach(pid) + print("Attached to process (%d)" % pid) + except Exception: + print("Error: can't attach to process (%d)" % pid) + + def do_detach(self, arg): + """ + [~process] detach - detach from the current process + detach - detach from the current process + detach [target...] - detach from the given process(es) + """ + debug = self.debug + token_list = self.split_tokens(arg) + if self.cmdprefix: + token_list.insert(0, self.cmdprefix) + targets = self.input_process_list(token_list) + if not targets: + if self.lastEvent is None: + raise CmdError("no current process set") + targets = [ self.lastEvent.get_pid() ] + for pid in targets: + try: + debug.detach(pid) + print("Detached from process (%d)" % pid) + except Exception: + print("Error: can't detach from process (%d)" % pid) + + def do_windowed(self, arg): + """ + windowed [arguments...] - run a windowed program for debugging + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + cmdline = self.input_command_line(arg) + try: + process = self.debug.execl(arg, + bConsole = False, + bFollow = self.options.follow) + print("Spawned process (%d)" % process.get_pid()) + except Exception: + raise CmdError("can't execute") + self.set_fake_last_event(process) + + def do_console(self, arg): + """ + console [arguments...] 
- run a console program for debugging + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + cmdline = self.input_command_line(arg) + try: + process = self.debug.execl(arg, + bConsole = True, + bFollow = self.options.follow) + print("Spawned process (%d)" % process.get_pid()) + except Exception: + raise CmdError("can't execute") + self.set_fake_last_event(process) + + def do_continue(self, arg): + """ + continue - continue execution + g - continue execution + go - continue execution + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.debug.get_debugee_count() > 0: + return True + + do_g = do_continue + do_go = do_continue + + def do_gh(self, arg): + """ + gh - go with exception handled + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.lastEvent: + self.lastEvent.continueStatus = win32.DBG_EXCEPTION_HANDLED + return self.do_go(arg) + + def do_gn(self, arg): + """ + gn - go with exception not handled + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + if self.lastEvent: + self.lastEvent.continueStatus = win32.DBG_EXCEPTION_NOT_HANDLED + return self.do_go(arg) + + def do_refresh(self, arg): + """ + refresh - refresh the list of running processes and threads + [~process] refresh - refresh the list of running threads + """ + if arg: + raise CmdError("too many arguments") + if self.cmdprefix: + process = self.get_process_from_prefix() + process.scan() + else: + self.debug.system.scan() + + def do_processlist(self, arg): + """ + pl - show the processes being debugged + processlist - show the processes being debugged + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if arg: + raise CmdError("too many arguments") + system = self.debug.system + pid_list = self.debug.get_debugee_pids() + if pid_list: + print("Process ID File name") + for pid in pid_list: + if pid == 0: + filename = "System Idle Process" + elif pid == 4: + filename = "System" + else: + filename = system.get_process(pid).get_filename() + filename = PathOperations.pathname_to_filename(filename) + print("%-12d %s" % (pid, filename)) + + do_pl = do_processlist + + def do_threadlist(self, arg): + """ + tl - show the threads being debugged + threadlist - show the threads being debugged + """ + if arg: + raise CmdError("too many arguments") + if self.cmdprefix: + process = self.get_process_from_prefix() + for thread in process.iter_threads(): + tid = thread.get_tid() + name = thread.get_name() + print("%-12d %s" % (tid, name)) + else: + system = self.debug.system + pid_list = self.debug.get_debugee_pids() + if pid_list: + print("Thread ID Thread name") + for pid in pid_list: + process = system.get_process(pid) + for thread in process.iter_threads(): + tid = thread.get_tid() + name = thread.get_name() + print("%-12d %s" % (tid, name)) + + do_tl = do_threadlist + + def do_kill(self, arg): + """ + [~process] kill - kill a process + [~thread] kill - kill a thread + kill - kill the current process + kill * - kill all debugged processes + kill - kill the given processes and threads + """ + if arg: + if arg == '*': + target_pids = self.debug.get_debugee_pids() + target_tids = list() + else: + target_pids = set() + target_tids = set() + if self.cmdprefix: + pid, tid = self.get_process_and_thread_ids_from_prefix() + if tid is None: + target_tids.add(tid) + else: + target_pids.add(pid) + for token in 
self.split_tokens(arg): + try: + pid = self.input_process(token) + target_pids.add(pid) + except CmdError: + try: + tid = self.input_process(token) + target_pids.add(pid) + except CmdError: + msg = "unknown process or thread (%s)" % token + raise CmdError(msg) + target_pids = list(target_pids) + target_tids = list(target_tids) + target_pids.sort() + target_tids.sort() + msg = "You are about to kill %d processes and %d threads." + msg = msg % ( len(target_pids), len(target_tids) ) + if self.ask_user(msg): + for pid in target_pids: + self.kill_process(pid) + for tid in target_tids: + self.kill_thread(tid) + else: + if self.cmdprefix: + pid, tid = self.get_process_and_thread_ids_from_prefix() + if tid is None: + if self.lastEvent is not None and pid == self.lastEvent.get_pid(): + msg = "You are about to kill the current process." + else: + msg = "You are about to kill process %d." % pid + if self.ask_user(msg): + self.kill_process(pid) + else: + if self.lastEvent is not None and tid == self.lastEvent.get_tid(): + msg = "You are about to kill the current thread." + else: + msg = "You are about to kill thread %d." % tid + if self.ask_user(msg): + self.kill_thread(tid) + else: + if self.lastEvent is None: + raise CmdError("no current process set") + pid = self.lastEvent.get_pid() + if self.ask_user("You are about to kill the current process."): + self.kill_process(pid) + + # TODO: create hidden threads using undocumented API calls. + def do_modload(self, arg): + """ + [~process] modload - load a DLL module + """ + filename = self.split_tokens(arg, 1, 1)[0] + process = self.get_process_from_prefix() + try: + process.inject_dll(filename, bWait=False) + except RuntimeError: + print("Can't inject module: %r" % filename) + + # TODO: modunload + + def do_stack(self, arg): + """ + [~thread] k - show the stack trace + [~thread] stack - show the stack trace + """ + if arg: # XXX TODO add depth parameter + raise CmdError("too many arguments") + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + thread = process.get_thread(tid) + try: + stack_trace = thread.get_stack_trace_with_labels() + if stack_trace: + print(CrashDump.dump_stack_trace_with_labels(stack_trace),) + else: + print("No stack trace available for thread (%d)" % tid) + except WindowsError: + print("Can't get stack trace for thread (%d)" % tid) + + do_k = do_stack + + def do_break(self, arg): + """ + break - force a debug break in all debugees + break [process...] 
- force a debug break + """ + debug = self.debug + system = debug.system + targets = self.input_process_list( self.split_tokens(arg) ) + if not targets: + targets = debug.get_debugee_pids() + targets.sort() + if self.lastEvent: + current = self.lastEvent.get_pid() + else: + current = None + for pid in targets: + if pid != current and debug.is_debugee(pid): + process = system.get_process(pid) + try: + process.debug_break() + except WindowsError: + print("Can't force a debug break on process (%d)") + + def do_step(self, arg): + """ + p - step on the current assembly instruction + next - step on the current assembly instruction + step - step on the current assembly instruction + """ + if self.cmdprefix: + raise CmdError("prefix not allowed") + if self.lastEvent is None: + raise CmdError("no current process set") + if arg: # XXX this check is to be removed + raise CmdError("too many arguments") + pid = self.lastEvent.get_pid() + thread = self.lastEvent.get_thread() + pc = thread.get_pc() + code = thread.disassemble(pc, 16)[0] + size = code[1] + opcode = code[2].lower() + if ' ' in opcode: + opcode = opcode[ : opcode.find(' ') ] + if opcode in self.jump_instructions or opcode in ('int', 'ret', 'retn'): + return self.do_trace(arg) + address = pc + size +## print(hex(pc), hex(address), size # XXX DEBUG + self.debug.stalk_at(pid, address) + return True + + do_p = do_step + do_next = do_step + + def do_trace(self, arg): + """ + t - trace at the current assembly instruction + trace - trace at the current assembly instruction + """ + if arg: # XXX this check is to be removed + raise CmdError("too many arguments") + if self.lastEvent is None: + raise CmdError("no current thread set") + self.lastEvent.get_thread().set_tf() + return True + + do_t = do_trace + + def do_bp(self, arg): + """ + [~process] bp
 <address> - set a code breakpoint
+        """
+        pid = self.get_process_id_from_prefix()
+        if not self.debug.is_debugee(pid):
+            raise CmdError("target process is not being debugged")
+        process = self.get_process(pid)
+        token_list = self.split_tokens(arg, 1, 1)
+        try:
+            address = self.input_address(token_list[0], pid)
+            deferred = False
+        except Exception:
+            address = token_list[0]
+            deferred = True
+        if not address:
+            address = token_list[0]
+            deferred = True
+        self.debug.break_at(pid, address)
+        if deferred:
+            print("Deferred breakpoint set at %s" % address)
+        else:
+            print("Breakpoint set at %s" % address)
+
+    def do_ba(self, arg):
+        """
+        [~thread] ba <a|w|e> <1|2|4|8> <address>
- set hardware breakpoint + """ + debug = self.debug + thread = self.get_thread_from_prefix() + pid = thread.get_pid() + tid = thread.get_tid() + if not debug.is_debugee(pid): + raise CmdError("target thread is not being debugged") + token_list = self.split_tokens(arg, 3, 3) + access = token_list[0].lower() + size = token_list[1] + address = token_list[2] + if access == 'a': + access = debug.BP_BREAK_ON_ACCESS + elif access == 'w': + access = debug.BP_BREAK_ON_WRITE + elif access == 'e': + access = debug.BP_BREAK_ON_EXECUTION + else: + raise CmdError("bad access type: %s" % token_list[0]) + if size == '1': + size = debug.BP_WATCH_BYTE + elif size == '2': + size = debug.BP_WATCH_WORD + elif size == '4': + size = debug.BP_WATCH_DWORD + elif size == '8': + size = debug.BP_WATCH_QWORD + else: + raise CmdError("bad breakpoint size: %s" % size) + thread = self.get_thread_from_prefix() + tid = thread.get_tid() + pid = thread.get_pid() + if not debug.is_debugee(pid): + raise CmdError("target process is not being debugged") + address = self.input_address(address, pid) + if debug.has_hardware_breakpoint(tid, address): + debug.erase_hardware_breakpoint(tid, address) + debug.define_hardware_breakpoint(tid, address, access, size) + debug.enable_hardware_breakpoint(tid, address) + + def do_bm(self, arg): + """ + [~process] bm - set memory breakpoint + """ + pid = self.get_process_id_from_prefix() + if not self.debug.is_debugee(pid): + raise CmdError("target process is not being debugged") + process = self.get_process(pid) + token_list = self.split_tokens(arg, 1, 2) + address, size = self.input_address_range(token_list[0], pid) + self.debug.watch_buffer(pid, address, size) + + def do_bl(self, arg): + """ + bl - list the breakpoints for the current process + bl * - list the breakpoints for all processes + [~process] bl - list the breakpoints for the given process + bl [process...] 
- list the breakpoints for each given process + """ + debug = self.debug + if arg == '*': + if self.cmdprefix: + raise CmdError("prefix not supported") + breakpoints = debug.get_debugee_pids() + else: + targets = self.input_process_list( self.split_tokens(arg) ) + if self.cmdprefix: + targets.insert(0, self.input_process(self.cmdprefix)) + if not targets: + if self.lastEvent is None: + raise CmdError("no current process is set") + targets = [ self.lastEvent.get_pid() ] + for pid in targets: + bplist = debug.get_process_code_breakpoints(pid) + printed_process_banner = False + if bplist: + if not printed_process_banner: + print("Process %d:" % pid) + printed_process_banner = True + for bp in bplist: + address = repr(bp)[1:-1].replace('remote address ','') + print(" %s" % address) + dbplist = debug.get_process_deferred_code_breakpoints(pid) + if dbplist: + if not printed_process_banner: + print("Process %d:" % pid) + printed_process_banner = True + for (label, action, oneshot) in dbplist: + if oneshot: + address = " Deferred unconditional one-shot" \ + " code breakpoint at %s" + else: + address = " Deferred unconditional" \ + " code breakpoint at %s" + address = address % label + print(" %s" % address) + bplist = debug.get_process_page_breakpoints(pid) + if bplist: + if not printed_process_banner: + print("Process %d:" % pid) + printed_process_banner = True + for bp in bplist: + address = repr(bp)[1:-1].replace('remote address ','') + print(" %s" % address) + for tid in debug.system.get_process(pid).iter_thread_ids(): + bplist = debug.get_thread_hardware_breakpoints(tid) + if bplist: + print("Thread %d:" % tid) + for bp in bplist: + address = repr(bp)[1:-1].replace('remote address ','') + print(" %s" % address) + + def do_bo(self, arg): + """ + [~process] bo
 <address> - make a code breakpoint one-shot
+        [~thread] bo <address>
 - make a hardware breakpoint one-shot
+        [~process] bo <address-address> - make a memory breakpoint one-shot
+        [~process] bo <address> <size>
- make a memory breakpoint one-shot + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.enable_one_shot_hardware_breakpoint(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.enable_one_shot_code_breakpoint(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.enable_one_shot_page_breakpoint(pid, address) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_be(self, arg): + """ + [~process] be
 <address> - enable a code breakpoint
+        [~thread] be <address>
 - enable a hardware breakpoint
+        [~process] be <address-address> - enable a memory breakpoint
+        [~process] be <address> <size>
- enable a memory breakpoint + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.enable_hardware_breakpoint(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.enable_code_breakpoint(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.enable_page_breakpoint(pid, address) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_bd(self, arg): + """ + [~process] bd
 <address> - disable a code breakpoint
+        [~thread] bd <address>
 - disable a hardware breakpoint
+        [~process] bd <address-address> - disable a memory breakpoint
+        [~process] bd <address> <size>
- disable a memory breakpoint + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.disable_hardware_breakpoint(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.disable_code_breakpoint(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.disable_page_breakpoint(pid, address) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_bc(self, arg): + """ + [~process] bc
 <address> - clear a code breakpoint
+        [~thread] bc <address>
 - clear a hardware breakpoint
+        [~process] bc <address-address> - clear a memory breakpoint
+        [~process] bc <address> <size>
- clear a memory breakpoint + """ + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_breakpoint(token_list) + debug = self.debug + found = False + if size is None: + if tid is not None: + if debug.has_hardware_breakpoint(tid, address): + debug.dont_watch_variable(tid, address) + found = True + if pid is not None: + if debug.has_code_breakpoint(pid, address): + debug.dont_break_at(pid, address) + found = True + else: + if debug.has_page_breakpoint(pid, address): + debug.dont_watch_buffer(pid, address, size) + found = True + if not found: + print("Error: breakpoint not found.") + + def do_disassemble(self, arg): + """ + [~thread] u [register] - show code disassembly + [~process] u [address] - show code disassembly + [~thread] disassemble [register] - show code disassembly + [~process] disassemble [address] - show code disassembly + """ + if not arg: + arg = self.default_disasm_target + token_list = self.split_tokens(arg, 1, 1) + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + address = self.input_address(token_list[0], pid, tid) + try: + code = process.disassemble(address, 15*8)[:8] + except Exception: + msg = "can't disassemble address %s" + msg = msg % HexDump.address(address) + raise CmdError(msg) + if code: + label = process.get_label_at_address(address) + last_code = code[-1] + next_address = last_code[0] + last_code[1] + next_address = HexOutput.integer(next_address) + self.default_disasm_target = next_address + print("%s:" % label) +## print(CrashDump.dump_code(code)) + for line in code: + print(CrashDump.dump_code_line(line, bShowDump = False)) + + do_u = do_disassemble + + def do_search(self, arg): + """ + [~process] s [address-address] + [~process] search [address-address] + """ + token_list = self.split_tokens(arg, 1, 3) + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + if len(token_list) == 1: + pattern = token_list[0] + minAddr = None + maxAddr = None + else: + pattern = token_list[-1] + addr, size = self.input_address_range(token_list[:-1], pid, tid) + minAddr = addr + maxAddr = addr + size + iter = process.search_bytes(pattern) + if process.get_bits() == 32: + addr_width = 8 + else: + addr_width = 16 + # TODO: need a prettier output here! 
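# --- Illustrative sketch (not part of the vendored file): the bo/be/bd/bc
# handlers above are thin wrappers over the winappdbg Debug API, and "u" boils
# down to Process.disassemble() plus CrashDump formatting. A minimal sketch,
# assuming a standalone winappdbg install, an attached Debug instance, a
# debuggee pid and a resolved code address:
from winappdbg import CrashDump

def exercise_breakpoint(debug, pid, address):
    debug.break_at(pid, address)                      # set a code breakpoint
    if debug.has_code_breakpoint(pid, address):
        debug.disable_code_breakpoint(pid, address)   # what "bd" does
        debug.enable_code_breakpoint(pid, address)    # what "be" does
        debug.dont_break_at(pid, address)             # what "bc" does
    process = debug.system.get_process(pid)
    for line in process.disassemble(address, 32):     # what "u" prints
        print(CrashDump.dump_code_line(line, bShowDump=False))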
+ for addr in iter: + print(HexDump.address(addr, addr_width)) + + do_s = do_search + + def do_searchhex(self, arg): + """ + [~process] sh [address-address] + [~process] searchhex [address-address] + """ + token_list = self.split_tokens(arg, 1, 3) + pid, tid = self.get_process_and_thread_ids_from_prefix() + process = self.get_process(pid) + if len(token_list) == 1: + pattern = token_list[0] + minAddr = None + maxAddr = None + else: + pattern = token_list[-1] + addr, size = self.input_address_range(token_list[:-1], pid, tid) + minAddr = addr + maxAddr = addr + size + iter = process.search_hexa(pattern) + if process.get_bits() == 32: + addr_width = 8 + else: + addr_width = 16 + for addr, bytes in iter: + print(HexDump.hexblock(bytes, addr, addr_width),) + + do_sh = do_searchhex + +## def do_strings(self, arg): +## """ +## [~process] strings - extract ASCII strings from memory +## """ +## if arg: +## raise CmdError("too many arguments") +## pid, tid = self.get_process_and_thread_ids_from_prefix() +## process = self.get_process(pid) +## for addr, size, data in process.strings(): +## print("%s: %r" % (HexDump.address(addr), data) + + def do_d(self, arg): + """ + [~thread] d - show memory contents + [~thread] d - show memory contents + [~thread] d - show memory contents + [~process] d
<address> - show memory contents + [~process] d <address-address> - show memory contents + [~process] d <address> <size>
- show memory contents + """ + return self.last_display_command(arg) + + def do_db(self, arg): + """ + [~thread] db <register> - show memory contents as bytes + [~thread] db <register-register> - show memory contents as bytes + [~thread] db <register> <size> - show memory contents as bytes + [~process] db
<address> - show memory contents as bytes + [~process] db <address-address> - show memory contents as bytes + [~process] db <address> <size>
- show memory contents as bytes + """ + self.print_memory_display(arg, HexDump.hexblock) + self.last_display_command = self.do_db + + def do_dw(self, arg): + """ + [~thread] dw <register> - show memory contents as words + [~thread] dw <register-register> - show memory contents as words + [~thread] dw <register> <size> - show memory contents as words + [~process] dw
<address> - show memory contents as words + [~process] dw <address-address> - show memory contents as words + [~process] dw <address> <size>
- show memory contents as words + """ + self.print_memory_display(arg, HexDump.hexblock_word) + self.last_display_command = self.do_dw + + def do_dd(self, arg): + """ + [~thread] dd <register> - show memory contents as dwords + [~thread] dd <register-register> - show memory contents as dwords + [~thread] dd <register> <size> - show memory contents as dwords + [~process] dd
<address> - show memory contents as dwords + [~process] dd <address-address> - show memory contents as dwords + [~process] dd <address> <size>
- show memory contents as dwords + """ + self.print_memory_display(arg, HexDump.hexblock_dword) + self.last_display_command = self.do_dd + + def do_dq(self, arg): + """ + [~thread] dq <register> - show memory contents as qwords + [~thread] dq <register-register> - show memory contents as qwords + [~thread] dq <register> <size> - show memory contents as qwords + [~process] dq
<address> - show memory contents as qwords + [~process] dq <address-address> - show memory contents as qwords + [~process] dq <address> <size>
- show memory contents as qwords + """ + self.print_memory_display(arg, HexDump.hexblock_qword) + self.last_display_command = self.do_dq + + # XXX TODO + # Change the way the default is used with ds and du + + def do_ds(self, arg): + """ + [~thread] ds <register> - show memory contents as ANSI string + [~process] ds
<address> - show memory contents as ANSI string + """ + if not arg: + arg = self.default_display_target + token_list = self.split_tokens(arg, 1, 1) + pid, tid, address, size = self.input_display(token_list, 256) + process = self.get_process(pid) + data = process.peek_string(address, False, size) + if data: + print(repr(data)) + self.last_display_command = self.do_ds + + def do_du(self, arg): + """ + [~thread] du <register> - show memory contents as Unicode string + [~process] du
<address> - show memory contents as Unicode string + """ + if not arg: + arg = self.default_display_target + token_list = self.split_tokens(arg, 1, 2) + pid, tid, address, size = self.input_display(token_list, 256) + process = self.get_process(pid) + data = process.peek_string(address, True, size) + if data: + print(repr(data)) + self.last_display_command = self.do_du + + def do_register(self, arg): + """ + [~thread] r - print the value of all registers + [~thread] r <register> - print the value of a register + [~thread] r <register>=<value> - change the value of a register + [~thread] register - print the value of all registers + [~thread] register <register> - print the value of a register + [~thread] register <register>=<value> - change the value of a register + """ + arg = arg.strip() + if not arg: + self.print_current_location() + else: + equ = arg.find('=') + if equ >= 0: + register = arg[:equ].strip() + value = arg[equ+1:].strip() + if not value: + value = '0' + self.change_register(register, value) + else: + value = self.input_register(arg) + if value is None: + raise CmdError("unknown register: %s" % arg) + try: + label = None + thread = self.get_thread_from_prefix() + process = thread.get_process() + module = process.get_module_at_address(value) + if module: + label = module.get_label_at_address(value) + except RuntimeError: + label = None + reg = arg.upper() + val = HexDump.address(value) + if label: + print("%s: %s (%s)" % (reg, val, label)) + else: + print("%s: %s" % (reg, val)) + + do_r = do_register + + def do_eb(self, arg): + """ + [~process] eb <address> <data>
- write the data to the specified address + """ + # TODO + # data parameter should be optional, use a child Cmd here + pid = self.get_process_id_from_prefix() + token_list = self.split_tokens(arg, 2) + address = self.input_address(token_list[0], pid) + data = HexInput.hexadecimal(' '.join(token_list[1:])) + self.write_memory(address, data, pid) + + # XXX TODO + # add ew, ed and eq here + + def do_find(self, arg): + """ + [~process] f - find the string in the process memory + [~process] find - find the string in the process memory + """ + if not arg: + raise CmdError("missing parameter: string") + process = self.get_process_from_prefix() + self.find_in_memory(arg, process) + + do_f = do_find + + def do_memory(self, arg): + """ + [~process] m - show the process memory map + [~process] memory - show the process memory map + """ + if arg: # TODO: take min and max addresses + raise CmdError("too many arguments") + process = self.get_process_from_prefix() + try: + memoryMap = process.get_memory_map() + mappedFilenames = process.get_mapped_filenames() + print('') + print(CrashDump.dump_memory_map(memoryMap, mappedFilenames)) + except WindowsError: + msg = "can't get memory information for process (%d)" + raise CmdError(msg % process.get_pid()) + + do_m = do_memory + +#------------------------------------------------------------------------------ +# Event handling + +# TODO +# * add configurable stop/don't stop behavior on events and exceptions + + # Stop for all events, unless stated otherwise. + def event(self, event): + self.print_event(event) + self.prompt_user() + + # Stop for all exceptions, unless stated otherwise. + def exception(self, event): + self.print_exception(event) + self.prompt_user() + + # Stop for breakpoint exceptions. + def breakpoint(self, event): + if hasattr(event, 'breakpoint') and event.breakpoint: + self.print_breakpoint_location(event) + else: + self.print_exception(event) + self.prompt_user() + + # Stop for WOW64 breakpoint exceptions. + def wow64_breakpoint(self, event): + self.print_exception(event) + self.prompt_user() + + # Stop for single step exceptions. + def single_step(self, event): + if event.debug.is_tracing(event.get_tid()): + self.print_breakpoint_location(event) + else: + self.print_exception(event) + self.prompt_user() + + # Don't stop for C++ exceptions. + def ms_vc_exception(self, event): + self.print_exception(event) + event.continueStatus = win32.DBG_CONTINUE + + # Don't stop for process start. + def create_process(self, event): + self.print_process_start(event) + self.print_thread_start(event) + self.print_module_load(event) + + # Don't stop for process exit. + def exit_process(self, event): + self.print_process_end(event) + + # Don't stop for thread creation. + def create_thread(self, event): + self.print_thread_start(event) + + # Don't stop for thread exit. + def exit_thread(self, event): + self.print_thread_end(event) + + # Don't stop for DLL load. + def load_dll(self, event): + self.print_module_load(event) + + # Don't stop for DLL unload. + def unload_dll(self, event): + self.print_module_unload(event) + + # Don't stop for debug strings. 
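# --- Illustrative sketch (not part of the vendored file): the methods above
# follow winappdbg's EventHandler callback convention, where Debug.dispatch()
# invokes a method named after each event type (create_process, load_dll,
# breakpoint, and so on). A minimal standalone handler, assuming a regular
# winappdbg install, looks roughly like this:
from winappdbg import Debug, EventHandler

class DllLogger(EventHandler):
    def load_dll(self, event):
        module = event.get_module()
        print("Loaded %s at %#x" % (module.get_filename(), module.get_base()))

def trace(command_line):
    with Debug(DllLogger(), bKillOnExit=True) as debug:
        debug.execv(command_line)   # e.g. ["notepad.exe"]
        debug.loop()                # wait / dispatch / continue until exit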
+ def output_string(self, event): + self.print_debug_string(event) + +#------------------------------------------------------------------------------ +# History file + + def load_history(self): + global readline + if readline is None: + try: + import readline + except ImportError: + return + if self.history_file_full_path is None: + folder = os.environ.get('USERPROFILE', '') + if not folder: + folder = os.environ.get('HOME', '') + if not folder: + folder = os.path.split(sys.argv[0])[1] + if not folder: + folder = os.path.curdir + self.history_file_full_path = os.path.join(folder, + self.history_file) + try: + if os.path.exists(self.history_file_full_path): + readline.read_history_file(self.history_file_full_path) + except IOError: + e = sys.exc_info()[1] + warnings.warn("Cannot load history file, reason: %s" % str(e)) + + def save_history(self): + if self.history_file_full_path is not None: + global readline + if readline is None: + try: + import readline + except ImportError: + return + try: + readline.write_history_file(self.history_file_full_path) + except IOError: + e = sys.exc_info()[1] + warnings.warn("Cannot save history file, reason: %s" % str(e)) + +#------------------------------------------------------------------------------ +# Main loop + + # Debugging loop. + def loop(self): + self.debuggerExit = False + debug = self.debug + + # Stop on the initial event, if any. + if self.lastEvent is not None: + self.cmdqueue.append('r') + self.prompt_user() + + # Loop until the debugger is told to quit. + while not self.debuggerExit: + + try: + + # If for some reason the last event wasn't continued, + # continue it here. This won't be done more than once + # for a given Event instance, though. + try: + debug.cont() + # On error, show the command prompt. + except Exception: + traceback.print_exc() + self.prompt_user() + + # While debugees are attached, handle debug events. + # Some debug events may cause the command prompt to be shown. + if self.debug.get_debugee_count() > 0: + try: + + # Get the next debug event. + debug.wait() + + # Dispatch the debug event. + try: + debug.dispatch() + + # Continue the debug event. + finally: + debug.cont() + + # On error, show the command prompt. + except Exception: + traceback.print_exc() + self.prompt_user() + + # While no debugees are attached, show the command prompt. + else: + self.prompt_user() + + # When the user presses Ctrl-C send a debug break to all debugees. + except KeyboardInterrupt: + success = False + try: + print("*** User requested debug break") + system = debug.system + for pid in debug.get_debugee_pids(): + try: + system.get_process(pid).debug_break() + success = True + except: + traceback.print_exc() + except: + traceback.print_exc() + if not success: + raise # This should never happen! diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/module.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/module.py new file mode 100644 index 00000000..6ae01831 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/module.py @@ -0,0 +1,2016 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Module instrumentation. + +@group Instrumentation: + Module + +@group Warnings: + DebugSymbolsWarning +""" + +from __future__ import with_statement + +__revision__ = "$Id$" + +__all__ = ['Module', 'DebugSymbolsWarning'] + +import sys +from winappdbg import win32 +from winappdbg import compat +from winappdbg.textio import HexInput, HexDump +from winappdbg.util import PathOperations + +# delayed imports +Process = None + +import os +import warnings +import traceback + +#============================================================================== + +class DebugSymbolsWarning (UserWarning): + """ + This warning is issued if the support for debug symbols + isn't working properly. + """ + +#============================================================================== + +class Module (object): + """ + Interface to a DLL library loaded in the context of another process. + + @group Properties: + get_base, get_filename, get_name, get_size, get_entry_point, + get_process, set_process, get_pid, + get_handle, set_handle, open_handle, close_handle + + @group Labels: + get_label, get_label_at_address, is_address_here, + resolve, resolve_label, match_name + + @group Symbols: + load_symbols, unload_symbols, get_symbols, iter_symbols, + resolve_symbol, get_symbol_at_address + + @group Modules snapshot: + clear + + @type unknown: str + @cvar unknown: Suggested tag for unknown modules. + + @type lpBaseOfDll: int + @ivar lpBaseOfDll: Base of DLL module. + Use L{get_base} instead. + + @type hFile: L{FileHandle} + @ivar hFile: Handle to the module file. + Use L{get_handle} instead. + + @type fileName: str + @ivar fileName: Module filename. + Use L{get_filename} instead. + + @type SizeOfImage: int + @ivar SizeOfImage: Size of the module. + Use L{get_size} instead. + + @type EntryPoint: int + @ivar EntryPoint: Entry point of the module. + Use L{get_entry_point} instead. + + @type process: L{Process} + @ivar process: Process where the module is loaded. + Use the L{get_process} method instead. + """ + + unknown = '' + + class _SymbolEnumerator (object): + """ + Internally used by L{Module} to enumerate symbols in a module. 
+ """ + + def __init__(self, undecorate = False): + self.symbols = list() + self.undecorate = undecorate + + def __call__(self, SymbolName, SymbolAddress, SymbolSize, UserContext): + """ + Callback that receives symbols and stores them in a Python list. + """ + if self.undecorate: + try: + SymbolName = win32.UnDecorateSymbolName(SymbolName) + except Exception: + pass # not all symbols are decorated! + self.symbols.append( (SymbolName, SymbolAddress, SymbolSize) ) + return win32.TRUE + + def __init__(self, lpBaseOfDll, hFile = None, fileName = None, + SizeOfImage = None, + EntryPoint = None, + process = None): + """ + @type lpBaseOfDll: str + @param lpBaseOfDll: Base address of the module. + + @type hFile: L{FileHandle} + @param hFile: (Optional) Handle to the module file. + + @type fileName: str + @param fileName: (Optional) Module filename. + + @type SizeOfImage: int + @param SizeOfImage: (Optional) Size of the module. + + @type EntryPoint: int + @param EntryPoint: (Optional) Entry point of the module. + + @type process: L{Process} + @param process: (Optional) Process where the module is loaded. + """ + self.lpBaseOfDll = lpBaseOfDll + self.fileName = fileName + self.SizeOfImage = SizeOfImage + self.EntryPoint = EntryPoint + + self.__symbols = list() + + self.set_handle(hFile) + self.set_process(process) + + # Not really sure if it's a good idea... +## def __eq__(self, aModule): +## """ +## Compare two Module objects. The comparison is made using the process +## IDs and the module bases. +## +## @type aModule: L{Module} +## @param aModule: Another Module object. +## +## @rtype: bool +## @return: C{True} if the two process IDs and module bases are equal, +## C{False} otherwise. +## """ +## return isinstance(aModule, Module) and \ +## self.get_pid() == aModule.get_pid() and \ +## self.get_base() == aModule.get_base() + + def get_handle(self): + """ + @rtype: L{Handle} + @return: File handle. + Returns C{None} if unknown. + """ + # no way to guess! + return self.__hFile + + def set_handle(self, hFile): + """ + @type hFile: L{Handle} + @param hFile: File handle. Use C{None} to clear. + """ + if hFile == win32.INVALID_HANDLE_VALUE: + hFile = None + self.__hFile = hFile + + hFile = property(get_handle, set_handle, doc="") + + def get_process(self): + """ + @rtype: L{Process} + @return: Parent Process object. + Returns C{None} if unknown. + """ + # no way to guess! + return self.__process + + def set_process(self, process = None): + """ + Manually set the parent process. Use with care! + + @type process: L{Process} + @param process: (Optional) Process object. Use C{None} for no process. + """ + if process is None: + self.__process = None + else: + global Process # delayed import + if Process is None: + from winappdbg.process import Process + if not isinstance(process, Process): + msg = "Parent process must be a Process instance, " + msg += "got %s instead" % type(process) + raise TypeError(msg) + self.__process = process + + process = property(get_process, set_process, doc="") + + def get_pid(self): + """ + @rtype: int or None + @return: Parent process global ID. + Returns C{None} on error. + """ + process = self.get_process() + if process is not None: + return process.get_pid() + + def get_base(self): + """ + @rtype: int or None + @return: Base address of the module. + Returns C{None} if unknown. + """ + return self.lpBaseOfDll + + def get_size(self): + """ + @rtype: int or None + @return: Base size of the module. + Returns C{None} if unknown. 
+ """ + if not self.SizeOfImage: + self.__get_size_and_entry_point() + return self.SizeOfImage + + def get_entry_point(self): + """ + @rtype: int or None + @return: Entry point of the module. + Returns C{None} if unknown. + """ + if not self.EntryPoint: + self.__get_size_and_entry_point() + return self.EntryPoint + + def __get_size_and_entry_point(self): + "Get the size and entry point of the module using the Win32 API." + process = self.get_process() + if process: + try: + handle = process.get_handle( win32.PROCESS_VM_READ | + win32.PROCESS_QUERY_INFORMATION ) + base = self.get_base() + mi = win32.GetModuleInformation(handle, base) + self.SizeOfImage = mi.SizeOfImage + self.EntryPoint = mi.EntryPoint + except WindowsError: + e = sys.exc_info()[1] + warnings.warn( + "Cannot get size and entry point of module %s, reason: %s"\ + % (self.get_name(), e.strerror), RuntimeWarning) + + def get_filename(self): + """ + @rtype: str or None + @return: Module filename. + Returns C{None} if unknown. + """ + if self.fileName is None: + if self.hFile not in (None, win32.INVALID_HANDLE_VALUE): + fileName = self.hFile.get_filename() + if fileName: + fileName = PathOperations.native_to_win32_pathname(fileName) + self.fileName = fileName + return self.fileName + + def __filename_to_modname(self, pathname): + """ + @type pathname: str + @param pathname: Pathname to a module. + + @rtype: str + @return: Module name. + """ + filename = PathOperations.pathname_to_filename(pathname) + if filename: + filename = filename.lower() + filepart, extpart = PathOperations.split_extension(filename) + if filepart and extpart: + modName = filepart + else: + modName = filename + else: + modName = pathname + return modName + + def get_name(self): + """ + @rtype: str + @return: Module name, as used in labels. + + @warning: Names are B{NOT} guaranteed to be unique. + + If you need unique identification for a loaded module, + use the base address instead. + + @see: L{get_label} + """ + pathname = self.get_filename() + if pathname: + modName = self.__filename_to_modname(pathname) + if isinstance(modName, compat.unicode): + try: + modName = modName.encode('cp1252') + except UnicodeEncodeError: + e = sys.exc_info()[1] + warnings.warn(str(e)) + else: + modName = "0x%x" % self.get_base() + return modName + + def match_name(self, name): + """ + @rtype: bool + @return: + C{True} if the given name could refer to this module. + It may not be exactly the same returned by L{get_name}. + """ + + # If the given name is exactly our name, return True. + # Comparison is case insensitive. + my_name = self.get_name().lower() + if name.lower() == my_name: + return True + + # If the given name is a base address, compare it with ours. + try: + base = HexInput.integer(name) + except ValueError: + base = None + if base is not None and base == self.get_base(): + return True + + # If the given name is a filename, convert it to a module name. + # Then compare it with ours, case insensitive. + modName = self.__filename_to_modname(name) + if modName.lower() == my_name: + return True + + # No match. + return False + +#------------------------------------------------------------------------------ + + def open_handle(self): + """ + Opens a new handle to the module. + + The new handle is stored in the L{hFile} property. 
+ """ + + if not self.get_filename(): + msg = "Cannot retrieve filename for module at %s" + msg = msg % HexDump.address( self.get_base() ) + raise Exception(msg) + + hFile = win32.CreateFile(self.get_filename(), + dwShareMode = win32.FILE_SHARE_READ, + dwCreationDisposition = win32.OPEN_EXISTING) + + # In case hFile was set to an actual handle value instead of a Handle + # object. This shouldn't happen unless the user tinkered with hFile. + if not hasattr(self.hFile, '__del__'): + self.close_handle() + + self.hFile = hFile + + def close_handle(self): + """ + Closes the handle to the module. + + @note: Normally you don't need to call this method. All handles + created by I{WinAppDbg} are automatically closed when the garbage + collector claims them. So unless you've been tinkering with it, + setting L{hFile} to C{None} should be enough. + """ + try: + if hasattr(self.hFile, 'close'): + self.hFile.close() + elif self.hFile not in (None, win32.INVALID_HANDLE_VALUE): + win32.CloseHandle(self.hFile) + finally: + self.hFile = None + + def get_handle(self): + """ + @rtype: L{FileHandle} + @return: Handle to the module file. + """ + if self.hFile in (None, win32.INVALID_HANDLE_VALUE): + self.open_handle() + return self.hFile + + def clear(self): + """ + Clears the resources held by this object. + """ + try: + self.set_process(None) + finally: + self.close_handle() + +#------------------------------------------------------------------------------ + + # XXX FIXME + # I've been told sometimes the debugging symbols APIs don't correctly + # handle redirected exports (for example ws2_32!recv). + # I haven't been able to reproduce the bug yet. + def load_symbols(self): + """ + Loads the debugging symbols for a module. + Automatically called by L{get_symbols}. + """ + if win32.PROCESS_ALL_ACCESS == win32.PROCESS_ALL_ACCESS_VISTA: + dwAccess = win32.PROCESS_QUERY_LIMITED_INFORMATION + else: + dwAccess = win32.PROCESS_QUERY_INFORMATION + hProcess = self.get_process().get_handle(dwAccess) + hFile = self.hFile + BaseOfDll = self.get_base() + SizeOfDll = self.get_size() + Enumerator = self._SymbolEnumerator() + try: + win32.SymInitialize(hProcess) + SymOptions = win32.SymGetOptions() + SymOptions |= ( + win32.SYMOPT_ALLOW_ZERO_ADDRESS | + win32.SYMOPT_CASE_INSENSITIVE | + win32.SYMOPT_FAVOR_COMPRESSED | + win32.SYMOPT_INCLUDE_32BIT_MODULES | + win32.SYMOPT_UNDNAME + ) + SymOptions &= ~( + win32.SYMOPT_LOAD_LINES | + win32.SYMOPT_NO_IMAGE_SEARCH | + win32.SYMOPT_NO_CPP | + win32.SYMOPT_IGNORE_NT_SYMPATH + ) + win32.SymSetOptions(SymOptions) + try: + win32.SymSetOptions( + SymOptions | win32.SYMOPT_ALLOW_ABSOLUTE_SYMBOLS) + except WindowsError: + pass + try: + try: + success = win32.SymLoadModule64( + hProcess, hFile, None, None, BaseOfDll, SizeOfDll) + except WindowsError: + success = 0 + if not success: + ImageName = self.get_filename() + success = win32.SymLoadModule64( + hProcess, None, ImageName, None, BaseOfDll, SizeOfDll) + if success: + try: + win32.SymEnumerateSymbols64( + hProcess, BaseOfDll, Enumerator) + finally: + win32.SymUnloadModule64(hProcess, BaseOfDll) + finally: + win32.SymCleanup(hProcess) + except WindowsError: + e = sys.exc_info()[1] + msg = "Cannot load debug symbols for process ID %d, reason:\n%s" + msg = msg % (self.get_pid(), traceback.format_exc(e)) + warnings.warn(msg, DebugSymbolsWarning) + self.__symbols = Enumerator.symbols + + def unload_symbols(self): + """ + Unloads the debugging symbols for a module. 
+ """ + self.__symbols = list() + + def get_symbols(self): + """ + Returns the debugging symbols for a module. + The symbols are automatically loaded when needed. + + @rtype: list of tuple( str, int, int ) + @return: List of symbols. + Each symbol is represented by a tuple that contains: + - Symbol name + - Symbol memory address + - Symbol size in bytes + """ + if not self.__symbols: + self.load_symbols() + return list(self.__symbols) + + def iter_symbols(self): + """ + Returns an iterator for the debugging symbols in a module, + in no particular order. + The symbols are automatically loaded when needed. + + @rtype: iterator of tuple( str, int, int ) + @return: Iterator of symbols. + Each symbol is represented by a tuple that contains: + - Symbol name + - Symbol memory address + - Symbol size in bytes + """ + if not self.__symbols: + self.load_symbols() + return self.__symbols.__iter__() + + def resolve_symbol(self, symbol, bCaseSensitive = False): + """ + Resolves a debugging symbol's address. + + @type symbol: str + @param symbol: Name of the symbol to resolve. + + @type bCaseSensitive: bool + @param bCaseSensitive: C{True} for case sensitive matches, + C{False} for case insensitive. + + @rtype: int or None + @return: Memory address of symbol. C{None} if not found. + """ + if bCaseSensitive: + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + if symbol == SymbolName: + return SymbolAddress + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + try: + SymbolName = win32.UnDecorateSymbolName(SymbolName) + except Exception: + continue + if symbol == SymbolName: + return SymbolAddress + else: + symbol = symbol.lower() + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + if symbol == SymbolName.lower(): + return SymbolAddress + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + try: + SymbolName = win32.UnDecorateSymbolName(SymbolName) + except Exception: + continue + if symbol == SymbolName.lower(): + return SymbolAddress + + def get_symbol_at_address(self, address): + """ + Tries to find the closest matching symbol for the given address. + + @type address: int + @param address: Memory address to query. + + @rtype: None or tuple( str, int, int ) + @return: Returns a tuple consisting of: + - Name + - Address + - Size (in bytes) + Returns C{None} if no symbol could be matched. + """ + found = None + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + if SymbolAddress > address: + continue + if SymbolAddress + SymbolSize > address: + if not found or found[1] < SymbolAddress: + found = (SymbolName, SymbolAddress, SymbolSize) + return found + +#------------------------------------------------------------------------------ + + def get_label(self, function = None, offset = None): + """ + Retrieves the label for the given function of this module or the module + base address if no function name is given. + + @type function: str + @param function: (Optional) Exported function name. + + @type offset: int + @param offset: (Optional) Offset from the module base address. + + @rtype: str + @return: Label for the module base address, plus the offset if given. + """ + return _ModuleContainer.parse_label(self.get_name(), function, offset) + + def get_label_at_address(self, address, offset = None): + """ + Creates a label from the given memory address. + + If the address belongs to the module, the label is made relative to + it's base address. + + @type address: int + @param address: Memory address. 
+ + @type offset: None or int + @param offset: (Optional) Offset value. + + @rtype: str + @return: Label pointing to the given address. + """ + + # Add the offset to the address. + if offset: + address = address + offset + + # Make the label relative to the base address if no match is found. + module = self.get_name() + function = None + offset = address - self.get_base() + + # Make the label relative to the entrypoint if no other match is found. + # Skip if the entry point is unknown. + start = self.get_entry_point() + if start and start <= address: + function = "start" + offset = address - start + + # Enumerate exported functions and debug symbols, + # then find the closest match, if possible. + try: + symbol = self.get_symbol_at_address(address) + if symbol: + (SymbolName, SymbolAddress, SymbolSize) = symbol + new_offset = address - SymbolAddress + if new_offset <= offset: + function = SymbolName + offset = new_offset + except WindowsError: + pass + + # Parse the label and return it. + return _ModuleContainer.parse_label(module, function, offset) + + def is_address_here(self, address): + """ + Tries to determine if the given address belongs to this module. + + @type address: int + @param address: Memory address. + + @rtype: bool or None + @return: C{True} if the address belongs to the module, + C{False} if it doesn't, + and C{None} if it can't be determined. + """ + base = self.get_base() + size = self.get_size() + if base and size: + return base <= address < (base + size) + return None + + def resolve(self, function): + """ + Resolves a function exported by this module. + + @type function: str or int + @param function: + str: Name of the function. + int: Ordinal of the function. + + @rtype: int + @return: Memory address of the exported function in the process. + Returns None on error. + """ + + # Unknown DLL filename, there's nothing we can do. + filename = self.get_filename() + if not filename: + return None + + # If the DLL is already mapped locally, resolve the function. + try: + hlib = win32.GetModuleHandle(filename) + address = win32.GetProcAddress(hlib, function) + except WindowsError: + + # Load the DLL locally, resolve the function and unload it. + try: + hlib = win32.LoadLibraryEx(filename, + win32.DONT_RESOLVE_DLL_REFERENCES) + try: + address = win32.GetProcAddress(hlib, function) + finally: + win32.FreeLibrary(hlib) + except WindowsError: + return None + + # A NULL pointer means the function was not found. + if address in (None, 0): + return None + + # Compensate for DLL base relocations locally and remotely. + return address - hlib + self.lpBaseOfDll + + def resolve_label(self, label): + """ + Resolves a label for this module only. If the label refers to another + module, an exception is raised. + + @type label: str + @param label: Label to resolve. + + @rtype: int + @return: Memory address pointed to by the label. + + @raise ValueError: The label is malformed or impossible to resolve. + @raise RuntimeError: Cannot resolve the module or function. + """ + + # Split the label into it's components. + # Use the fuzzy mode whenever possible. + aProcess = self.get_process() + if aProcess is not None: + (module, procedure, offset) = aProcess.split_label(label) + else: + (module, procedure, offset) = _ModuleContainer.split_label(label) + + # If a module name is given that doesn't match ours, + # raise an exception. + if module and not self.match_name(module): + raise RuntimeError("Label does not belong to this module") + + # Resolve the procedure if given. 
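# --- Illustrative sketch (not part of the vendored file): resolve() works
# because an export keeps the same relative virtual address locally and in the
# debuggee, so the module can be mapped into this process, queried with
# GetProcAddress, and the local base swapped for the remote base. The core of
# that trick, assuming the winappdbg win32 wrappers and a Module instance for
# the remote DLL:
def resolve_remote_export(module, function):
    from winappdbg import win32
    hlib = win32.LoadLibraryEx(module.get_filename(),
                               win32.DONT_RESOLVE_DLL_REFERENCES)
    try:
        local_address = win32.GetProcAddress(hlib, function)
    finally:
        win32.FreeLibrary(hlib)
    if not local_address:
        return None
    # local VA - local base + remote base == remote VA
    return local_address - hlib + module.get_base()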
+ if procedure: + address = self.resolve(procedure) + if address is None: + + # If it's a debug symbol, use the symbol. + address = self.resolve_symbol(procedure) + + # If it's the keyword "start" use the entry point. + if address is None and procedure == "start": + address = self.get_entry_point() + + # The procedure was not found. + if address is None: + if not module: + module = self.get_name() + msg = "Can't find procedure %s in module %s" + raise RuntimeError(msg % (procedure, module)) + + # If no procedure is given use the base address of the module. + else: + address = self.get_base() + + # Add the offset if given and return the resolved address. + if offset: + address = address + offset + return address + +#============================================================================== + +# TODO +# An alternative approach to the toolhelp32 snapshots: parsing the PEB and +# fetching the list of loaded modules from there. That would solve the problem +# of toolhelp32 not working when the process hasn't finished initializing. +# See: http://pferrie.host22.com/misc/lowlevel3.htm + +class _ModuleContainer (object): + """ + Encapsulates the capability to contain Module objects. + + @note: Labels are an approximated way of referencing memory locations + across different executions of the same process, or different processes + with common modules. They are not meant to be perfectly unique, and + some errors may occur when multiple modules with the same name are + loaded, or when module filenames can't be retrieved. + + @group Modules snapshot: + scan_modules, + get_module, get_module_bases, get_module_count, + get_module_at_address, get_module_by_name, + has_module, iter_modules, iter_module_addresses, + clear_modules + + @group Labels: + parse_label, split_label, sanitize_label, resolve_label, + resolve_label_components, get_label_at_address, split_label_strict, + split_label_fuzzy + + @group Symbols: + load_symbols, unload_symbols, get_symbols, iter_symbols, + resolve_symbol, get_symbol_at_address + + @group Debugging: + is_system_defined_breakpoint, get_system_breakpoint, + get_user_breakpoint, get_breakin_breakpoint, + get_wow64_system_breakpoint, get_wow64_user_breakpoint, + get_wow64_breakin_breakpoint, get_break_on_error_ptr + """ + + def __init__(self): + self.__moduleDict = dict() + self.__system_breakpoints = dict() + + # Replace split_label with the fuzzy version on object instances. + self.split_label = self.__use_fuzzy_mode + + def __initialize_snapshot(self): + """ + Private method to automatically initialize the snapshot + when you try to use it without calling any of the scan_* + methods first. You don't need to call this yourself. + """ + if not self.__moduleDict: + try: + self.scan_modules() + except WindowsError: + pass + + def __contains__(self, anObject): + """ + @type anObject: L{Module}, int + @param anObject: + - C{Module}: Module object to look for. + - C{int}: Base address of the DLL to look for. + + @rtype: bool + @return: C{True} if the snapshot contains + a L{Module} object with the same base address. + """ + if isinstance(anObject, Module): + anObject = anObject.lpBaseOfDll + return self.has_module(anObject) + + def __iter__(self): + """ + @see: L{iter_modules} + @rtype: dictionary-valueiterator + @return: Iterator of L{Module} objects in this snapshot. + """ + return self.iter_modules() + + def __len__(self): + """ + @see: L{get_module_count} + @rtype: int + @return: Count of L{Module} objects in this snapshot. 
+ """ + return self.get_module_count() + + def has_module(self, lpBaseOfDll): + """ + @type lpBaseOfDll: int + @param lpBaseOfDll: Base address of the DLL to look for. + + @rtype: bool + @return: C{True} if the snapshot contains a + L{Module} object with the given base address. + """ + self.__initialize_snapshot() + return lpBaseOfDll in self.__moduleDict + + def get_module(self, lpBaseOfDll): + """ + @type lpBaseOfDll: int + @param lpBaseOfDll: Base address of the DLL to look for. + + @rtype: L{Module} + @return: Module object with the given base address. + """ + self.__initialize_snapshot() + if lpBaseOfDll not in self.__moduleDict: + msg = "Unknown DLL base address %s" + msg = msg % HexDump.address(lpBaseOfDll) + raise KeyError(msg) + return self.__moduleDict[lpBaseOfDll] + + def iter_module_addresses(self): + """ + @see: L{iter_modules} + @rtype: dictionary-keyiterator + @return: Iterator of DLL base addresses in this snapshot. + """ + self.__initialize_snapshot() + return compat.iterkeys(self.__moduleDict) + + def iter_modules(self): + """ + @see: L{iter_module_addresses} + @rtype: dictionary-valueiterator + @return: Iterator of L{Module} objects in this snapshot. + """ + self.__initialize_snapshot() + return compat.itervalues(self.__moduleDict) + + def get_module_bases(self): + """ + @see: L{iter_module_addresses} + @rtype: list( int... ) + @return: List of DLL base addresses in this snapshot. + """ + self.__initialize_snapshot() + return compat.keys(self.__moduleDict) + + def get_module_count(self): + """ + @rtype: int + @return: Count of L{Module} objects in this snapshot. + """ + self.__initialize_snapshot() + return len(self.__moduleDict) + +#------------------------------------------------------------------------------ + + def get_module_by_name(self, modName): + """ + @type modName: int + @param modName: + Name of the module to look for, as returned by L{Module.get_name}. + If two or more modules with the same name are loaded, only one + of the matching modules is returned. + + You can also pass a full pathname to the DLL file. + This works correctly even if two modules with the same name + are loaded from different paths. + + @rtype: L{Module} + @return: C{Module} object that best matches the given name. + Returns C{None} if no C{Module} can be found. + """ + + # Convert modName to lowercase. + # This helps make case insensitive string comparisons. + modName = modName.lower() + + # modName is an absolute pathname. + if PathOperations.path_is_absolute(modName): + for lib in self.iter_modules(): + if modName == lib.get_filename().lower(): + return lib + return None # Stop trying to match the name. + + # Get all the module names. + # This prevents having to iterate through the module list + # more than once. + modDict = [ ( lib.get_name(), lib ) for lib in self.iter_modules() ] + modDict = dict(modDict) + + # modName is a base filename. + if modName in modDict: + return modDict[modName] + + # modName is a base filename without extension. + filepart, extpart = PathOperations.split_extension(modName) + if filepart and extpart: + if filepart in modDict: + return modDict[filepart] + + # modName is a base address. + try: + baseAddress = HexInput.integer(modName) + except ValueError: + return None + if self.has_module(baseAddress): + return self.get_module(baseAddress) + + # Module not found. + return None + + def get_module_at_address(self, address): + """ + @type address: int + @param address: Memory address to query. 
+ + @rtype: L{Module} + @return: C{Module} object that best matches the given address. + Returns C{None} if no C{Module} can be found. + """ + bases = self.get_module_bases() + bases.sort() + bases.append(long(0x10000000000000000)) # max. 64 bit address + 1 + if address >= bases[0]: + i = 0 + max_i = len(bases) - 1 + while i < max_i: + begin, end = bases[i:i+2] + if begin <= address < end: + module = self.get_module(begin) + here = module.is_address_here(address) + if here is False: + break + else: # True or None + return module + i = i + 1 + return None + + # XXX this method musn't end up calling __initialize_snapshot by accident! + def scan_modules(self): + """ + Populates the snapshot with loaded modules. + """ + + # The module filenames may be spoofed by malware, + # since this information resides in usermode space. + # See: http://www.ragestorm.net/blogs/?p=163 + + # Ignore special process IDs. + # PID 0: System Idle Process. Also has a special meaning to the + # toolhelp APIs (current process). + # PID 4: System Integrity Group. See this forum post for more info: + # http://tinyurl.com/ycza8jo + # (points to social.technet.microsoft.com) + # Only on XP and above + # PID 8: System (?) only in Windows 2000 and below AFAIK. + # It's probably the same as PID 4 in XP and above. + dwProcessId = self.get_pid() + if dwProcessId in (0, 4, 8): + return + + # It would seem easier to clear the snapshot first. + # But then all open handles would be closed. + found_bases = set() + with win32.CreateToolhelp32Snapshot(win32.TH32CS_SNAPMODULE, + dwProcessId) as hSnapshot: + me = win32.Module32First(hSnapshot) + while me is not None: + lpBaseAddress = me.modBaseAddr + fileName = me.szExePath # full pathname + if not fileName: + fileName = me.szModule # filename only + if not fileName: + fileName = None + else: + fileName = PathOperations.native_to_win32_pathname(fileName) + found_bases.add(lpBaseAddress) +## if not self.has_module(lpBaseAddress): # XXX triggers a scan + if lpBaseAddress not in self.__moduleDict: + aModule = Module(lpBaseAddress, fileName = fileName, + SizeOfImage = me.modBaseSize, + process = self) + self._add_module(aModule) + else: + aModule = self.get_module(lpBaseAddress) + if not aModule.fileName: + aModule.fileName = fileName + if not aModule.SizeOfImage: + aModule.SizeOfImage = me.modBaseSize + if not aModule.process: + aModule.process = self + me = win32.Module32Next(hSnapshot) +## for base in self.get_module_bases(): # XXX triggers a scan + for base in compat.keys(self.__moduleDict): + if base not in found_bases: + self._del_module(base) + + def clear_modules(self): + """ + Clears the modules snapshot. + """ + for aModule in compat.itervalues(self.__moduleDict): + aModule.clear() + self.__moduleDict = dict() + +#------------------------------------------------------------------------------ + + @staticmethod + def parse_label(module = None, function = None, offset = None): + """ + Creates a label from a module and a function name, plus an offset. + + @warning: This method only creates the label, it doesn't make sure the + label actually points to a valid memory location. + + @type module: None or str + @param module: (Optional) Module name. + + @type function: None, str or int + @param function: (Optional) Function name or ordinal. + + @type offset: None or int + @param offset: (Optional) Offset value. + + If C{function} is specified, offset from the function. + + If C{function} is C{None}, offset from the module. 
+ + @rtype: str + @return: + Label representing the given function in the given module. + + @raise ValueError: + The module or function name contain invalid characters. + """ + + # TODO + # Invalid characters should be escaped or filtered. + + # Convert ordinals to strings. + try: + function = "#0x%x" % function + except TypeError: + pass + + # Validate the parameters. + if module is not None and ('!' in module or '+' in module): + raise ValueError("Invalid module name: %s" % module) + if function is not None and ('!' in function or '+' in function): + raise ValueError("Invalid function name: %s" % function) + + # Parse the label. + if module: + if function: + if offset: + label = "%s!%s+0x%x" % (module, function, offset) + else: + label = "%s!%s" % (module, function) + else: + if offset: +## label = "%s+0x%x!" % (module, offset) + label = "%s!0x%x" % (module, offset) + else: + label = "%s!" % module + else: + if function: + if offset: + label = "!%s+0x%x" % (function, offset) + else: + label = "!%s" % function + else: + if offset: + label = "0x%x" % offset + else: + label = "0x0" + + return label + + @staticmethod + def split_label_strict(label): + """ + Splits a label created with L{parse_label}. + + To parse labels with a less strict syntax, use the L{split_label_fuzzy} + method instead. + + @warning: This method only parses the label, it doesn't make sure the + label actually points to a valid memory location. + + @type label: str + @param label: Label to split. + + @rtype: tuple( str or None, str or int or None, int or None ) + @return: Tuple containing the C{module} name, + the C{function} name or ordinal, and the C{offset} value. + + If the label doesn't specify a module, + then C{module} is C{None}. + + If the label doesn't specify a function, + then C{function} is C{None}. + + If the label doesn't specify an offset, + then C{offset} is C{0}. + + @raise ValueError: The label is malformed. + """ + module = function = None + offset = 0 + + # Special case: None + if not label: + label = "0x0" + else: + + # Remove all blanks. + label = label.replace(' ', '') + label = label.replace('\t', '') + label = label.replace('\r', '') + label = label.replace('\n', '') + + # Special case: empty label. + if not label: + label = "0x0" + + # * ! * + if '!' in label: + try: + module, function = label.split('!') + except ValueError: + raise ValueError("Malformed label: %s" % label) + + # module ! function + if function: + if '+' in module: + raise ValueError("Malformed label: %s" % label) + + # module ! function + offset + if '+' in function: + try: + function, offset = function.split('+') + except ValueError: + raise ValueError("Malformed label: %s" % label) + try: + offset = HexInput.integer(offset) + except ValueError: + raise ValueError("Malformed label: %s" % label) + else: + + # module ! offset + try: + offset = HexInput.integer(function) + function = None + except ValueError: + pass + else: + + # module + offset ! + if '+' in module: + try: + module, offset = module.split('+') + except ValueError: + raise ValueError("Malformed label: %s" % label) + try: + offset = HexInput.integer(offset) + except ValueError: + raise ValueError("Malformed label: %s" % label) + + else: + + # module ! + try: + offset = HexInput.integer(module) + module = None + + # offset ! 
+ except ValueError: + pass + + if not module: + module = None + if not function: + function = None + + # * + else: + + # offset + try: + offset = HexInput.integer(label) + + # # ordinal + except ValueError: + if label.startswith('#'): + function = label + try: + HexInput.integer(function[1:]) + + # module? + # function? + except ValueError: + raise ValueError("Ambiguous label: %s" % label) + + # module? + # function? + else: + raise ValueError("Ambiguous label: %s" % label) + + # Convert function ordinal strings into integers. + if function and function.startswith('#'): + try: + function = HexInput.integer(function[1:]) + except ValueError: + pass + + # Convert null offsets to None. + if not offset: + offset = None + + return (module, function, offset) + + def split_label_fuzzy(self, label): + """ + Splits a label entered as user input. + + It's more flexible in it's syntax parsing than the L{split_label_strict} + method, as it allows the exclamation mark (B{C{!}}) to be omitted. The + ambiguity is resolved by searching the modules in the snapshot to guess + if a label refers to a module or a function. It also tries to rebuild + labels when they contain hardcoded addresses. + + @warning: This method only parses the label, it doesn't make sure the + label actually points to a valid memory location. + + @type label: str + @param label: Label to split. + + @rtype: tuple( str or None, str or int or None, int or None ) + @return: Tuple containing the C{module} name, + the C{function} name or ordinal, and the C{offset} value. + + If the label doesn't specify a module, + then C{module} is C{None}. + + If the label doesn't specify a function, + then C{function} is C{None}. + + If the label doesn't specify an offset, + then C{offset} is C{0}. + + @raise ValueError: The label is malformed. + """ + module = function = None + offset = 0 + + # Special case: None + if not label: + label = compat.b("0x0") + else: + + # Remove all blanks. + label = label.replace(compat.b(' '), compat.b('')) + label = label.replace(compat.b('\t'), compat.b('')) + label = label.replace(compat.b('\r'), compat.b('')) + label = label.replace(compat.b('\n'), compat.b('')) + + # Special case: empty label. + if not label: + label = compat.b("0x0") + + # If an exclamation sign is present, we know we can parse it strictly. + if compat.b('!') in label: + return self.split_label_strict(label) + +## # Try to parse it strictly, on error do it the fuzzy way. +## try: +## return self.split_label(label) +## except ValueError: +## pass + + # * + offset + if compat.b('+') in label: + try: + prefix, offset = label.split(compat.b('+')) + except ValueError: + raise ValueError("Malformed label: %s" % label) + try: + offset = HexInput.integer(offset) + except ValueError: + raise ValueError("Malformed label: %s" % label) + label = prefix + + # This parses both filenames and base addresses. + modobj = self.get_module_by_name(label) + if modobj: + + # module + # module + offset + module = modobj.get_name() + + else: + + # TODO + # If 0xAAAAAAAA + 0xBBBBBBBB is given, + # A is interpreted as a module base address, + # and B as an offset. + # If that fails, it'd be good to add A+B and try to + # use the nearest loaded module. + + # offset + # base address + offset (when no module has that base address) + try: + address = HexInput.integer(label) + + if offset: + # If 0xAAAAAAAA + 0xBBBBBBBB is given, + # A is interpreted as a module base address, + # and B as an offset. + # If that fails, we get here, meaning no module was found + # at A. 
Then add up A+B and work with that as a hardcoded + # address. + offset = address + offset + else: + # If the label is a hardcoded address, we get here. + offset = address + + # If only a hardcoded address is given, + # rebuild the label using get_label_at_address. + # Then parse it again, but this time strictly, + # both because there is no need for fuzzy syntax and + # to prevent an infinite recursion if there's a bug here. + try: + new_label = self.get_label_at_address(offset) + module, function, offset = \ + self.split_label_strict(new_label) + except ValueError: + pass + + # function + # function + offset + except ValueError: + function = label + + # Convert function ordinal strings into integers. + if function and function.startswith(compat.b('#')): + try: + function = HexInput.integer(function[1:]) + except ValueError: + pass + + # Convert null offsets to None. + if not offset: + offset = None + + return (module, function, offset) + + @classmethod + def split_label(cls, label): + """ +Splits a label into it's C{module}, C{function} and C{offset} +components, as used in L{parse_label}. + +When called as a static method, the strict syntax mode is used:: + + winappdbg.Process.split_label( "kernel32!CreateFileA" ) + +When called as an instance method, the fuzzy syntax mode is used:: + + aProcessInstance.split_label( "CreateFileA" ) + +@see: L{split_label_strict}, L{split_label_fuzzy} + +@type label: str +@param label: Label to split. + +@rtype: tuple( str or None, str or int or None, int or None ) +@return: + Tuple containing the C{module} name, + the C{function} name or ordinal, and the C{offset} value. + + If the label doesn't specify a module, + then C{module} is C{None}. + + If the label doesn't specify a function, + then C{function} is C{None}. + + If the label doesn't specify an offset, + then C{offset} is C{0}. + +@raise ValueError: The label is malformed. + """ + + # XXX + # Docstring indentation was removed so epydoc doesn't complain + # when parsing the docs for __use_fuzzy_mode(). + + # This function is overwritten by __init__ + # so here is the static implementation only. + return cls.split_label_strict(label) + + # The split_label method is replaced with this function by __init__. + def __use_fuzzy_mode(self, label): + "@see: L{split_label}" + return self.split_label_fuzzy(label) +## __use_fuzzy_mode.__doc__ = split_label.__doc__ + + def sanitize_label(self, label): + """ + Converts a label taken from user input into a well-formed label. + + @type label: str + @param label: Label taken from user input. + + @rtype: str + @return: Sanitized label. + """ + (module, function, offset) = self.split_label_fuzzy(label) + label = self.parse_label(module, function, offset) + return label + + def resolve_label(self, label): + """ + Resolve the memory address of the given label. + + @note: + If multiple modules with the same name are loaded, + the label may be resolved at any of them. For a more precise + way to resolve functions use the base address to get the L{Module} + object (see L{Process.get_module}) and then call L{Module.resolve}. + + If no module name is specified in the label, the function may be + resolved in any loaded module. If you want to resolve all functions + with that name in all processes, call L{Process.iter_modules} to + iterate through all loaded modules, and then try to resolve the + function in each one of them using L{Module.resolve}. + + @type label: str + @param label: Label to resolve. + + @rtype: int + @return: Memory address pointed to by the label. 
+ + @raise ValueError: The label is malformed or impossible to resolve. + @raise RuntimeError: Cannot resolve the module or function. + """ + + # Split the label into module, function and offset components. + module, function, offset = self.split_label_fuzzy(label) + + # Resolve the components into a memory address. + address = self.resolve_label_components(module, function, offset) + + # Return the memory address. + return address + + def resolve_label_components(self, module = None, + function = None, + offset = None): + """ + Resolve the memory address of the given module, function and/or offset. + + @note: + If multiple modules with the same name are loaded, + the label may be resolved at any of them. For a more precise + way to resolve functions use the base address to get the L{Module} + object (see L{Process.get_module}) and then call L{Module.resolve}. + + If no module name is specified in the label, the function may be + resolved in any loaded module. If you want to resolve all functions + with that name in all processes, call L{Process.iter_modules} to + iterate through all loaded modules, and then try to resolve the + function in each one of them using L{Module.resolve}. + + @type module: None or str + @param module: (Optional) Module name. + + @type function: None, str or int + @param function: (Optional) Function name or ordinal. + + @type offset: None or int + @param offset: (Optional) Offset value. + + If C{function} is specified, offset from the function. + + If C{function} is C{None}, offset from the module. + + @rtype: int + @return: Memory address pointed to by the label. + + @raise ValueError: The label is malformed or impossible to resolve. + @raise RuntimeError: Cannot resolve the module or function. + """ + # Default address if no module or function are given. + # An offset may be added later. + address = 0 + + # Resolve the module. + # If the module is not found, check for the special symbol "main". + if module: + modobj = self.get_module_by_name(module) + if not modobj: + if module == "main": + modobj = self.get_main_module() + else: + raise RuntimeError("Module %r not found" % module) + + # Resolve the exported function or debugging symbol. + # If all else fails, check for the special symbol "start". + if function: + address = modobj.resolve(function) + if address is None: + address = modobj.resolve_symbol(function) + if address is None: + if function == "start": + address = modobj.get_entry_point() + if address is None: + msg = "Symbol %r not found in module %s" + raise RuntimeError(msg % (function, module)) + + # No function, use the base address. + else: + address = modobj.get_base() + + # Resolve the function in any module. + # If all else fails, check for the special symbols "main" and "start". + elif function: + for modobj in self.iter_modules(): + address = modobj.resolve(function) + if address is not None: + break + if address is None: + if function == "start": + modobj = self.get_main_module() + address = modobj.get_entry_point() + elif function == "main": + modobj = self.get_main_module() + address = modobj.get_base() + else: + msg = "Function %r not found in any module" % function + raise RuntimeError(msg) + + # Return the address plus the offset. + if offset: + address = address + offset + return address + + def get_label_at_address(self, address, offset = None): + """ + Creates a label from the given memory address. + + @warning: This method uses the name of the nearest currently loaded + module. 
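# --- Illustrative sketch (not part of the vendored file): the label grammar
# accepted here is "module!function+offset", with every component optional.
# A sketch, assuming `process` is an attached winappdbg Process instance
# (Process mixes in _ModuleContainer):
def createfilea_address(process):
    by_label = process.resolve_label("kernel32!CreateFileA")
    by_parts = process.resolve_label_components("kernel32", "CreateFileA")
    assert by_label == by_parts
    return by_label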
If that module is unloaded later, the label becomes + impossible to resolve. + + @type address: int + @param address: Memory address. + + @type offset: None or int + @param offset: (Optional) Offset value. + + @rtype: str + @return: Label pointing to the given address. + """ + if offset: + address = address + offset + modobj = self.get_module_at_address(address) + if modobj: + label = modobj.get_label_at_address(address) + else: + label = self.parse_label(None, None, address) + return label + +#------------------------------------------------------------------------------ + + # The memory addresses of system breakpoints are be cached, since they're + # all in system libraries it's not likely they'll ever change their address + # during the lifetime of the process... I don't suppose a program could + # happily unload ntdll.dll and survive. + def __get_system_breakpoint(self, label): + try: + return self.__system_breakpoints[label] + except KeyError: + try: + address = self.resolve_label(label) + except Exception: + return None + self.__system_breakpoints[label] = address + return address + + # It's in kernel32 in Windows Server 2003, in ntdll since Windows Vista. + # It can only be resolved if we have the debug symbols. + def get_break_on_error_ptr(self): + """ + @rtype: int + @return: + If present, returns the address of the C{g_dwLastErrorToBreakOn} + global variable for this process. If not, returns C{None}. + """ + address = self.__get_system_breakpoint("ntdll!g_dwLastErrorToBreakOn") + if not address: + address = self.__get_system_breakpoint( + "kernel32!g_dwLastErrorToBreakOn") + # cheat a little :) + self.__system_breakpoints["ntdll!g_dwLastErrorToBreakOn"] = address + return address + + def is_system_defined_breakpoint(self, address): + """ + @type address: int + @param address: Memory address. + + @rtype: bool + @return: C{True} if the given address points to a system defined + breakpoint. System defined breakpoints are hardcoded into + system libraries. + """ + if address: + module = self.get_module_at_address(address) + if module: + return module.match_name("ntdll") or \ + module.match_name("kernel32") + return False + + # FIXME + # In Wine, the system breakpoint seems to be somewhere in kernel32. + def get_system_breakpoint(self): + """ + @rtype: int or None + @return: Memory address of the system breakpoint + within the process address space. + Returns C{None} on error. + """ + return self.__get_system_breakpoint("ntdll!DbgBreakPoint") + + # I don't know when this breakpoint is actually used... + def get_user_breakpoint(self): + """ + @rtype: int or None + @return: Memory address of the user breakpoint + within the process address space. + Returns C{None} on error. + """ + return self.__get_system_breakpoint("ntdll!DbgUserBreakPoint") + + # On some platforms, this breakpoint can only be resolved + # when the debugging symbols for ntdll.dll are loaded. + def get_breakin_breakpoint(self): + """ + @rtype: int or None + @return: Memory address of the remote breakin breakpoint + within the process address space. + Returns C{None} on error. + """ + return self.__get_system_breakpoint("ntdll!DbgUiRemoteBreakin") + + # Equivalent of ntdll!DbgBreakPoint in Wow64. + def get_wow64_system_breakpoint(self): + """ + @rtype: int or None + @return: Memory address of the Wow64 system breakpoint + within the process address space. + Returns C{None} on error. + """ + return self.__get_system_breakpoint("ntdll32!DbgBreakPoint") + + # Equivalent of ntdll!DbgUserBreakPoint in Wow64. 
+ def get_wow64_user_breakpoint(self): + """ + @rtype: int or None + @return: Memory address of the Wow64 user breakpoint + within the process address space. + Returns C{None} on error. + """ + return self.__get_system_breakpoint("ntdll32!DbgUserBreakPoint") + + # Equivalent of ntdll!DbgUiRemoteBreakin in Wow64. + def get_wow64_breakin_breakpoint(self): + """ + @rtype: int or None + @return: Memory address of the Wow64 remote breakin breakpoint + within the process address space. + Returns C{None} on error. + """ + return self.__get_system_breakpoint("ntdll32!DbgUiRemoteBreakin") + +#------------------------------------------------------------------------------ + + def load_symbols(self): + """ + Loads the debugging symbols for all modules in this snapshot. + Automatically called by L{get_symbols}. + """ + for aModule in self.iter_modules(): + aModule.load_symbols() + + def unload_symbols(self): + """ + Unloads the debugging symbols for all modules in this snapshot. + """ + for aModule in self.iter_modules(): + aModule.unload_symbols() + + def get_symbols(self): + """ + Returns the debugging symbols for all modules in this snapshot. + The symbols are automatically loaded when needed. + + @rtype: list of tuple( str, int, int ) + @return: List of symbols. + Each symbol is represented by a tuple that contains: + - Symbol name + - Symbol memory address + - Symbol size in bytes + """ + symbols = list() + for aModule in self.iter_modules(): + for symbol in aModule.iter_symbols(): + symbols.append(symbol) + return symbols + + def iter_symbols(self): + """ + Returns an iterator for the debugging symbols in all modules in this + snapshot, in no particular order. + The symbols are automatically loaded when needed. + + @rtype: iterator of tuple( str, int, int ) + @return: Iterator of symbols. + Each symbol is represented by a tuple that contains: + - Symbol name + - Symbol memory address + - Symbol size in bytes + """ + for aModule in self.iter_modules(): + for symbol in aModule.iter_symbols(): + yield symbol + + def resolve_symbol(self, symbol, bCaseSensitive = False): + """ + Resolves a debugging symbol's address. + + @type symbol: str + @param symbol: Name of the symbol to resolve. + + @type bCaseSensitive: bool + @param bCaseSensitive: C{True} for case sensitive matches, + C{False} for case insensitive. + + @rtype: int or None + @return: Memory address of symbol. C{None} if not found. + """ + if bCaseSensitive: + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + if symbol == SymbolName: + return SymbolAddress + else: + symbol = symbol.lower() + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + if symbol == SymbolName.lower(): + return SymbolAddress + + def get_symbol_at_address(self, address): + """ + Tries to find the closest matching symbol for the given address. + + @type address: int + @param address: Memory address to query. + + @rtype: None or tuple( str, int, int ) + @return: Returns a tuple consisting of: + - Name + - Address + - Size (in bytes) + Returns C{None} if no symbol could be matched. + """ + # Any module may have symbols pointing anywhere in memory, so there's + # no easy way to optimize this. I guess we're stuck with brute force. 
+ found = None + for (SymbolName, SymbolAddress, SymbolSize) in self.iter_symbols(): + if SymbolAddress > address: + continue + + if SymbolAddress == address: + found = (SymbolName, SymbolAddress, SymbolSize) + break + + if SymbolAddress < address: + if found and (address - found[1]) < (address - SymbolAddress): + continue + else: + found = (SymbolName, SymbolAddress, SymbolSize) + return found +#------------------------------------------------------------------------------ + + # XXX _notify_* methods should not trigger a scan + + def _add_module(self, aModule): + """ + Private method to add a module object to the snapshot. + + @type aModule: L{Module} + @param aModule: Module object. + """ +## if not isinstance(aModule, Module): +## if hasattr(aModule, '__class__'): +## typename = aModule.__class__.__name__ +## else: +## typename = str(type(aModule)) +## msg = "Expected Module, got %s instead" % typename +## raise TypeError(msg) + lpBaseOfDll = aModule.get_base() +## if lpBaseOfDll in self.__moduleDict: +## msg = "Module already exists: %d" % lpBaseOfDll +## raise KeyError(msg) + aModule.set_process(self) + self.__moduleDict[lpBaseOfDll] = aModule + + def _del_module(self, lpBaseOfDll): + """ + Private method to remove a module object from the snapshot. + + @type lpBaseOfDll: int + @param lpBaseOfDll: Module base address. + """ + try: + aModule = self.__moduleDict[lpBaseOfDll] + del self.__moduleDict[lpBaseOfDll] + except KeyError: + aModule = None + msg = "Unknown base address %d" % HexDump.address(lpBaseOfDll) + warnings.warn(msg, RuntimeWarning) + if aModule: + aModule.clear() # remove circular references + + def __add_loaded_module(self, event): + """ + Private method to automatically add new module objects from debug events. + + @type event: L{Event} + @param event: Event object. + """ + lpBaseOfDll = event.get_module_base() + hFile = event.get_file_handle() +## if not self.has_module(lpBaseOfDll): # XXX this would trigger a scan + if lpBaseOfDll not in self.__moduleDict: + fileName = event.get_filename() + if not fileName: + fileName = None + if hasattr(event, 'get_start_address'): + EntryPoint = event.get_start_address() + else: + EntryPoint = None + aModule = Module(lpBaseOfDll, hFile, fileName = fileName, + EntryPoint = EntryPoint, + process = self) + self._add_module(aModule) + else: + aModule = self.get_module(lpBaseOfDll) + if not aModule.hFile and hFile not in (None, 0, + win32.INVALID_HANDLE_VALUE): + aModule.hFile = hFile + if not aModule.process: + aModule.process = self + if aModule.EntryPoint is None and \ + hasattr(event, 'get_start_address'): + aModule.EntryPoint = event.get_start_address() + if not aModule.fileName: + fileName = event.get_filename() + if fileName: + aModule.fileName = fileName + + def _notify_create_process(self, event): + """ + Notify the load of the main module. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{CreateProcessEvent} + @param event: Create process event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + self.__add_loaded_module(event) + return True + + def _notify_load_dll(self, event): + """ + Notify the load of a new module. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{LoadDLLEvent} + @param event: Load DLL event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. 
+ """ + self.__add_loaded_module(event) + return True + + def _notify_unload_dll(self, event): + """ + Notify the release of a loaded module. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{UnloadDLLEvent} + @param event: Unload DLL event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + lpBaseOfDll = event.get_module_base() +## if self.has_module(lpBaseOfDll): # XXX this would trigger a scan + if lpBaseOfDll in self.__moduleDict: + self._del_module(lpBaseOfDll) + return True diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/README b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/README new file mode 100644 index 00000000..9d0fea9f --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/README @@ -0,0 +1 @@ +Here go the plugins for the interactive debugger. \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/__init__.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/__init__.py new file mode 100644 index 00000000..3836e09d --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/__init__.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Plugins folder for the WinAppDbg interactive debugger. +""" + +__revision__ = "$Id: __init__.py 1125 2012-10-22 14:54:39Z qvasimodo $" diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_example.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_example.py new file mode 100644 index 00000000..591ce68b --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_example.py @@ -0,0 +1,41 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Command line debugger using WinAppDbg +# Example command +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +__revision__ = "$Id$" + +def do(self, arg): + ".example - This is an example plugin for the command line debugger" + print "This is an example command." + print "%s.do(%r, %r):" % (__name__, self, arg) + print " last event", self.lastEvent + print " prefix", self.cmdprefix + print " arguments", self.split_tokens(arg) diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_exchain.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_exchain.py new file mode 100644 index 00000000..aa97fec7 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_exchain.py @@ -0,0 +1,51 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Command line debugger using WinAppDbg +# Show exception handlers list +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +__revision__ = "$Id$" + +from winappdbg import HexDump, Table + +def do(self, arg): + ".exchain - Show the SEH chain" + thread = self.get_thread_from_prefix() + print "Exception handlers for thread %d" % thread.get_tid() + print + table = Table() + table.addRow("Block", "Function") + bits = thread.get_bits() + for (seh, seh_func) in thread.get_seh_chain(): + if seh is not None: + seh = HexDump.address(seh, bits) + if seh_func is not None: + seh_func = HexDump.address(seh_func, bits) + table.addRow(seh, seh_func) + print table.getOutput() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_exploitable.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_exploitable.py new file mode 100644 index 00000000..64e93f6f --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_exploitable.py @@ -0,0 +1,50 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Command line debugger using WinAppDbg +# Determine the approximate exploitability rating +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+ +__revision__ = "$Id$" + +def do(self, arg): + ".exploitable - Determine the approximate exploitability rating" + + from winappdbg import Crash + + event = self.debug.lastEvent + crash = Crash(event) + crash.fetch_extra_data(event) + + status, rule, description = crash.isExploitable() + + print "-" * 79 + print "Exploitability: %s" % status + print "Matched rule: %s" % rule + print "Description: %s" % description + print "-" * 79 diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_symfix.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_symfix.py new file mode 100644 index 00000000..cccfbe96 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/plugins/do_symfix.py @@ -0,0 +1,37 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Command line debugger using WinAppDbg +# Fix the symbol store path +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +__revision__ = "$Id$" + +def do(self, arg): + ".symfix - Set the default Microsoft Symbol Store settings if missing" + self.debug.system.fix_symbol_store_path(remote = True, force = False) diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/process.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/process.py new file mode 100644 index 00000000..6d75f803 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/process.py @@ -0,0 +1,5021 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. 
+# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Process instrumentation. + +@group Instrumentation: + Process +""" + +from __future__ import with_statement + +# FIXME +# I've been told the host process for the latest versions of VMWare +# can't be instrumented, because they try to stop code injection into the VMs. +# The solution appears to be to run the debugger from a user account that +# belongs to the VMware group. I haven't confirmed this yet. + +__revision__ = "$Id$" + +__all__ = ['Process'] + +import sys +from winappdbg import win32 +from winappdbg import compat +from winappdbg.textio import HexDump, HexInput +from winappdbg.util import Regenerator, PathOperations, MemoryAddresses +from winappdbg.module import Module, _ModuleContainer +from winappdbg.thread import Thread, _ThreadContainer +from winappdbg.window import Window +from winappdbg.search import Search, \ + Pattern, BytePattern, TextPattern, RegExpPattern, HexPattern +from winappdbg.disasm import Disassembler + +import re +import os +import os.path +import ctypes +import struct +import warnings +import traceback + +# delayed import +System = None + +#============================================================================== + +# TODO +# * Remote GetLastError() +# * The memory operation methods do not take into account that code breakpoints +# change the memory. This object should talk to BreakpointContainer to +# retrieve the original memory contents where code breakpoints are enabled. +# * A memory cache could be implemented here. + +class Process (_ThreadContainer, _ModuleContainer): + """ + Interface to a process. Contains threads and modules snapshots. 
+ + @group Properties: + get_pid, is_alive, is_debugged, is_wow64, get_arch, get_bits, + get_filename, get_exit_code, + get_start_time, get_exit_time, get_running_time, + get_services, get_dep_policy, get_peb, get_peb_address, + get_entry_point, get_main_module, get_image_base, get_image_name, + get_command_line, get_environment, + get_command_line_block, + get_environment_block, get_environment_variables, + get_handle, open_handle, close_handle + + @group Instrumentation: + kill, wait, suspend, resume, inject_code, inject_dll, clean_exit + + @group Disassembly: + disassemble, disassemble_around, disassemble_around_pc, + disassemble_string, disassemble_instruction, disassemble_current + + @group Debugging: + flush_instruction_cache, debug_break, peek_pointers_in_data + + @group Memory mapping: + take_memory_snapshot, generate_memory_snapshot, iter_memory_snapshot, + restore_memory_snapshot, get_memory_map, get_mapped_filenames, + generate_memory_map, iter_memory_map, + is_pointer, is_address_valid, is_address_free, is_address_reserved, + is_address_commited, is_address_guard, is_address_readable, + is_address_writeable, is_address_copy_on_write, is_address_executable, + is_address_executable_and_writeable, + is_buffer, + is_buffer_readable, is_buffer_writeable, is_buffer_executable, + is_buffer_executable_and_writeable, is_buffer_copy_on_write + + @group Memory allocation: + malloc, free, mprotect, mquery + + @group Memory read: + read, read_char, read_int, read_uint, read_float, read_double, + read_dword, read_qword, read_pointer, read_string, read_structure, + peek, peek_char, peek_int, peek_uint, peek_float, peek_double, + peek_dword, peek_qword, peek_pointer, peek_string + + @group Memory write: + write, write_char, write_int, write_uint, write_float, write_double, + write_dword, write_qword, write_pointer, + poke, poke_char, poke_int, poke_uint, poke_float, poke_double, + poke_dword, poke_qword, poke_pointer + + @group Memory search: + search, search_bytes, search_hexa, search_text, search_regexp, strings + + @group Processes snapshot: + scan, clear, __contains__, __iter__, __len__ + + @group Deprecated: + get_environment_data, parse_environment_data + + @type dwProcessId: int + @ivar dwProcessId: Global process ID. Use L{get_pid} instead. + + @type hProcess: L{ProcessHandle} + @ivar hProcess: Handle to the process. Use L{get_handle} instead. + + @type fileName: str + @ivar fileName: Filename of the main module. Use L{get_filename} instead. + """ + + def __init__(self, dwProcessId, hProcess = None, fileName = None): + """ + @type dwProcessId: int + @param dwProcessId: Global process ID. + + @type hProcess: L{ProcessHandle} + @param hProcess: Handle to the process. + + @type fileName: str + @param fileName: (Optional) Filename of the main module. + """ + _ThreadContainer.__init__(self) + _ModuleContainer.__init__(self) + + self.dwProcessId = dwProcessId + self.hProcess = hProcess + self.fileName = fileName + + def get_pid(self): + """ + @rtype: int + @return: Process global ID. + """ + return self.dwProcessId + + def get_filename(self): + """ + @rtype: str + @return: Filename of the main module of the process. + """ + if not self.fileName: + self.fileName = self.get_image_name() + return self.fileName + + def open_handle(self, dwDesiredAccess = win32.PROCESS_ALL_ACCESS): + """ + Opens a new handle to the process. + + The new handle is stored in the L{hProcess} property. 
+ + @warn: Normally you should call L{get_handle} instead, since it's much + "smarter" and tries to reuse handles and merge access rights. + + @type dwDesiredAccess: int + @param dwDesiredAccess: Desired access rights. + Defaults to L{win32.PROCESS_ALL_ACCESS}. + See: U{http://msdn.microsoft.com/en-us/library/windows/desktop/ms684880(v=vs.85).aspx} + + @raise WindowsError: It's not possible to open a handle to the process + with the requested access rights. This tipically happens because + the target process is a system process and the debugger is not + runnning with administrative rights. + """ + hProcess = win32.OpenProcess(dwDesiredAccess, win32.FALSE, self.dwProcessId) + + try: + self.close_handle() + except Exception: + warnings.warn( + "Failed to close process handle: %s" % traceback.format_exc()) + + self.hProcess = hProcess + + def close_handle(self): + """ + Closes the handle to the process. + + @note: Normally you don't need to call this method. All handles + created by I{WinAppDbg} are automatically closed when the garbage + collector claims them. So unless you've been tinkering with it, + setting L{hProcess} to C{None} should be enough. + """ + try: + if hasattr(self.hProcess, 'close'): + self.hProcess.close() + elif self.hProcess not in (None, win32.INVALID_HANDLE_VALUE): + win32.CloseHandle(self.hProcess) + finally: + self.hProcess = None + + def get_handle(self, dwDesiredAccess = win32.PROCESS_ALL_ACCESS): + """ + Returns a handle to the process with I{at least} the access rights + requested. + + @note: + If a handle was previously opened and has the required access + rights, it's reused. If not, a new handle is opened with the + combination of the old and new access rights. + + @type dwDesiredAccess: int + @param dwDesiredAccess: Desired access rights. + Defaults to L{win32.PROCESS_ALL_ACCESS}. + See: U{http://msdn.microsoft.com/en-us/library/windows/desktop/ms684880(v=vs.85).aspx} + + @rtype: L{ProcessHandle} + @return: Handle to the process. + + @raise WindowsError: It's not possible to open a handle to the process + with the requested access rights. This tipically happens because + the target process is a system process and the debugger is not + runnning with administrative rights. + """ + if self.hProcess in (None, win32.INVALID_HANDLE_VALUE): + self.open_handle(dwDesiredAccess) + else: + dwAccess = self.hProcess.dwAccess + if (dwAccess | dwDesiredAccess) != dwAccess: + self.open_handle(dwAccess | dwDesiredAccess) + return self.hProcess + +#------------------------------------------------------------------------------ + + # Not really sure if it's a good idea... +## def __eq__(self, aProcess): +## """ +## Compare two Process objects. The comparison is made using the IDs. +## +## @warning: +## If you have two Process instances with different handles the +## equality operator still returns C{True}, so be careful! +## +## @type aProcess: L{Process} +## @param aProcess: Another Process object. +## +## @rtype: bool +## @return: C{True} if the two process IDs are equal, +## C{False} otherwise. +## """ +## return isinstance(aProcess, Process) and \ +## self.get_pid() == aProcess.get_pid() + + def __contains__(self, anObject): + """ + The same as: C{self.has_thread(anObject) or self.has_module(anObject)} + + @type anObject: L{Thread}, L{Module} or int + @param anObject: Object to look for. + Can be a Thread, Module, thread global ID or module base address. + + @rtype: bool + @return: C{True} if the requested object was found in the snapshot. 
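+
+        Example (a minimal sketch, not part of the upstream sources; the
+        process ID, thread ID and module base address below are hypothetical)::
+
+            aProcess = Process( 1234 )
+            aProcess.scan()                      # populate threads and modules
+            if 4321 in aProcess:                 # thread global ID
+                pass
+            if 0x7c800000 in aProcess:           # module base address
+                pass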
+ """ + return _ThreadContainer.__contains__(self, anObject) or \ + _ModuleContainer.__contains__(self, anObject) + + def __len__(self): + """ + @see: L{get_thread_count}, L{get_module_count} + @rtype: int + @return: Count of L{Thread} and L{Module} objects in this snapshot. + """ + return _ThreadContainer.__len__(self) + \ + _ModuleContainer.__len__(self) + + class __ThreadsAndModulesIterator (object): + """ + Iterator object for L{Process} objects. + Iterates through L{Thread} objects first, L{Module} objects next. + """ + + def __init__(self, container): + """ + @type container: L{Process} + @param container: L{Thread} and L{Module} container. + """ + self.__container = container + self.__iterator = None + self.__state = 0 + + def __iter__(self): + 'x.__iter__() <==> iter(x)' + return self + + def next(self): + 'x.next() -> the next value, or raise StopIteration' + if self.__state == 0: + self.__iterator = self.__container.iter_threads() + self.__state = 1 + if self.__state == 1: + try: + return self.__iterator.next() + except StopIteration: + self.__iterator = self.__container.iter_modules() + self.__state = 2 + if self.__state == 2: + try: + return self.__iterator.next() + except StopIteration: + self.__iterator = None + self.__state = 3 + raise StopIteration + + def __iter__(self): + """ + @see: L{iter_threads}, L{iter_modules} + @rtype: iterator + @return: Iterator of L{Thread} and L{Module} objects in this snapshot. + All threads are iterated first, then all modules. + """ + return self.__ThreadsAndModulesIterator(self) + +#------------------------------------------------------------------------------ + + def wait(self, dwTimeout = None): + """ + Waits for the process to finish executing. + + @raise WindowsError: On error an exception is raised. + """ + self.get_handle(win32.SYNCHRONIZE).wait(dwTimeout) + + def kill(self, dwExitCode = 0): + """ + Terminates the execution of the process. + + @raise WindowsError: On error an exception is raised. + """ + hProcess = self.get_handle(win32.PROCESS_TERMINATE) + win32.TerminateProcess(hProcess, dwExitCode) + + def suspend(self): + """ + Suspends execution on all threads of the process. + + @raise WindowsError: On error an exception is raised. + """ + self.scan_threads() # force refresh the snapshot + suspended = list() + try: + for aThread in self.iter_threads(): + aThread.suspend() + suspended.append(aThread) + except Exception: + for aThread in suspended: + try: + aThread.resume() + except Exception: + pass + raise + + def resume(self): + """ + Resumes execution on all threads of the process. + + @raise WindowsError: On error an exception is raised. + """ + if self.get_thread_count() == 0: + self.scan_threads() # only refresh the snapshot if empty + resumed = list() + try: + for aThread in self.iter_threads(): + aThread.resume() + resumed.append(aThread) + except Exception: + for aThread in resumed: + try: + aThread.suspend() + except Exception: + pass + raise + + def is_debugged(self): + """ + Tries to determine if the process is being debugged by another process. + It may detect other debuggers besides WinAppDbg. + + @rtype: bool + @return: C{True} if the process has a debugger attached. + + @warning: + May return inaccurate results when some anti-debug techniques are + used by the target process. + + @note: To know if a process currently being debugged by a L{Debug} + object, call L{Debug.is_debugee} instead. + """ + # FIXME the MSDN docs don't say what access rights are needed here! 
+ hProcess = self.get_handle(win32.PROCESS_QUERY_INFORMATION) + return win32.CheckRemoteDebuggerPresent(hProcess) + + def is_alive(self): + """ + @rtype: bool + @return: C{True} if the process is currently running. + """ + try: + self.wait(0) + except WindowsError: + e = sys.exc_info()[1] + return e.winerror == win32.WAIT_TIMEOUT + return False + + def get_exit_code(self): + """ + @rtype: int + @return: Process exit code, or C{STILL_ACTIVE} if it's still alive. + + @warning: If a process returns C{STILL_ACTIVE} as it's exit code, + you may not be able to determine if it's active or not with this + method. Use L{is_alive} to check if the process is still active. + Alternatively you can call L{get_handle} to get the handle object + and then L{ProcessHandle.wait} on it to wait until the process + finishes running. + """ + if win32.PROCESS_ALL_ACCESS == win32.PROCESS_ALL_ACCESS_VISTA: + dwAccess = win32.PROCESS_QUERY_LIMITED_INFORMATION + else: + dwAccess = win32.PROCESS_QUERY_INFORMATION + return win32.GetExitCodeProcess( self.get_handle(dwAccess) ) + +#------------------------------------------------------------------------------ + + def scan(self): + """ + Populates the snapshot of threads and modules. + """ + self.scan_threads() + self.scan_modules() + + def clear(self): + """ + Clears the snapshot of threads and modules. + """ + try: + try: + self.clear_threads() + finally: + self.clear_modules() + finally: + self.close_handle() + +#------------------------------------------------------------------------------ + + # Regular expression to find hexadecimal values of any size. + __hexa_parameter = re.compile('0x[0-9A-Fa-f]+') + + def __fixup_labels(self, disasm): + """ + Private method used when disassembling from process memory. + + It has no return value because the list is modified in place. On return + all raw memory addresses are replaced by labels when possible. + + @type disasm: list of tuple(int, int, str, str) + @param disasm: Output of one of the dissassembly functions. + """ + for index in compat.xrange(len(disasm)): + (address, size, text, dump) = disasm[index] + m = self.__hexa_parameter.search(text) + while m: + s, e = m.span() + value = text[s:e] + try: + label = self.get_label_at_address( int(value, 0x10) ) + except Exception: + label = None + if label: + text = text[:s] + label + text[e:] + e = s + len(value) + m = self.__hexa_parameter.search(text, e) + disasm[index] = (address, size, text, dump) + + def disassemble_string(self, lpAddress, code): + """ + Disassemble instructions from a block of binary code. + + @type lpAddress: int + @param lpAddress: Memory address where the code was read from. + + @type code: str + @param code: Binary code to disassemble. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + + @raise NotImplementedError: + No compatible disassembler was found for the current platform. + """ + try: + disasm = self.__disasm + except AttributeError: + disasm = self.__disasm = Disassembler( self.get_arch() ) + return disasm.decode(lpAddress, code) + + def disassemble(self, lpAddress, dwSize): + """ + Disassemble instructions from the address space of the process. + + @type lpAddress: int + @param lpAddress: Memory address where to read the code from. + + @type dwSize: int + @param dwSize: Size of binary code to disassemble. 
+ + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + data = self.read(lpAddress, dwSize) + disasm = self.disassemble_string(lpAddress, data) + self.__fixup_labels(disasm) + return disasm + + # FIXME + # This algorithm really sucks, I've got to write a better one :P + def disassemble_around(self, lpAddress, dwSize = 64): + """ + Disassemble around the given address. + + @type lpAddress: int + @param lpAddress: Memory address where to read the code from. + + @type dwSize: int + @param dwSize: Delta offset. + Code will be read from lpAddress - dwSize to lpAddress + dwSize. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + dwDelta = int(float(dwSize) / 2.0) + addr_1 = lpAddress - dwDelta + addr_2 = lpAddress + size_1 = dwDelta + size_2 = dwSize - dwDelta + data = self.read(addr_1, dwSize) + data_1 = data[:size_1] + data_2 = data[size_1:] + disasm_1 = self.disassemble_string(addr_1, data_1) + disasm_2 = self.disassemble_string(addr_2, data_2) + disasm = disasm_1 + disasm_2 + self.__fixup_labels(disasm) + return disasm + + def disassemble_around_pc(self, dwThreadId, dwSize = 64): + """ + Disassemble around the program counter of the given thread. + + @type dwThreadId: int + @param dwThreadId: Global thread ID. + The program counter for this thread will be used as the disassembly + address. + + @type dwSize: int + @param dwSize: Delta offset. + Code will be read from pc - dwSize to pc + dwSize. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + aThread = self.get_thread(dwThreadId) + return self.disassemble_around(aThread.get_pc(), dwSize) + + def disassemble_instruction(self, lpAddress): + """ + Disassemble the instruction at the given memory address. + + @type lpAddress: int + @param lpAddress: Memory address where to read the code from. + + @rtype: tuple( long, int, str, str ) + @return: The tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + return self.disassemble(lpAddress, 15)[0] + + def disassemble_current(self, dwThreadId): + """ + Disassemble the instruction at the program counter of the given thread. + + @type dwThreadId: int + @param dwThreadId: Global thread ID. + The program counter for this thread will be used as the disassembly + address. + + @rtype: tuple( long, int, str, str ) + @return: The tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. 
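+
+        Example (a minimal sketch, not part of the upstream sources; assumes
+        the snapshot already contains the given thread and that its program
+        counter points to readable memory)::
+
+            ( address, size, text, dump ) = aProcess.disassemble_current( dwThreadId )
+            print "%s %s" % ( HexDump.address(address), text )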
+ """ + aThread = self.get_thread(dwThreadId) + return self.disassemble_instruction(aThread.get_pc()) + +#------------------------------------------------------------------------------ + + def flush_instruction_cache(self): + """ + Flush the instruction cache. This is required if the process memory is + modified and one or more threads are executing nearby the modified + memory region. + + @see: U{http://blogs.msdn.com/oldnewthing/archive/2003/12/08/55954.aspx#55958} + + @raise WindowsError: Raises exception on error. + """ + # FIXME + # No idea what access rights are required here! + # Maybe PROCESS_VM_OPERATION ??? + # In any case we're only calling this from the debugger, + # so it should be fine (we already have PROCESS_ALL_ACCESS). + win32.FlushInstructionCache( self.get_handle() ) + + def debug_break(self): + """ + Triggers the system breakpoint in the process. + + @raise WindowsError: On error an exception is raised. + """ + # The exception is raised by a new thread. + # When continuing the exception, the thread dies by itself. + # This thread is hidden from the debugger. + win32.DebugBreakProcess( self.get_handle() ) + + def is_wow64(self): + """ + Determines if the process is running under WOW64. + + @rtype: bool + @return: + C{True} if the process is running under WOW64. That is, a 32-bit + application running in a 64-bit Windows. + + C{False} if the process is either a 32-bit application running in + a 32-bit Windows, or a 64-bit application running in a 64-bit + Windows. + + @raise WindowsError: On error an exception is raised. + + @see: U{http://msdn.microsoft.com/en-us/library/aa384249(VS.85).aspx} + """ + try: + wow64 = self.__wow64 + except AttributeError: + if (win32.bits == 32 and not win32.wow64): + wow64 = False + else: + if win32.PROCESS_ALL_ACCESS == win32.PROCESS_ALL_ACCESS_VISTA: + dwAccess = win32.PROCESS_QUERY_LIMITED_INFORMATION + else: + dwAccess = win32.PROCESS_QUERY_INFORMATION + hProcess = self.get_handle(dwAccess) + try: + wow64 = win32.IsWow64Process(hProcess) + except AttributeError: + wow64 = False + self.__wow64 = wow64 + return wow64 + + def get_arch(self): + """ + @rtype: str + @return: The architecture in which this process believes to be running. + For example, if running a 32 bit binary in a 64 bit machine, the + architecture returned by this method will be L{win32.ARCH_I386}, + but the value of L{System.arch} will be L{win32.ARCH_AMD64}. + """ + + # Are we in a 32 bit machine? + if win32.bits == 32 and not win32.wow64: + return win32.arch + + # Is the process outside of WOW64? + if not self.is_wow64(): + return win32.arch + + # In WOW64, "amd64" becomes "i386". + if win32.arch == win32.ARCH_AMD64: + return win32.ARCH_I386 + + # We don't know the translation for other architectures. + raise NotImplementedError() + + def get_bits(self): + """ + @rtype: str + @return: The number of bits in which this process believes to be + running. For example, if running a 32 bit binary in a 64 bit + machine, the number of bits returned by this method will be C{32}, + but the value of L{System.arch} will be C{64}. + """ + + # Are we in a 32 bit machine? + if win32.bits == 32 and not win32.wow64: + + # All processes are 32 bits. + return 32 + + # Is the process inside WOW64? + if self.is_wow64(): + + # The process is 32 bits. + return 32 + + # The process is 64 bits. 
+ return 64 + + # TODO: get_os, to test compatibility run + # See: http://msdn.microsoft.com/en-us/library/windows/desktop/ms683224(v=vs.85).aspx + +#------------------------------------------------------------------------------ + + def get_start_time(self): + """ + Determines when has this process started running. + + @rtype: win32.SYSTEMTIME + @return: Process start time. + """ + if win32.PROCESS_ALL_ACCESS == win32.PROCESS_ALL_ACCESS_VISTA: + dwAccess = win32.PROCESS_QUERY_LIMITED_INFORMATION + else: + dwAccess = win32.PROCESS_QUERY_INFORMATION + hProcess = self.get_handle(dwAccess) + CreationTime = win32.GetProcessTimes(hProcess)[0] + return win32.FileTimeToSystemTime(CreationTime) + + def get_exit_time(self): + """ + Determines when has this process finished running. + If the process is still alive, the current time is returned instead. + + @rtype: win32.SYSTEMTIME + @return: Process exit time. + """ + if self.is_alive(): + ExitTime = win32.GetSystemTimeAsFileTime() + else: + if win32.PROCESS_ALL_ACCESS == win32.PROCESS_ALL_ACCESS_VISTA: + dwAccess = win32.PROCESS_QUERY_LIMITED_INFORMATION + else: + dwAccess = win32.PROCESS_QUERY_INFORMATION + hProcess = self.get_handle(dwAccess) + ExitTime = win32.GetProcessTimes(hProcess)[1] + return win32.FileTimeToSystemTime(ExitTime) + + def get_running_time(self): + """ + Determines how long has this process been running. + + @rtype: long + @return: Process running time in milliseconds. + """ + if win32.PROCESS_ALL_ACCESS == win32.PROCESS_ALL_ACCESS_VISTA: + dwAccess = win32.PROCESS_QUERY_LIMITED_INFORMATION + else: + dwAccess = win32.PROCESS_QUERY_INFORMATION + hProcess = self.get_handle(dwAccess) + (CreationTime, ExitTime, _, _) = win32.GetProcessTimes(hProcess) + if self.is_alive(): + ExitTime = win32.GetSystemTimeAsFileTime() + CreationTime = CreationTime.dwLowDateTime + (CreationTime.dwHighDateTime << 32) + ExitTime = ExitTime.dwLowDateTime + ( ExitTime.dwHighDateTime << 32) + RunningTime = ExitTime - CreationTime + return RunningTime / 10000 # 100 nanoseconds steps => milliseconds + +#------------------------------------------------------------------------------ + + def __load_System_class(self): + global System # delayed import + if System is None: + from system import System + + def get_services(self): + """ + Retrieves the list of system services that are currently running in + this process. + + @see: L{System.get_services} + + @rtype: list( L{win32.ServiceStatusProcessEntry} ) + @return: List of service status descriptors. + """ + self.__load_System_class() + pid = self.get_pid() + return [d for d in System.get_active_services() if d.ProcessId == pid] + +#------------------------------------------------------------------------------ + + def get_dep_policy(self): + """ + Retrieves the DEP (Data Execution Prevention) policy for this process. + + @note: This method is only available in Windows XP SP3 and above, and + only for 32 bit processes. It will fail in any other circumstance. + + @see: U{http://msdn.microsoft.com/en-us/library/bb736297(v=vs.85).aspx} + + @rtype: tuple(int, int) + @return: + The first member of the tuple is the DEP flags. It can be a + combination of the following values: + - 0: DEP is disabled for this process. + - 1: DEP is enabled for this process. (C{PROCESS_DEP_ENABLE}) + - 2: DEP-ATL thunk emulation is disabled for this process. + (C{PROCESS_DEP_DISABLE_ATL_THUNK_EMULATION}) + + The second member of the tuple is the permanent flag. If C{TRUE} + the DEP settings cannot be changed in runtime for this process. 
+ + @raise WindowsError: On error an exception is raised. + """ + hProcess = self.get_handle(win32.PROCESS_QUERY_INFORMATION) + try: + return win32.kernel32.GetProcessDEPPolicy(hProcess) + except AttributeError: + msg = "This method is only available in Windows XP SP3 and above." + raise NotImplementedError(msg) + +#------------------------------------------------------------------------------ + + def get_peb(self): + """ + Returns a copy of the PEB. + To dereference pointers in it call L{Process.read_structure}. + + @rtype: L{win32.PEB} + @return: PEB structure. + @raise WindowsError: An exception is raised on error. + """ + self.get_handle( win32.PROCESS_VM_READ | + win32.PROCESS_QUERY_INFORMATION ) + return self.read_structure(self.get_peb_address(), win32.PEB) + + def get_peb_address(self): + """ + Returns a remote pointer to the PEB. + + @rtype: int + @return: Remote pointer to the L{win32.PEB} structure. + Returns C{None} on error. + """ + try: + return self._peb_ptr + except AttributeError: + hProcess = self.get_handle(win32.PROCESS_QUERY_INFORMATION) + pbi = win32.NtQueryInformationProcess(hProcess, + win32.ProcessBasicInformation) + address = pbi.PebBaseAddress + self._peb_ptr = address + return address + + def get_entry_point(self): + """ + Alias to C{process.get_main_module().get_entry_point()}. + + @rtype: int + @return: Address of the entry point of the main module. + """ + return self.get_main_module().get_entry_point() + + def get_main_module(self): + """ + @rtype: L{Module} + @return: Module object for the process main module. + """ + return self.get_module(self.get_image_base()) + + def get_image_base(self): + """ + @rtype: int + @return: Image base address for the process main module. + """ + return self.get_peb().ImageBaseAddress + + def get_image_name(self): + """ + @rtype: int + @return: Filename of the process main module. + + This method does it's best to retrieve the filename. + However sometimes this is not possible, so C{None} may + be returned instead. + """ + + # Method 1: Module.fileName + # It's cached if the filename was already found by the other methods, + # if it came with the corresponding debug event, or it was found by the + # toolhelp API. + mainModule = None + try: + mainModule = self.get_main_module() + name = mainModule.fileName + if not name: + name = None + except (KeyError, AttributeError, WindowsError): +## traceback.print_exc() # XXX DEBUG + name = None + + # Method 2: QueryFullProcessImageName() + # Not implemented until Windows Vista. + if not name: + try: + hProcess = self.get_handle( + win32.PROCESS_QUERY_LIMITED_INFORMATION) + name = win32.QueryFullProcessImageName(hProcess) + except (AttributeError, WindowsError): +## traceback.print_exc() # XXX DEBUG + name = None + + # Method 3: GetProcessImageFileName() + # + # Not implemented until Windows XP. + # For more info see: + # https://voidnish.wordpress.com/2005/06/20/getprocessimagefilenamequerydosdevice-trivia/ + if not name: + try: + hProcess = self.get_handle(win32.PROCESS_QUERY_INFORMATION) + name = win32.GetProcessImageFileName(hProcess) + if name: + name = PathOperations.native_to_win32_pathname(name) + else: + name = None + except (AttributeError, WindowsError): +## traceback.print_exc() # XXX DEBUG + if not name: + name = None + + # Method 4: GetModuleFileNameEx() + # Not implemented until Windows 2000. + # + # May be spoofed by malware, since this information resides + # in usermode space (see http://www.ragestorm.net/blogs/?p=163). 
+ if not name: + try: + hProcess = self.get_handle( win32.PROCESS_VM_READ | + win32.PROCESS_QUERY_INFORMATION ) + try: + name = win32.GetModuleFileNameEx(hProcess) + except WindowsError: +## traceback.print_exc() # XXX DEBUG + name = win32.GetModuleFileNameEx( + hProcess, self.get_image_base()) + if name: + name = PathOperations.native_to_win32_pathname(name) + else: + name = None + except (AttributeError, WindowsError): +## traceback.print_exc() # XXX DEBUG + if not name: + name = None + + # Method 5: PEB.ProcessParameters->ImagePathName + # + # May fail since it's using an undocumented internal structure. + # + # May be spoofed by malware, since this information resides + # in usermode space (see http://www.ragestorm.net/blogs/?p=163). + if not name: + try: + peb = self.get_peb() + pp = self.read_structure(peb.ProcessParameters, + win32.RTL_USER_PROCESS_PARAMETERS) + s = pp.ImagePathName + name = self.peek_string(s.Buffer, + dwMaxSize=s.MaximumLength, fUnicode=True) + if name: + name = PathOperations.native_to_win32_pathname(name) + else: + name = None + except (AttributeError, WindowsError): +## traceback.print_exc() # XXX DEBUG + name = None + + # Method 6: Module.get_filename() + # It tries to get the filename from the file handle. + # + # There are currently some problems due to the strange way the API + # works - it returns the pathname without the drive letter, and I + # couldn't figure out a way to fix it. + if not name and mainModule is not None: + try: + name = mainModule.get_filename() + if not name: + name = None + except (AttributeError, WindowsError): +## traceback.print_exc() # XXX DEBUG + name = None + + # Remember the filename. + if name and mainModule is not None: + mainModule.fileName = name + + # Return the image filename, or None on error. + return name + + def get_command_line_block(self): + """ + Retrieves the command line block memory address and size. + + @rtype: tuple(int, int) + @return: Tuple with the memory address of the command line block + and it's maximum size in Unicode characters. + + @raise WindowsError: On error an exception is raised. + """ + peb = self.get_peb() + pp = self.read_structure(peb.ProcessParameters, + win32.RTL_USER_PROCESS_PARAMETERS) + s = pp.CommandLine + return (s.Buffer, s.MaximumLength) + + def get_environment_block(self): + """ + Retrieves the environment block memory address for the process. + + @note: The size is always enough to contain the environment data, but + it may not be an exact size. It's best to read the memory and + scan for two null wide chars to find the actual size. + + @rtype: tuple(int, int) + @return: Tuple with the memory address of the environment block + and it's size. + + @raise WindowsError: On error an exception is raised. + """ + peb = self.get_peb() + pp = self.read_structure(peb.ProcessParameters, + win32.RTL_USER_PROCESS_PARAMETERS) + Environment = pp.Environment + try: + EnvironmentSize = pp.EnvironmentSize + except AttributeError: + mbi = self.mquery(Environment) + EnvironmentSize = mbi.RegionSize + mbi.BaseAddress - Environment + return (Environment, EnvironmentSize) + + def get_command_line(self): + """ + Retrieves the command line with wich the program was started. + + @rtype: str + @return: Command line string. + + @raise WindowsError: On error an exception is raised. 
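+
+        Example (a minimal sketch, not part of the upstream sources; the
+        process ID is hypothetical and the caller needs sufficient privileges
+        to read the target process memory)::
+
+            aProcess = Process( 1234 )
+            print aProcess.get_command_line()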
+ """ + (Buffer, MaximumLength) = self.get_command_line_block() + CommandLine = self.peek_string(Buffer, dwMaxSize=MaximumLength, + fUnicode=True) + gst = win32.GuessStringType + if gst.t_default == gst.t_ansi: + CommandLine = CommandLine.encode('cp1252') + return CommandLine + + def get_environment_variables(self): + """ + Retrieves the environment variables with wich the program is running. + + @rtype: list of tuple(compat.unicode, compat.unicode) + @return: Environment keys and values as found in the process memory. + + @raise WindowsError: On error an exception is raised. + """ + + # Note: the first bytes are garbage and must be skipped. Then the first + # two environment entries are the current drive and directory as key + # and value pairs, followed by the ExitCode variable (it's what batch + # files know as "errorlevel"). After that, the real environment vars + # are there in alphabetical order. In theory that's where it stops, + # but I've always seen one more "variable" tucked at the end which + # may be another environment block but in ANSI. I haven't examined it + # yet, I'm just skipping it because if it's parsed as Unicode it just + # renders garbage. + + # Read the environment block contents. + data = self.peek( *self.get_environment_block() ) + + # Put them into a Unicode buffer. + tmp = ctypes.create_string_buffer(data) + buffer = ctypes.create_unicode_buffer(len(data)) + ctypes.memmove(buffer, tmp, len(data)) + del tmp + + # Skip until the first Unicode null char is found. + pos = 0 + while buffer[pos] != u'\0': + pos += 1 + pos += 1 + + # Loop for each environment variable... + environment = [] + while buffer[pos] != u'\0': + + # Until we find a null char... + env_name_pos = pos + env_name = u'' + found_name = False + while buffer[pos] != u'\0': + + # Get the current char. + char = buffer[pos] + + # Is it an equal sign? + if char == u'=': + + # Skip leading equal signs. + if env_name_pos == pos: + env_name_pos += 1 + pos += 1 + continue + + # Otherwise we found the separator equal sign. + pos += 1 + found_name = True + break + + # Add the char to the variable name. + env_name += char + + # Next char. + pos += 1 + + # If the name was not parsed properly, stop. + if not found_name: + break + + # Read the variable value until we find a null char. + env_value = u'' + while buffer[pos] != u'\0': + env_value += buffer[pos] + pos += 1 + + # Skip the null char. + pos += 1 + + # Add to the list of environment variables found. + environment.append( (env_name, env_value) ) + + # Remove the last entry, it's garbage. + if environment: + environment.pop() + + # Return the environment variables. + return environment + + def get_environment_data(self, fUnicode = None): + """ + Retrieves the environment block data with wich the program is running. + + @warn: Deprecated since WinAppDbg 1.5. + + @see: L{win32.GuessStringType} + + @type fUnicode: bool or None + @param fUnicode: C{True} to return a list of Unicode strings, C{False} + to return a list of ANSI strings, or C{None} to return whatever + the default is for string types. + + @rtype: list of str + @return: Environment keys and values separated by a (C{=}) character, + as found in the process memory. + + @raise WindowsError: On error an exception is raised. + """ + + # Issue a deprecation warning. + warnings.warn( + "Process.get_environment_data() is deprecated" \ + " since WinAppDbg 1.5.", + DeprecationWarning) + + # Get the environment variables. 
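+ # (Illustrative note added for clarity, not part of the original code:
+ # new callers should prefer get_environment() or simply
+ # dict(self.get_environment_variables()); this deprecated helper only
+ # builds the legacy list of u"KEY=VALUE" strings, as shown below.)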
+ block = [ key + u'=' + value for (key, value) \ + in self.get_environment_variables() ] + + # Convert the data to ANSI if requested. + if fUnicode is None: + gst = win32.GuessStringType + fUnicode = gst.t_default == gst.t_unicode + if not fUnicode: + block = [x.encode('cp1252') for x in block] + + # Return the environment data. + return block + + @staticmethod + def parse_environment_data(block): + """ + Parse the environment block into a Python dictionary. + + @warn: Deprecated since WinAppDbg 1.5. + + @note: Values of duplicated keys are joined using null characters. + + @type block: list of str + @param block: List of strings as returned by L{get_environment_data}. + + @rtype: dict(str S{->} str) + @return: Dictionary of environment keys and values. + """ + + # Issue a deprecation warning. + warnings.warn( + "Process.parse_environment_data() is deprecated" \ + " since WinAppDbg 1.5.", + DeprecationWarning) + + # Create an empty environment dictionary. + environment = dict() + + # End here if the environment block is empty. + if not block: + return environment + + # Prepare the tokens (ANSI or Unicode). + gst = win32.GuessStringType + if type(block[0]) == gst.t_ansi: + equals = '=' + terminator = '\0' + else: + equals = u'=' + terminator = u'\0' + + # Split the blocks into key/value pairs. + for chunk in block: + sep = chunk.find(equals, 1) + if sep < 0: +## raise Exception() + continue # corrupted environment block? + key, value = chunk[:sep], chunk[sep+1:] + + # For duplicated keys, append the value. + # Values are separated using null terminators. + if key not in environment: + environment[key] = value + else: + environment[key] += terminator + value + + # Return the environment dictionary. + return environment + + def get_environment(self, fUnicode = None): + """ + Retrieves the environment with wich the program is running. + + @note: Duplicated keys are joined using null characters. + To avoid this behavior, call L{get_environment_variables} instead + and convert the results to a dictionary directly, like this: + C{dict(process.get_environment_variables())} + + @see: L{win32.GuessStringType} + + @type fUnicode: bool or None + @param fUnicode: C{True} to return a list of Unicode strings, C{False} + to return a list of ANSI strings, or C{None} to return whatever + the default is for string types. + + @rtype: dict(str S{->} str) + @return: Dictionary of environment keys and values. + + @raise WindowsError: On error an exception is raised. + """ + + # Get the environment variables. + variables = self.get_environment_variables() + + # Convert the strings to ANSI if requested. + if fUnicode is None: + gst = win32.GuessStringType + fUnicode = gst.t_default == gst.t_unicode + if not fUnicode: + variables = [ ( key.encode('cp1252'), value.encode('cp1252') ) \ + for (key, value) in variables ] + + # Add the variables to a dictionary, concatenating duplicates. + environment = dict() + for key, value in variables: + if key in environment: + environment[key] = environment[key] + u'\0' + value + else: + environment[key] = value + + # Return the dictionary. + return environment + +#------------------------------------------------------------------------------ + + def search(self, pattern, minAddr = None, maxAddr = None): + """ + Search for the given pattern within the process memory. + + @type pattern: str, compat.unicode or L{Pattern} + @param pattern: Pattern to search for. + It may be a byte string, a Unicode string, or an instance of + L{Pattern}. 
+ + The following L{Pattern} subclasses are provided by WinAppDbg: + - L{BytePattern} + - L{TextPattern} + - L{RegExpPattern} + - L{HexPattern} + + You can also write your own subclass of L{Pattern} for customized + searches. + + @type minAddr: int + @param minAddr: (Optional) Start the search at this memory address. + + @type maxAddr: int + @param maxAddr: (Optional) Stop the search at this memory address. + + @rtype: iterator of tuple( int, int, str ) + @return: An iterator of tuples. Each tuple contains the following: + - The memory address where the pattern was found. + - The size of the data that matches the pattern. + - The data that matches the pattern. + + @raise WindowsError: An error occurred when querying or reading the + process memory. + """ + if isinstance(pattern, str): + return self.search_bytes(pattern, minAddr, maxAddr) + if isinstance(pattern, compat.unicode): + return self.search_bytes(pattern.encode("utf-16le"), + minAddr, maxAddr) + if isinstance(pattern, Pattern): + return Search.search_process(self, pattern, minAddr, maxAddr) + raise TypeError("Unknown pattern type: %r" % type(pattern)) + + def search_bytes(self, bytes, minAddr = None, maxAddr = None): + """ + Search for the given byte pattern within the process memory. + + @type bytes: str + @param bytes: Bytes to search for. + + @type minAddr: int + @param minAddr: (Optional) Start the search at this memory address. + + @type maxAddr: int + @param maxAddr: (Optional) Stop the search at this memory address. + + @rtype: iterator of int + @return: An iterator of memory addresses where the pattern was found. + + @raise WindowsError: An error occurred when querying or reading the + process memory. + """ + pattern = BytePattern(bytes) + matches = Search.search_process(self, pattern, minAddr, maxAddr) + for addr, size, data in matches: + yield addr + + def search_text(self, text, encoding = "utf-16le", + caseSensitive = False, + minAddr = None, + maxAddr = None): + """ + Search for the given text within the process memory. + + @type text: str or compat.unicode + @param text: Text to search for. + + @type encoding: str + @param encoding: (Optional) Encoding for the text parameter. + Only used when the text to search for is a Unicode string. + Don't change unless you know what you're doing! + + @type caseSensitive: bool + @param caseSensitive: C{True} of the search is case sensitive, + C{False} otherwise. + + @type minAddr: int + @param minAddr: (Optional) Start the search at this memory address. + + @type maxAddr: int + @param maxAddr: (Optional) Stop the search at this memory address. + + @rtype: iterator of tuple( int, str ) + @return: An iterator of tuples. Each tuple contains the following: + - The memory address where the pattern was found. + - The text that matches the pattern. + + @raise WindowsError: An error occurred when querying or reading the + process memory. + """ + pattern = TextPattern(text, encoding, caseSensitive) + matches = Search.search_process(self, pattern, minAddr, maxAddr) + for addr, size, data in matches: + yield addr, data + + def search_regexp(self, regexp, flags = 0, + minAddr = None, + maxAddr = None, + bufferPages = -1): + """ + Search for the given regular expression within the process memory. + + @type regexp: str + @param regexp: Regular expression string. + + @type flags: int + @param flags: Regular expression flags. + + @type minAddr: int + @param minAddr: (Optional) Start the search at this memory address. 
+ + @type maxAddr: int + @param maxAddr: (Optional) Stop the search at this memory address. + + @type bufferPages: int + @param bufferPages: (Optional) Number of memory pages to buffer when + performing the search. Valid values are: + - C{0} or C{None}: + Automatically determine the required buffer size. May not give + complete results for regular expressions that match variable + sized strings. + - C{> 0}: Set the buffer size, in memory pages. + - C{< 0}: Disable buffering entirely. This may give you a little + speed gain at the cost of an increased memory usage. If the + target process has very large contiguous memory regions it may + actually be slower or even fail. It's also the only way to + guarantee complete results for regular expressions that match + variable sized strings. + + @rtype: iterator of tuple( int, int, str ) + @return: An iterator of tuples. Each tuple contains the following: + - The memory address where the pattern was found. + - The size of the data that matches the pattern. + - The data that matches the pattern. + + @raise WindowsError: An error occurred when querying or reading the + process memory. + """ + pattern = RegExpPattern(regexp, flags) + return Search.search_process(self, pattern, + minAddr, maxAddr, + bufferPages) + + def search_hexa(self, hexa, minAddr = None, maxAddr = None): + """ + Search for the given hexadecimal pattern within the process memory. + + Hex patterns must be in this form:: + "68 65 6c 6c 6f 20 77 6f 72 6c 64" # "hello world" + + Spaces are optional. Capitalization of hex digits doesn't matter. + This is exactly equivalent to the previous example:: + "68656C6C6F20776F726C64" # "hello world" + + Wildcards are allowed, in the form of a C{?} sign in any hex digit:: + "5? 5? c3" # pop register / pop register / ret + "b8 ?? ?? ?? ??" # mov eax, immediate value + + @type hexa: str + @param hexa: Pattern to search for. + + @type minAddr: int + @param minAddr: (Optional) Start the search at this memory address. + + @type maxAddr: int + @param maxAddr: (Optional) Stop the search at this memory address. + + @rtype: iterator of tuple( int, str ) + @return: An iterator of tuples. Each tuple contains the following: + - The memory address where the pattern was found. + - The bytes that match the pattern. + + @raise WindowsError: An error occurred when querying or reading the + process memory. + """ + pattern = HexPattern(hexa) + matches = Search.search_process(self, pattern, minAddr, maxAddr) + for addr, size, data in matches: + yield addr, data + + def strings(self, minSize = 4, maxSize = 1024): + """ + Extract ASCII strings from the process memory. + + @type minSize: int + @param minSize: (Optional) Minimum size of the strings to search for. + + @type maxSize: int + @param maxSize: (Optional) Maximum size of the strings to search for. + + @rtype: iterator of tuple(int, int, str) + @return: Iterator of strings extracted from the process memory. + Each tuple contains the following: + - The memory address where the string was found. + - The size of the string. + - The string. 
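+
+ Illustrative usage (a sketch added for clarity, not part of the
+ original documentation; C{process} is assumed to be an attached
+ L{Process} instance)::
+ for address, size, text in process.strings(minSize = 8):
+ print("%08x %s" % (address, text))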
+ """ + return Search.extract_ascii_strings(self, minSize = minSize, + maxSize = maxSize) + +#------------------------------------------------------------------------------ + + def __read_c_type(self, address, format, c_type): + size = ctypes.sizeof(c_type) + packed = self.read(address, size) + if len(packed) != size: + raise ctypes.WinError() + return struct.unpack(format, packed)[0] + + def __write_c_type(self, address, format, unpacked): + packed = struct.pack('@L', unpacked) + self.write(address, packed) + + # XXX TODO + # + Maybe change page permissions before trying to read? + def read(self, lpBaseAddress, nSize): + """ + Reads from the memory of the process. + + @see: L{peek} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @type nSize: int + @param nSize: Number of bytes to read. + + @rtype: str + @return: Bytes read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + hProcess = self.get_handle( win32.PROCESS_VM_READ | + win32.PROCESS_QUERY_INFORMATION ) + if not self.is_buffer(lpBaseAddress, nSize): + raise ctypes.WinError(win32.ERROR_INVALID_ADDRESS) + data = win32.ReadProcessMemory(hProcess, lpBaseAddress, nSize) + if len(data) != nSize: + raise ctypes.WinError() + return data + + def write(self, lpBaseAddress, lpBuffer): + """ + Writes to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type lpBuffer: str + @param lpBuffer: Bytes to write. + + @raise WindowsError: On error an exception is raised. + """ + r = self.poke(lpBaseAddress, lpBuffer) + if r != len(lpBuffer): + raise ctypes.WinError() + + def read_char(self, lpBaseAddress): + """ + Reads a single character to the memory of the process. + + @see: L{peek_char} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @rtype: int + @return: Character value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return ord( self.read(lpBaseAddress, 1) ) + + def write_char(self, lpBaseAddress, char): + """ + Writes a single character to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_char} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type char: int + @param char: Character to write. + + @raise WindowsError: On error an exception is raised. + """ + self.write(lpBaseAddress, chr(char)) + + def read_int(self, lpBaseAddress): + """ + Reads a signed integer from the memory of the process. + + @see: L{peek_int} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return self.__read_c_type(lpBaseAddress, compat.b('@l'), ctypes.c_int) + + def write_int(self, lpBaseAddress, unpackedValue): + """ + Writes a signed integer to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_int} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @raise WindowsError: On error an exception is raised. 
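+
+ Illustrative usage (a sketch added for clarity, not part of the
+ original documentation; C{process} and C{address} are assumed to refer
+ to writable memory in the target process)::
+ old_value = process.read_int(address)
+ process.write_int(address, old_value + 1)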
+ """ + self.__write_c_type(lpBaseAddress, '@l', unpackedValue) + + def read_uint(self, lpBaseAddress): + """ + Reads an unsigned integer from the memory of the process. + + @see: L{peek_uint} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return self.__read_c_type(lpBaseAddress, '@L', ctypes.c_uint) + + def write_uint(self, lpBaseAddress, unpackedValue): + """ + Writes an unsigned integer to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_uint} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @raise WindowsError: On error an exception is raised. + """ + self.__write_c_type(lpBaseAddress, '@L', unpackedValue) + + def read_float(self, lpBaseAddress): + """ + Reads a float from the memory of the process. + + @see: L{peek_float} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Floating point value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return self.__read_c_type(lpBaseAddress, '@f', ctypes.c_float) + + def write_float(self, lpBaseAddress, unpackedValue): + """ + Writes a float to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_float} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Floating point value to write. + + @raise WindowsError: On error an exception is raised. + """ + self.__write_c_type(lpBaseAddress, '@f', unpackedValue) + + def read_double(self, lpBaseAddress): + """ + Reads a double from the memory of the process. + + @see: L{peek_double} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Floating point value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return self.__read_c_type(lpBaseAddress, '@d', ctypes.c_double) + + def write_double(self, lpBaseAddress, unpackedValue): + """ + Writes a double to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_double} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Floating point value to write. + + @raise WindowsError: On error an exception is raised. + """ + self.__write_c_type(lpBaseAddress, '@d', unpackedValue) + + def read_pointer(self, lpBaseAddress): + """ + Reads a pointer value from the memory of the process. + + @see: L{peek_pointer} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Pointer value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return self.__read_c_type(lpBaseAddress, '@P', ctypes.c_void_p) + + def write_pointer(self, lpBaseAddress, unpackedValue): + """ + Writes a pointer value to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_pointer} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. 
+ + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @raise WindowsError: On error an exception is raised. + """ + self.__write_c_type(lpBaseAddress, '@P', unpackedValue) + + def read_dword(self, lpBaseAddress): + """ + Reads a DWORD from the memory of the process. + + @see: L{peek_dword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return self.__read_c_type(lpBaseAddress, '=L', win32.DWORD) + + def write_dword(self, lpBaseAddress, unpackedValue): + """ + Writes a DWORD to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_dword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @raise WindowsError: On error an exception is raised. + """ + self.__write_c_type(lpBaseAddress, '=L', unpackedValue) + + def read_qword(self, lpBaseAddress): + """ + Reads a QWORD from the memory of the process. + + @see: L{peek_qword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + return self.__read_c_type(lpBaseAddress, '=Q', win32.QWORD) + + def write_qword(self, lpBaseAddress, unpackedValue): + """ + Writes a QWORD to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{poke_qword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @raise WindowsError: On error an exception is raised. + """ + self.__write_c_type(lpBaseAddress, '=Q', unpackedValue) + + def read_structure(self, lpBaseAddress, stype): + """ + Reads a ctypes structure from the memory of the process. + + @see: L{read} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @type stype: class ctypes.Structure or a subclass. + @param stype: Structure definition. + + @rtype: int + @return: Structure instance filled in with data + read from the process memory. + + @raise WindowsError: On error an exception is raised. + """ + if type(lpBaseAddress) not in (type(0), type(long(0))): + lpBaseAddress = ctypes.cast(lpBaseAddress, ctypes.c_void_p) + data = self.read(lpBaseAddress, ctypes.sizeof(stype)) + buff = ctypes.create_string_buffer(data) + ptr = ctypes.cast(ctypes.pointer(buff), ctypes.POINTER(stype)) + return ptr.contents + +# XXX TODO +## def write_structure(self, lpBaseAddress, sStructure): +## """ +## Writes a ctypes structure into the memory of the process. +## +## @note: Page permissions may be changed temporarily while writing. +## +## @see: L{write} +## +## @type lpBaseAddress: int +## @param lpBaseAddress: Memory address to begin writing. +## +## @type sStructure: ctypes.Structure or a subclass' instance. +## @param sStructure: Structure definition. +## +## @rtype: int +## @return: Structure instance filled in with data +## read from the process memory. +## +## @raise WindowsError: On error an exception is raised. 
+## """ +## size = ctypes.sizeof(sStructure) +## data = ctypes.create_string_buffer("", size = size) +## win32.CopyMemory(ctypes.byref(data), ctypes.byref(sStructure), size) +## self.write(lpBaseAddress, data.raw) + + def read_string(self, lpBaseAddress, nChars, fUnicode = False): + """ + Reads an ASCII or Unicode string + from the address space of the process. + + @see: L{peek_string} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @type nChars: int + @param nChars: String length to read, in characters. + Remember that Unicode strings have two byte characters. + + @type fUnicode: bool + @param fUnicode: C{True} is the string is expected to be Unicode, + C{False} if it's expected to be ANSI. + + @rtype: str, compat.unicode + @return: String read from the process memory space. + + @raise WindowsError: On error an exception is raised. + """ + if fUnicode: + nChars = nChars * 2 + szString = self.read(lpBaseAddress, nChars) + if fUnicode: + szString = compat.unicode(szString, 'U16', 'ignore') + return szString + +#------------------------------------------------------------------------------ + + # FIXME this won't work properly with a different endianness! + def __peek_c_type(self, address, format, c_type): + size = ctypes.sizeof(c_type) + packed = self.peek(address, size) + if len(packed) < size: + packed = '\0' * (size - len(packed)) + packed + elif len(packed) > size: + packed = packed[:size] + return struct.unpack(format, packed)[0] + + def __poke_c_type(self, address, format, unpacked): + packed = struct.pack('@L', unpacked) + return self.poke(address, packed) + + def peek(self, lpBaseAddress, nSize): + """ + Reads the memory of the process. + + @see: L{read} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @type nSize: int + @param nSize: Number of bytes to read. + + @rtype: str + @return: Bytes read from the process memory. + Returns an empty string on error. + """ + # XXX TODO + # + Maybe change page permissions before trying to read? + # + Maybe use mquery instead of get_memory_map? + # (less syscalls if we break out of the loop earlier) + data = '' + if nSize > 0: + try: + hProcess = self.get_handle( win32.PROCESS_VM_READ | + win32.PROCESS_QUERY_INFORMATION ) + for mbi in self.get_memory_map(lpBaseAddress, + lpBaseAddress + nSize): + if not mbi.is_readable(): + nSize = mbi.BaseAddress - lpBaseAddress + break + if nSize > 0: + data = win32.ReadProcessMemory( + hProcess, lpBaseAddress, nSize) + except WindowsError: + e = sys.exc_info()[1] + msg = "Error reading process %d address %s: %s" + msg %= (self.get_pid(), + HexDump.address(lpBaseAddress), + e.strerror) + warnings.warn(msg) + return data + + def poke(self, lpBaseAddress, lpBuffer): + """ + Writes to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type lpBuffer: str + @param lpBuffer: Bytes to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. 
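+
+ Illustrative usage (a sketch added for clarity, not part of the
+ original documentation; C{process} and C{address} are assumed valid
+ and the payload below is purely hypothetical)::
+ payload = b"PATCHED!"  # hypothetical 8 byte marker
+ written = process.poke(address, payload)
+ if written != len(payload):
+ print("short write: %d of %d bytes" % (written, len(payload)))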
+ """ + assert isinstance(lpBuffer, compat.bytes) + hProcess = self.get_handle( win32.PROCESS_VM_WRITE | + win32.PROCESS_VM_OPERATION | + win32.PROCESS_QUERY_INFORMATION ) + mbi = self.mquery(lpBaseAddress) + if not mbi.has_content(): + raise ctypes.WinError(win32.ERROR_INVALID_ADDRESS) + if mbi.is_image() or mbi.is_mapped(): + prot = win32.PAGE_WRITECOPY + elif mbi.is_writeable(): + prot = None + elif mbi.is_executable(): + prot = win32.PAGE_EXECUTE_READWRITE + else: + prot = win32.PAGE_READWRITE + if prot is not None: + try: + self.mprotect(lpBaseAddress, len(lpBuffer), prot) + except Exception: + prot = None + msg = ("Failed to adjust page permissions" + " for process %s at address %s: %s") + msg = msg % (self.get_pid(), + HexDump.address(lpBaseAddress, self.get_bits()), + traceback.format_exc()) + warnings.warn(msg, RuntimeWarning) + try: + r = win32.WriteProcessMemory(hProcess, lpBaseAddress, lpBuffer) + finally: + if prot is not None: + self.mprotect(lpBaseAddress, len(lpBuffer), mbi.Protect) + return r + + def peek_char(self, lpBaseAddress): + """ + Reads a single character from the memory of the process. + + @see: L{read_char} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Character read from the process memory. + Returns zero on error. + """ + char = self.peek(lpBaseAddress, 1) + if char: + return ord(char) + return 0 + + def poke_char(self, lpBaseAddress, char): + """ + Writes a single character to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write_char} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type char: str + @param char: Character to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.poke(lpBaseAddress, chr(char)) + + def peek_int(self, lpBaseAddress): + """ + Reads a signed integer from the memory of the process. + + @see: L{read_int} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + Returns zero on error. + """ + return self.__peek_c_type(lpBaseAddress, '@l', ctypes.c_int) + + def poke_int(self, lpBaseAddress, unpackedValue): + """ + Writes a signed integer to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write_int} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.__poke_c_type(lpBaseAddress, '@l', unpackedValue) + + def peek_uint(self, lpBaseAddress): + """ + Reads an unsigned integer from the memory of the process. + + @see: L{read_uint} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + Returns zero on error. + """ + return self.__peek_c_type(lpBaseAddress, '@L', ctypes.c_uint) + + def poke_uint(self, lpBaseAddress, unpackedValue): + """ + Writes an unsigned integer to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write_uint} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. 
+ + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.__poke_c_type(lpBaseAddress, '@L', unpackedValue) + + def peek_float(self, lpBaseAddress): + """ + Reads a float from the memory of the process. + + @see: L{read_float} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + Returns zero on error. + """ + return self.__peek_c_type(lpBaseAddress, '@f', ctypes.c_float) + + def poke_float(self, lpBaseAddress, unpackedValue): + """ + Writes a float to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write_float} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.__poke_c_type(lpBaseAddress, '@f', unpackedValue) + + def peek_double(self, lpBaseAddress): + """ + Reads a double from the memory of the process. + + @see: L{read_double} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + Returns zero on error. + """ + return self.__peek_c_type(lpBaseAddress, '@d', ctypes.c_double) + + def poke_double(self, lpBaseAddress, unpackedValue): + """ + Writes a double to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write_double} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.__poke_c_type(lpBaseAddress, '@d', unpackedValue) + + def peek_dword(self, lpBaseAddress): + """ + Reads a DWORD from the memory of the process. + + @see: L{read_dword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + Returns zero on error. + """ + return self.__peek_c_type(lpBaseAddress, '=L', win32.DWORD) + + def poke_dword(self, lpBaseAddress, unpackedValue): + """ + Writes a DWORD to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write_dword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.__poke_c_type(lpBaseAddress, '=L', unpackedValue) + + def peek_qword(self, lpBaseAddress): + """ + Reads a QWORD from the memory of the process. + + @see: L{read_qword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Integer value read from the process memory. + Returns zero on error. + """ + return self.__peek_c_type(lpBaseAddress, '=Q', win32.QWORD) + + def poke_qword(self, lpBaseAddress, unpackedValue): + """ + Writes a QWORD to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. 
+ + @see: L{write_qword} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.__poke_c_type(lpBaseAddress, '=Q', unpackedValue) + + def peek_pointer(self, lpBaseAddress): + """ + Reads a pointer value from the memory of the process. + + @see: L{read_pointer} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @rtype: int + @return: Pointer value read from the process memory. + Returns zero on error. + """ + return self.__peek_c_type(lpBaseAddress, '@P', ctypes.c_void_p) + + def poke_pointer(self, lpBaseAddress, unpackedValue): + """ + Writes a pointer value to the memory of the process. + + @note: Page permissions may be changed temporarily while writing. + + @see: L{write_pointer} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin writing. + + @type unpackedValue: int, long + @param unpackedValue: Value to write. + + @rtype: int + @return: Number of bytes written. + May be less than the number of bytes to write. + """ + return self.__poke_c_type(lpBaseAddress, '@P', unpackedValue) + + def peek_string(self, lpBaseAddress, fUnicode = False, dwMaxSize = 0x1000): + """ + Tries to read an ASCII or Unicode string + from the address space of the process. + + @see: L{read_string} + + @type lpBaseAddress: int + @param lpBaseAddress: Memory address to begin reading. + + @type fUnicode: bool + @param fUnicode: C{True} is the string is expected to be Unicode, + C{False} if it's expected to be ANSI. + + @type dwMaxSize: int + @param dwMaxSize: Maximum allowed string length to read, in bytes. + + @rtype: str, compat.unicode + @return: String read from the process memory space. + It B{doesn't} include the terminating null character. + Returns an empty string on failure. + """ + + # Validate the parameters. + if not lpBaseAddress or dwMaxSize == 0: + if fUnicode: + return u'' + return '' + if not dwMaxSize: + dwMaxSize = 0x1000 + + # Read the string. + szString = self.peek(lpBaseAddress, dwMaxSize) + + # If the string is Unicode... + if fUnicode: + + # Decode the string. + szString = compat.unicode(szString, 'U16', 'replace') +## try: +## szString = compat.unicode(szString, 'U16') +## except UnicodeDecodeError: +## szString = struct.unpack('H' * (len(szString) / 2), szString) +## szString = [ unichr(c) for c in szString ] +## szString = u''.join(szString) + + # Truncate the string when the first null char is found. + szString = szString[ : szString.find(u'\0') ] + + # If the string is ANSI... + else: + + # Truncate the string when the first null char is found. + szString = szString[ : szString.find('\0') ] + + # Return the decoded string. + return szString + + # TODO + # try to avoid reading the same page twice by caching it + def peek_pointers_in_data(self, data, peekSize = 16, peekStep = 1): + """ + Tries to guess which values in the given data are valid pointers, + and reads some data from them. + + @see: L{peek} + + @type data: str + @param data: Binary data to find pointers in. + + @type peekSize: int + @param peekSize: Number of bytes to read from each pointer found. + + @type peekStep: int + @param peekStep: Expected data alignment. + Tipically you specify 1 when data alignment is unknown, + or 4 when you expect data to be DWORD aligned. + Any other value may be specified. 
+
+ @rtype: dict( str S{->} str )
+ @return: Dictionary mapping stack offsets to the data they point to.
+ """
+ result = dict()
+ ptrSize = win32.sizeof(win32.LPVOID)
+ if ptrSize == 4:
+ ptrFmt = '<L'
+ else:
+ ptrFmt = '<Q'
+ if len(data) > 0:
+ for i in compat.xrange(0, len(data), peekStep):
+ packed = data[i:i+ptrSize]
+ if len(packed) == ptrSize:
+ address = struct.unpack(ptrFmt, packed)[0]
+## if not address & (~0xFFFF): continue
+ peek_data = self.peek(address, peekSize)
+ if peek_data:
+ result[i] = peek_data
+ return result
+
+#------------------------------------------------------------------------------
+
+ def malloc(self, dwSize, lpAddress = None):
+ """
+ Allocates memory into the address space of the process.
+
+ @see: L{free}
+
+ @type dwSize: int
+ @param dwSize: Number of bytes to allocate.
+
+ @type lpAddress: int
+ @param lpAddress: (Optional)
+ Desired address for the newly allocated memory.
+ This is only a hint, the memory could still be allocated somewhere
+ else.
+
+ @rtype: int
+ @return: Address of the newly allocated memory.
+
+ @raise WindowsError: On error an exception is raised.
+ """
+ hProcess = self.get_handle(win32.PROCESS_VM_OPERATION)
+ return win32.VirtualAllocEx(hProcess, lpAddress, dwSize)
+
+ def mprotect(self, lpAddress, dwSize, flNewProtect):
+ """
+ Set memory protection in the address space of the process.
+
+ @see: U{http://msdn.microsoft.com/en-us/library/aa366899.aspx}
+
+ @type lpAddress: int
+ @param lpAddress: Address of memory to protect.
+
+ @type dwSize: int
+ @param dwSize: Number of bytes to protect.
+
+ @type flNewProtect: int
+ @param flNewProtect: New protect flags.
+
+ @rtype: int
+ @return: Old protect flags.
+
+ @raise WindowsError: On error an exception is raised.
+ """
+ hProcess = self.get_handle(win32.PROCESS_VM_OPERATION)
+ return win32.VirtualProtectEx(hProcess, lpAddress, dwSize, flNewProtect)
+
+ def mquery(self, lpAddress):
+ """
+ Query memory information from the address space of the process.
+ Returns a L{win32.MemoryBasicInformation} object.
+
+ @see: U{http://msdn.microsoft.com/en-us/library/aa366907(VS.85).aspx}
+
+ @type lpAddress: int
+ @param lpAddress: Address of memory to query.
+
+ @rtype: L{win32.MemoryBasicInformation}
+ @return: Memory region information.
+
+ @raise WindowsError: On error an exception is raised.
+ """
+ hProcess = self.get_handle(win32.PROCESS_QUERY_INFORMATION)
+ return win32.VirtualQueryEx(hProcess, lpAddress)
+
+ def free(self, lpAddress):
+ """
+ Frees memory from the address space of the process.
+
+ @see: U{http://msdn.microsoft.com/en-us/library/aa366894(v=vs.85).aspx}
+
+ @type lpAddress: int
+ @param lpAddress: Address of memory to free.
+ Must be the base address returned by L{malloc}.
+
+ @raise WindowsError: On error an exception is raised.
+ """
+ hProcess = self.get_handle(win32.PROCESS_VM_OPERATION)
+ win32.VirtualFreeEx(hProcess, lpAddress)
+
+#------------------------------------------------------------------------------
+
+ def is_pointer(self, address):
+ """
+ Determines if an address is a valid code or data pointer.
+
+ That is, the address must be valid and must point to code or data in
+ the target process.
+
+ @type address: int
+ @param address: Memory address to query.
+
+ @rtype: bool
+ @return: C{True} if the address is a valid code or data pointer.
+
+ @raise WindowsError: An exception is raised on error.
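+
+ Illustrative usage (a sketch added for clarity, not part of the
+ original documentation; C{process} and C{address} are assumed to be an
+ attached L{Process} instance and a value read from its memory)::
+ if process.is_pointer(address):
+ data = process.peek(address, 16)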
+ """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.has_content() + + def is_address_valid(self, address): + """ + Determines if an address is a valid user mode address. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: C{True} if the address is a valid user mode address. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return True + + def is_address_free(self, address): + """ + Determines if an address belongs to a free page. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: C{True} if the address belongs to a free page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_free() + + def is_address_reserved(self, address): + """ + Determines if an address belongs to a reserved page. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: C{True} if the address belongs to a reserved page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_reserved() + + def is_address_commited(self, address): + """ + Determines if an address belongs to a commited page. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: C{True} if the address belongs to a commited page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_commited() + + def is_address_guard(self, address): + """ + Determines if an address belongs to a guard page. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: C{True} if the address belongs to a guard page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_guard() + + def is_address_readable(self, address): + """ + Determines if an address belongs to a commited and readable page. + The page may or may not have additional permissions. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: + C{True} if the address belongs to a commited and readable page. + + @raise WindowsError: An exception is raised on error. 
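+
+ Illustrative usage (a sketch added for clarity, not part of the
+ original documentation; C{process} and C{address} are assumed)::
+ flags = {
+ "readable": process.is_address_readable(address),
+ "writeable": process.is_address_writeable(address),
+ "executable": process.is_address_executable(address),
+ }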
+ """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_readable() + + def is_address_writeable(self, address): + """ + Determines if an address belongs to a commited and writeable page. + The page may or may not have additional permissions. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: + C{True} if the address belongs to a commited and writeable page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_writeable() + + def is_address_copy_on_write(self, address): + """ + Determines if an address belongs to a commited, copy-on-write page. + The page may or may not have additional permissions. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: + C{True} if the address belongs to a commited, copy-on-write page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_copy_on_write() + + def is_address_executable(self, address): + """ + Determines if an address belongs to a commited and executable page. + The page may or may not have additional permissions. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: + C{True} if the address belongs to a commited and executable page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_executable() + + def is_address_executable_and_writeable(self, address): + """ + Determines if an address belongs to a commited, writeable and + executable page. The page may or may not have additional permissions. + + Looking for writeable and executable pages is important when + exploiting a software vulnerability. + + @note: Returns always C{False} for kernel mode addresses. + + @type address: int + @param address: Memory address to query. + + @rtype: bool + @return: + C{True} if the address belongs to a commited, writeable and + executable page. + + @raise WindowsError: An exception is raised on error. + """ + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + return mbi.is_executable_and_writeable() + + def is_buffer(self, address, size): + """ + Determines if the given memory area is a valid code or data buffer. + + @note: Returns always C{False} for kernel mode addresses. + + @see: L{mquery} + + @type address: int + @param address: Memory address. + + @type size: int + @param size: Number of bytes. Must be greater than zero. + + @rtype: bool + @return: C{True} if the memory area is a valid code or data buffer, + C{False} otherwise. + + @raise ValueError: The size argument must be greater than zero. + @raise WindowsError: On error an exception is raised. 
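+
+ Illustrative usage (a sketch added for clarity, not part of the
+ original documentation; C{process}, C{address} and C{size} are
+ assumed)::
+ if process.is_buffer(address, size):
+ data = process.read(address, size)  # raises on bad addresses
+ else:
+ data = process.peek(address, size)  # best effort read instead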
+ """ + if size <= 0: + raise ValueError("The size argument must be greater than zero") + while size > 0: + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + if not mbi.has_content(): + return False + size = size - mbi.RegionSize + return True + + def is_buffer_readable(self, address, size): + """ + Determines if the given memory area is readable. + + @note: Returns always C{False} for kernel mode addresses. + + @see: L{mquery} + + @type address: int + @param address: Memory address. + + @type size: int + @param size: Number of bytes. Must be greater than zero. + + @rtype: bool + @return: C{True} if the memory area is readable, C{False} otherwise. + + @raise ValueError: The size argument must be greater than zero. + @raise WindowsError: On error an exception is raised. + """ + if size <= 0: + raise ValueError("The size argument must be greater than zero") + while size > 0: + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + if not mbi.is_readable(): + return False + size = size - mbi.RegionSize + return True + + def is_buffer_writeable(self, address, size): + """ + Determines if the given memory area is writeable. + + @note: Returns always C{False} for kernel mode addresses. + + @see: L{mquery} + + @type address: int + @param address: Memory address. + + @type size: int + @param size: Number of bytes. Must be greater than zero. + + @rtype: bool + @return: C{True} if the memory area is writeable, C{False} otherwise. + + @raise ValueError: The size argument must be greater than zero. + @raise WindowsError: On error an exception is raised. + """ + if size <= 0: + raise ValueError("The size argument must be greater than zero") + while size > 0: + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + if not mbi.is_writeable(): + return False + size = size - mbi.RegionSize + return True + + def is_buffer_copy_on_write(self, address, size): + """ + Determines if the given memory area is marked as copy-on-write. + + @note: Returns always C{False} for kernel mode addresses. + + @see: L{mquery} + + @type address: int + @param address: Memory address. + + @type size: int + @param size: Number of bytes. Must be greater than zero. + + @rtype: bool + @return: C{True} if the memory area is marked as copy-on-write, + C{False} otherwise. + + @raise ValueError: The size argument must be greater than zero. + @raise WindowsError: On error an exception is raised. + """ + if size <= 0: + raise ValueError("The size argument must be greater than zero") + while size > 0: + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + if not mbi.is_copy_on_write(): + return False + size = size - mbi.RegionSize + return True + + def is_buffer_executable(self, address, size): + """ + Determines if the given memory area is executable. + + @note: Returns always C{False} for kernel mode addresses. + + @see: L{mquery} + + @type address: int + @param address: Memory address. + + @type size: int + @param size: Number of bytes. Must be greater than zero. + + @rtype: bool + @return: C{True} if the memory area is executable, C{False} otherwise. + + @raise ValueError: The size argument must be greater than zero. 
+ @raise WindowsError: On error an exception is raised. + """ + if size <= 0: + raise ValueError("The size argument must be greater than zero") + while size > 0: + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + if not mbi.is_executable(): + return False + size = size - mbi.RegionSize + return True + + def is_buffer_executable_and_writeable(self, address, size): + """ + Determines if the given memory area is writeable and executable. + + Looking for writeable and executable pages is important when + exploiting a software vulnerability. + + @note: Returns always C{False} for kernel mode addresses. + + @see: L{mquery} + + @type address: int + @param address: Memory address. + + @type size: int + @param size: Number of bytes. Must be greater than zero. + + @rtype: bool + @return: C{True} if the memory area is writeable and executable, + C{False} otherwise. + + @raise ValueError: The size argument must be greater than zero. + @raise WindowsError: On error an exception is raised. + """ + if size <= 0: + raise ValueError("The size argument must be greater than zero") + while size > 0: + try: + mbi = self.mquery(address) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + return False + raise + if not mbi.is_executable(): + return False + size = size - mbi.RegionSize + return True + + def get_memory_map(self, minAddr = None, maxAddr = None): + """ + Produces a memory map to the process address space. + + Optionally restrict the map to the given address range. + + @see: L{mquery} + + @type minAddr: int + @param minAddr: (Optional) Starting address in address range to query. + + @type maxAddr: int + @param maxAddr: (Optional) Ending address in address range to query. + + @rtype: list( L{win32.MemoryBasicInformation} ) + @return: List of memory region information objects. + """ + return list(self.iter_memory_map(minAddr, maxAddr)) + + def generate_memory_map(self, minAddr = None, maxAddr = None): + """ + Returns a L{Regenerator} that can iterate indefinitely over the memory + map to the process address space. + + Optionally restrict the map to the given address range. + + @see: L{mquery} + + @type minAddr: int + @param minAddr: (Optional) Starting address in address range to query. + + @type maxAddr: int + @param maxAddr: (Optional) Ending address in address range to query. + + @rtype: L{Regenerator} of L{win32.MemoryBasicInformation} + @return: List of memory region information objects. + """ + return Regenerator(self.iter_memory_map, minAddr, maxAddr) + + def iter_memory_map(self, minAddr = None, maxAddr = None): + """ + Produces an iterator over the memory map to the process address space. + + Optionally restrict the map to the given address range. + + @see: L{mquery} + + @type minAddr: int + @param minAddr: (Optional) Starting address in address range to query. + + @type maxAddr: int + @param maxAddr: (Optional) Ending address in address range to query. + + @rtype: iterator of L{win32.MemoryBasicInformation} + @return: List of memory region information objects. 
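+
+ Illustrative usage (a sketch added for clarity, not part of the
+ original documentation; C{process} is assumed to be an attached
+ L{Process} instance)::
+ for mbi in process.iter_memory_map():
+ if mbi.has_content():
+ print("%08x - %08x" % (mbi.BaseAddress,
+ mbi.BaseAddress + mbi.RegionSize))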
+ """ + minAddr, maxAddr = MemoryAddresses.align_address_range(minAddr,maxAddr) + prevAddr = minAddr - 1 + currentAddr = minAddr + while prevAddr < currentAddr < maxAddr: + try: + mbi = self.mquery(currentAddr) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_INVALID_PARAMETER: + break + raise + yield mbi + prevAddr = currentAddr + currentAddr = mbi.BaseAddress + mbi.RegionSize + + def get_mapped_filenames(self, memoryMap = None): + """ + Retrieves the filenames for memory mapped files in the debugee. + + @type memoryMap: list( L{win32.MemoryBasicInformation} ) + @param memoryMap: (Optional) Memory map returned by L{get_memory_map}. + If not given, the current memory map is used. + + @rtype: dict( int S{->} str ) + @return: Dictionary mapping memory addresses to file names. + Native filenames are converted to Win32 filenames when possible. + """ + hProcess = self.get_handle( win32.PROCESS_VM_READ | + win32.PROCESS_QUERY_INFORMATION ) + if not memoryMap: + memoryMap = self.get_memory_map() + mappedFilenames = dict() + for mbi in memoryMap: + if mbi.Type not in (win32.MEM_IMAGE, win32.MEM_MAPPED): + continue + baseAddress = mbi.BaseAddress + fileName = "" + try: + fileName = win32.GetMappedFileName(hProcess, baseAddress) + fileName = PathOperations.native_to_win32_pathname(fileName) + except WindowsError: + #e = sys.exc_info()[1] + #try: + # msg = "Can't get mapped file name at address %s in process " \ + # "%d, reason: %s" % (HexDump.address(baseAddress), + # self.get_pid(), + # e.strerror) + # warnings.warn(msg, Warning) + #except Exception: + pass + mappedFilenames[baseAddress] = fileName + return mappedFilenames + + def generate_memory_snapshot(self, minAddr = None, maxAddr = None): + """ + Returns a L{Regenerator} that allows you to iterate through the memory + contents of a process indefinitely. + + It's basically the same as the L{take_memory_snapshot} method, but it + takes the snapshot of each memory region as it goes, as opposed to + taking the whole snapshot at once. This allows you to work with very + large snapshots without a significant performance penalty. + + Example:: + # Print the memory contents of a process. + process.suspend() + try: + snapshot = process.generate_memory_snapshot() + for mbi in snapshot: + print HexDump.hexblock(mbi.content, mbi.BaseAddress) + finally: + process.resume() + + The downside of this is the process must remain suspended while + iterating the snapshot, otherwise strange things may happen. + + The snapshot can be iterated more than once. Each time it's iterated + the memory contents of the process will be fetched again. + + You can also iterate the memory of a dead process, just as long as the + last open handle to it hasn't been closed. + + @see: L{take_memory_snapshot} + + @type minAddr: int + @param minAddr: (Optional) Starting address in address range to query. + + @type maxAddr: int + @param maxAddr: (Optional) Ending address in address range to query. + + @rtype: L{Regenerator} of L{win32.MemoryBasicInformation} + @return: Generator that when iterated returns memory region information + objects. Two extra properties are added to these objects: + - C{filename}: Mapped filename, or C{None}. + - C{content}: Memory contents, or C{None}. + """ + return Regenerator(self.iter_memory_snapshot, minAddr, maxAddr) + + def iter_memory_snapshot(self, minAddr = None, maxAddr = None): + """ + Returns an iterator that allows you to go through the memory contents + of a process. 
+ + It's basically the same as the L{take_memory_snapshot} method, but it + takes the snapshot of each memory region as it goes, as opposed to + taking the whole snapshot at once. This allows you to work with very + large snapshots without a significant performance penalty. + + Example:: + # Print the memory contents of a process. + process.suspend() + try: + snapshot = process.generate_memory_snapshot() + for mbi in snapshot: + print HexDump.hexblock(mbi.content, mbi.BaseAddress) + finally: + process.resume() + + The downside of this is the process must remain suspended while + iterating the snapshot, otherwise strange things may happen. + + The snapshot can only iterated once. To be able to iterate indefinitely + call the L{generate_memory_snapshot} method instead. + + You can also iterate the memory of a dead process, just as long as the + last open handle to it hasn't been closed. + + @see: L{take_memory_snapshot} + + @type minAddr: int + @param minAddr: (Optional) Starting address in address range to query. + + @type maxAddr: int + @param maxAddr: (Optional) Ending address in address range to query. + + @rtype: iterator of L{win32.MemoryBasicInformation} + @return: Iterator of memory region information objects. + Two extra properties are added to these objects: + - C{filename}: Mapped filename, or C{None}. + - C{content}: Memory contents, or C{None}. + """ + + # One may feel tempted to include calls to self.suspend() and + # self.resume() here, but that wouldn't work on a dead process. + # It also wouldn't be needed when debugging since the process is + # already suspended when the debug event arrives. So it's up to + # the user to suspend the process if needed. + + # Get the memory map. + memory = self.get_memory_map(minAddr, maxAddr) + + # Abort if the map couldn't be retrieved. + if not memory: + return + + # Get the mapped filenames. + # Don't fail on access denied errors. + try: + filenames = self.get_mapped_filenames(memory) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror != win32.ERROR_ACCESS_DENIED: + raise + filenames = dict() + + # Trim the first memory information block if needed. + if minAddr is not None: + minAddr = MemoryAddresses.align_address_to_page_start(minAddr) + mbi = memory[0] + if mbi.BaseAddress < minAddr: + mbi.RegionSize = mbi.BaseAddress + mbi.RegionSize - minAddr + mbi.BaseAddress = minAddr + + # Trim the last memory information block if needed. + if maxAddr is not None: + if maxAddr != MemoryAddresses.align_address_to_page_start(maxAddr): + maxAddr = MemoryAddresses.align_address_to_page_end(maxAddr) + mbi = memory[-1] + if mbi.BaseAddress + mbi.RegionSize > maxAddr: + mbi.RegionSize = maxAddr - mbi.BaseAddress + + # Read the contents of each block and yield it. + while memory: + mbi = memory.pop(0) # so the garbage collector can take it + mbi.filename = filenames.get(mbi.BaseAddress, None) + if mbi.has_content(): + mbi.content = self.read(mbi.BaseAddress, mbi.RegionSize) + else: + mbi.content = None + yield mbi + + def take_memory_snapshot(self, minAddr = None, maxAddr = None): + """ + Takes a snapshot of the memory contents of the process. + + It's best if the process is suspended (if alive) when taking the + snapshot. Execution can be resumed afterwards. + + Example:: + # Print the memory contents of a process. 
+ process.suspend() + try: + snapshot = process.take_memory_snapshot() + for mbi in snapshot: + print HexDump.hexblock(mbi.content, mbi.BaseAddress) + finally: + process.resume() + + You can also iterate the memory of a dead process, just as long as the + last open handle to it hasn't been closed. + + @warning: If the target process has a very big memory footprint, the + resulting snapshot will be equally big. This may result in a severe + performance penalty. + + @see: L{generate_memory_snapshot} + + @type minAddr: int + @param minAddr: (Optional) Starting address in address range to query. + + @type maxAddr: int + @param maxAddr: (Optional) Ending address in address range to query. + + @rtype: list( L{win32.MemoryBasicInformation} ) + @return: List of memory region information objects. + Two extra properties are added to these objects: + - C{filename}: Mapped filename, or C{None}. + - C{content}: Memory contents, or C{None}. + """ + return list( self.iter_memory_snapshot(minAddr, maxAddr) ) + + def restore_memory_snapshot(self, snapshot, + bSkipMappedFiles = True, + bSkipOnError = False): + """ + Attempts to restore the memory state as it was when the given snapshot + was taken. + + @warning: Currently only the memory contents, state and protect bits + are restored. Under some circumstances this method may fail (for + example if memory was freed and then reused by a mapped file). + + @type snapshot: list( L{win32.MemoryBasicInformation} ) + @param snapshot: Memory snapshot returned by L{take_memory_snapshot}. + Snapshots returned by L{generate_memory_snapshot} don't work here. + + @type bSkipMappedFiles: bool + @param bSkipMappedFiles: C{True} to avoid restoring the contents of + memory mapped files, C{False} otherwise. Use with care! Setting + this to C{False} can cause undesired side effects - changes to + memory mapped files may be written to disk by the OS. Also note + that most mapped files are typically executables and don't change, + so trying to restore their contents is usually a waste of time. + + @type bSkipOnError: bool + @param bSkipOnError: C{True} to issue a warning when an error occurs + during the restoration of the snapshot, C{False} to stop and raise + an exception instead. Use with care! Setting this to C{True} will + cause the debugger to falsely believe the memory snapshot has been + correctly restored. + + @raise WindowsError: An error occured while restoring the snapshot. + @raise RuntimeError: An error occured while restoring the snapshot. + @raise TypeError: A snapshot of the wrong type was passed. + """ + if not snapshot or not isinstance(snapshot, list) \ + or not isinstance(snapshot[0], win32.MemoryBasicInformation): + raise TypeError( "Only snapshots returned by " \ + "take_memory_snapshot() can be used here." ) + + # Get the process handle. + hProcess = self.get_handle( win32.PROCESS_VM_WRITE | + win32.PROCESS_VM_OPERATION | + win32.PROCESS_SUSPEND_RESUME | + win32.PROCESS_QUERY_INFORMATION ) + + # Freeze the process. + self.suspend() + try: + + # For each memory region in the snapshot... + for old_mbi in snapshot: + + # If the region matches, restore it directly. + new_mbi = self.mquery(old_mbi.BaseAddress) + if new_mbi.BaseAddress == old_mbi.BaseAddress and \ + new_mbi.RegionSize == old_mbi.RegionSize: + self.__restore_mbi(hProcess, new_mbi, old_mbi, + bSkipMappedFiles) + + # If the region doesn't match, restore it page by page. + else: + + # We need a copy so we don't corrupt the snapshot. 
+ old_mbi = win32.MemoryBasicInformation(old_mbi) + + # Get the overlapping range of pages. + old_start = old_mbi.BaseAddress + old_end = old_start + old_mbi.RegionSize + new_start = new_mbi.BaseAddress + new_end = new_start + new_mbi.RegionSize + if old_start > new_start: + start = old_start + else: + start = new_start + if old_end < new_end: + end = old_end + else: + end = new_end + + # Restore each page in the overlapping range. + step = MemoryAddresses.pageSize + old_mbi.RegionSize = step + new_mbi.RegionSize = step + address = start + while address < end: + old_mbi.BaseAddress = address + new_mbi.BaseAddress = address + self.__restore_mbi(hProcess, new_mbi, old_mbi, + bSkipMappedFiles, bSkipOnError) + address = address + step + + # Resume execution. + finally: + self.resume() + + def __restore_mbi(self, hProcess, new_mbi, old_mbi, bSkipMappedFiles, + bSkipOnError): + """ + Used internally by L{restore_memory_snapshot}. + """ + +## print "Restoring %s-%s" % ( +## HexDump.address(old_mbi.BaseAddress, self.get_bits()), +## HexDump.address(old_mbi.BaseAddress + old_mbi.RegionSize, +## self.get_bits())) + + try: + + # Restore the region state. + if new_mbi.State != old_mbi.State: + if new_mbi.is_free(): + if old_mbi.is_reserved(): + + # Free -> Reserved + address = win32.VirtualAllocEx(hProcess, + old_mbi.BaseAddress, + old_mbi.RegionSize, + win32.MEM_RESERVE, + old_mbi.Protect) + if address != old_mbi.BaseAddress: + self.free(address) + msg = "Error restoring region at address %s" + msg = msg % HexDump(old_mbi.BaseAddress, + self.get_bits()) + raise RuntimeError(msg) + # permissions already restored + new_mbi.Protect = old_mbi.Protect + + else: # elif old_mbi.is_commited(): + + # Free -> Commited + address = win32.VirtualAllocEx(hProcess, + old_mbi.BaseAddress, + old_mbi.RegionSize, + win32.MEM_RESERVE | \ + win32.MEM_COMMIT, + old_mbi.Protect) + if address != old_mbi.BaseAddress: + self.free(address) + msg = "Error restoring region at address %s" + msg = msg % HexDump(old_mbi.BaseAddress, + self.get_bits()) + raise RuntimeError(msg) + # permissions already restored + new_mbi.Protect = old_mbi.Protect + + elif new_mbi.is_reserved(): + if old_mbi.is_commited(): + + # Reserved -> Commited + address = win32.VirtualAllocEx(hProcess, + old_mbi.BaseAddress, + old_mbi.RegionSize, + win32.MEM_COMMIT, + old_mbi.Protect) + if address != old_mbi.BaseAddress: + self.free(address) + msg = "Error restoring region at address %s" + msg = msg % HexDump(old_mbi.BaseAddress, + self.get_bits()) + raise RuntimeError(msg) + # permissions already restored + new_mbi.Protect = old_mbi.Protect + + else: # elif old_mbi.is_free(): + + # Reserved -> Free + win32.VirtualFreeEx(hProcess, + old_mbi.BaseAddress, + old_mbi.RegionSize, + win32.MEM_RELEASE) + + else: # elif new_mbi.is_commited(): + if old_mbi.is_reserved(): + + # Commited -> Reserved + win32.VirtualFreeEx(hProcess, + old_mbi.BaseAddress, + old_mbi.RegionSize, + win32.MEM_DECOMMIT) + + else: # elif old_mbi.is_free(): + + # Commited -> Free + win32.VirtualFreeEx(hProcess, + old_mbi.BaseAddress, + old_mbi.RegionSize, + win32.MEM_DECOMMIT | win32.MEM_RELEASE) + + new_mbi.State = old_mbi.State + + # Restore the region permissions. + if old_mbi.is_commited() and old_mbi.Protect != new_mbi.Protect: + win32.VirtualProtectEx(hProcess, old_mbi.BaseAddress, + old_mbi.RegionSize, old_mbi.Protect) + new_mbi.Protect = old_mbi.Protect + + # Restore the region data. + # Ignore write errors when the region belongs to a mapped file. 
+ if old_mbi.has_content(): + if old_mbi.Type != 0: + if not bSkipMappedFiles: + self.poke(old_mbi.BaseAddress, old_mbi.content) + else: + self.write(old_mbi.BaseAddress, old_mbi.content) + new_mbi.content = old_mbi.content + + # On error, skip this region or raise an exception. + except Exception: + if not bSkipOnError: + raise + msg = "Error restoring region at address %s: %s" + msg = msg % ( + HexDump(old_mbi.BaseAddress, self.get_bits()), + traceback.format_exc()) + warnings.warn(msg, RuntimeWarning) + +#------------------------------------------------------------------------------ + + def inject_code(self, payload, lpParameter = 0): + """ + Injects relocatable code into the process memory and executes it. + + @warning: Don't forget to free the memory when you're done with it! + Otherwise you'll be leaking memory in the target process. + + @see: L{inject_dll} + + @type payload: str + @param payload: Relocatable code to run in a new thread. + + @type lpParameter: int + @param lpParameter: (Optional) Parameter to be pushed in the stack. + + @rtype: tuple( L{Thread}, int ) + @return: The injected Thread object + and the memory address where the code was written. + + @raise WindowsError: An exception is raised on error. + """ + + # Uncomment for debugging... +## payload = '\xCC' + payload + + # Allocate the memory for the shellcode. + lpStartAddress = self.malloc(len(payload)) + + # Catch exceptions so we can free the memory on error. + try: + + # Write the shellcode to our memory location. + self.write(lpStartAddress, payload) + + # Start a new thread for the shellcode to run. + aThread = self.start_thread(lpStartAddress, lpParameter, + bSuspended = False) + + # Remember the shellcode address. + # It will be freed ONLY by the Thread.kill() method + # and the EventHandler class, otherwise you'll have to + # free it in your code, or have your shellcode clean up + # after itself (recommended). + aThread.pInjectedMemory = lpStartAddress + + # Free the memory on error. + except Exception: + self.free(lpStartAddress) + raise + + # Return the Thread object and the shellcode address. + return aThread, lpStartAddress + + # TODO + # The shellcode should check for errors, otherwise it just crashes + # when the DLL can't be loaded or the procedure can't be found. + # On error the shellcode should execute an int3 instruction. + def inject_dll(self, dllname, procname = None, lpParameter = 0, + bWait = True, dwTimeout = None): + """ + Injects a DLL into the process memory. + + @warning: Setting C{bWait} to C{True} when the process is frozen by a + debug event will cause a deadlock in your debugger. + + @warning: This involves allocating memory in the target process. + This is how the freeing of this memory is handled: + + - If the C{bWait} flag is set to C{True} the memory will be freed + automatically before returning from this method. + - If the C{bWait} flag is set to C{False}, the memory address is + set as the L{Thread.pInjectedMemory} property of the returned + thread object. + - L{Debug} objects free L{Thread.pInjectedMemory} automatically + both when it detaches from a process and when the injected + thread finishes its execution. + - The {Thread.kill} method also frees L{Thread.pInjectedMemory} + automatically, even if you're not attached to the process. + + You could still be leaking memory if not careful. For example, if + you inject a dll into a process you're not attached to, you don't + wait for the thread's completion and you don't kill it either, the + memory would be leaked. 
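A minimal sketch of how the inject_code() API documented above might be used (illustrative only, not part of the vendored file; `pid` and `shellcode` are hypothetical placeholders):

# Sketch only: run a payload in the target process and release its memory afterwards.
from winappdbg import Process

def run_payload(pid, shellcode):
    process = Process(pid)
    thread, address = process.inject_code(shellcode)
    try:
        thread.wait()          # block until the injected thread finishes
    finally:
        process.free(address)  # free the payload, as the docstring above warns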
+ + @see: L{inject_code} + + @type dllname: str + @param dllname: Name of the DLL module to load. + + @type procname: str + @param procname: (Optional) Procedure to call when the DLL is loaded. + + @type lpParameter: int + @param lpParameter: (Optional) Parameter to the C{procname} procedure. + + @type bWait: bool + @param bWait: C{True} to wait for the process to finish. + C{False} to return immediately. + + @type dwTimeout: int + @param dwTimeout: (Optional) Timeout value in milliseconds. + Ignored if C{bWait} is C{False}. + + @rtype: L{Thread} + @return: Newly created thread object. If C{bWait} is set to C{True} the + thread will be dead, otherwise it will be alive. + + @raise NotImplementedError: The target platform is not supported. + Currently calling a procedure in the library is only supported in + the I{i386} architecture. + + @raise WindowsError: An exception is raised on error. + """ + + # Resolve kernel32.dll + aModule = self.get_module_by_name(compat.b('kernel32.dll')) + if aModule is None: + self.scan_modules() + aModule = self.get_module_by_name(compat.b('kernel32.dll')) + if aModule is None: + raise RuntimeError( + "Cannot resolve kernel32.dll in the remote process") + + # Old method, using shellcode. + if procname: + if self.get_arch() != win32.ARCH_I386: + raise NotImplementedError() + dllname = compat.b(dllname) + + # Resolve kernel32.dll!LoadLibraryA + pllib = aModule.resolve(compat.b('LoadLibraryA')) + if not pllib: + raise RuntimeError( + "Cannot resolve kernel32.dll!LoadLibraryA" + " in the remote process") + + # Resolve kernel32.dll!GetProcAddress + pgpad = aModule.resolve(compat.b('GetProcAddress')) + if not pgpad: + raise RuntimeError( + "Cannot resolve kernel32.dll!GetProcAddress" + " in the remote process") + + # Resolve kernel32.dll!VirtualFree + pvf = aModule.resolve(compat.b('VirtualFree')) + if not pvf: + raise RuntimeError( + "Cannot resolve kernel32.dll!VirtualFree" + " in the remote process") + + # Shellcode follows... + code = compat.b('') + + # push dllname + code += compat.b('\xe8') + struct.pack('= 2 and bAllowElevation: + pi = win32.CreateProcess(None, lpCmdLine, + bInheritHandles = bInheritHandles, + dwCreationFlags = dwCreationFlags, + lpStartupInfo = lpStartupInfo) + + # Create the process the hard way... + else: + + # If we allow elevation, use the current process token. + # If not, get the token from the current shell process. + hToken = None + try: + if not bAllowElevation: + if bFollow: + msg = ( + "Child processes can't be autofollowed" + " when dropping UAC elevation.") + raise NotImplementedError(msg) + if bConsole: + msg = ( + "Child processes can't inherit the debugger's" + " console when dropping UAC elevation.") + raise NotImplementedError(msg) + if bInheritHandles: + msg = ( + "Child processes can't inherit the debugger's" + " handles when dropping UAC elevation.") + raise NotImplementedError(msg) + try: + hWnd = self.get_shell_window() + except WindowsError: + hWnd = self.get_desktop_window() + shell = hWnd.get_process() + try: + hShell = shell.get_handle( + win32.PROCESS_QUERY_INFORMATION) + with win32.OpenProcessToken(hShell) as hShellToken: + hToken = win32.DuplicateTokenEx(hShellToken) + finally: + shell.close_handle() + + # Lower trust level if requested. 
+ if iTrustLevel < 2: + if iTrustLevel > 0: + dwLevelId = win32.SAFER_LEVELID_NORMALUSER + else: + dwLevelId = win32.SAFER_LEVELID_UNTRUSTED + with win32.SaferCreateLevel(dwLevelId = dwLevelId) as hSafer: + hSaferToken = win32.SaferComputeTokenFromLevel( + hSafer, hToken)[0] + try: + if hToken is not None: + hToken.close() + except: + hSaferToken.close() + raise + hToken = hSaferToken + + # If we have a computed token, call CreateProcessAsUser(). + if bAllowElevation: + pi = win32.CreateProcessAsUser( + hToken = hToken, + lpCommandLine = lpCmdLine, + bInheritHandles = bInheritHandles, + dwCreationFlags = dwCreationFlags, + lpStartupInfo = lpStartupInfo) + + # If we have a primary token call CreateProcessWithToken(). + # The problem is, there are many flags CreateProcess() and + # CreateProcessAsUser() accept but CreateProcessWithToken() + # and CreateProcessWithLogonW() don't, so we need to work + # around them. + else: + + # Remove the debug flags. + dwCreationFlags &= ~win32.DEBUG_PROCESS + dwCreationFlags &= ~win32.DEBUG_ONLY_THIS_PROCESS + + # Remove the console flags. + dwCreationFlags &= ~win32.DETACHED_PROCESS + + # The process will be created suspended. + dwCreationFlags |= win32.CREATE_SUSPENDED + + # Create the process using the new primary token. + pi = win32.CreateProcessWithToken( + hToken = hToken, + dwLogonFlags = win32.LOGON_WITH_PROFILE, + lpCommandLine = lpCmdLine, + dwCreationFlags = dwCreationFlags, + lpStartupInfo = lpStartupInfo) + + # Attach as a debugger, if requested. + if bDebug: + win32.DebugActiveProcess(pi.dwProcessId) + + # Resume execution, if requested. + if not bSuspended: + win32.ResumeThread(pi.hThread) + + # Close the token when we're done with it. + finally: + if hToken is not None: + hToken.close() + + # Wrap the new process and thread in Process and Thread objects, + # and add them to the corresponding snapshots. + aProcess = Process(pi.dwProcessId, pi.hProcess) + aThread = Thread (pi.dwThreadId, pi.hThread) + aProcess._add_thread(aThread) + self._add_process(aProcess) + + # Clean up on error. + except: + if pi is not None: + try: + win32.TerminateProcess(pi.hProcess) + except WindowsError: + pass + pi.hThread.close() + pi.hProcess.close() + raise + + # Return the new Process object. + return aProcess + + def get_explorer_pid(self): + """ + Tries to find the process ID for "explorer.exe". + + @rtype: int or None + @return: Returns the process ID, or C{None} on error. + """ + try: + exp = win32.SHGetFolderPath(win32.CSIDL_WINDOWS) + except Exception: + exp = None + if not exp: + exp = os.getenv('SystemRoot') + if exp: + exp = os.path.join(exp, 'explorer.exe') + exp_list = self.find_processes_by_filename(exp) + if exp_list: + return exp_list[0][0].get_pid() + return None + +#------------------------------------------------------------------------------ + + # XXX this methods musn't end up calling __initialize_snapshot by accident! + + def scan(self): + """ + Populates the snapshot with running processes and threads, + and loaded modules. + + Tipically this is the first method called after instantiating a + L{System} object, as it makes a best effort approach to gathering + information on running processes. + + @rtype: bool + @return: C{True} if the snapshot is complete, C{False} if the debugger + doesn't have permission to scan some processes. In either case, the + snapshot is complete for all processes the debugger has access to. + """ + has_threads = True + try: + try: + + # Try using the Toolhelp API + # to scan for processes and threads. 
+ self.scan_processes_and_threads() + + except Exception: + + # On error, try using the PSAPI to scan for process IDs only. + self.scan_processes_fast() + + # Now try using the Toolhelp again to get the threads. + for aProcess in self.__processDict.values(): + if aProcess._get_thread_ids(): + try: + aProcess.scan_threads() + except WindowsError: + has_threads = False + + finally: + + # Try using the Remote Desktop API to scan for processes only. + # This will update the filenames when it's not possible + # to obtain them from the Toolhelp API. + self.scan_processes() + + # When finished scanning for processes, try modules too. + has_modules = self.scan_modules() + + # Try updating the process filenames when possible. + has_full_names = self.scan_process_filenames() + + # Return the completion status. + return has_threads and has_modules and has_full_names + + def scan_processes_and_threads(self): + """ + Populates the snapshot with running processes and threads. + + Tipically you don't need to call this method directly, if unsure use + L{scan} instead. + + @note: This method uses the Toolhelp API. + + @see: L{scan_modules} + + @raise WindowsError: An error occured while updating the snapshot. + The snapshot was not modified. + """ + + # The main module filename may be spoofed by malware, + # since this information resides in usermode space. + # See: http://www.ragestorm.net/blogs/?p=163 + + our_pid = win32.GetCurrentProcessId() + dead_pids = set( compat.iterkeys(self.__processDict) ) + found_tids = set() + + # Ignore our own process if it's in the snapshot for some reason + if our_pid in dead_pids: + dead_pids.remove(our_pid) + + # Take a snapshot of all processes and threads + dwFlags = win32.TH32CS_SNAPPROCESS | win32.TH32CS_SNAPTHREAD + with win32.CreateToolhelp32Snapshot(dwFlags) as hSnapshot: + + # Add all the processes (excluding our own) + pe = win32.Process32First(hSnapshot) + while pe is not None: + dwProcessId = pe.th32ProcessID + if dwProcessId != our_pid: + if dwProcessId in dead_pids: + dead_pids.remove(dwProcessId) + if dwProcessId not in self.__processDict: + aProcess = Process(dwProcessId, fileName=pe.szExeFile) + self._add_process(aProcess) + elif pe.szExeFile: + aProcess = self.get_process(dwProcessId) + if not aProcess.fileName: + aProcess.fileName = pe.szExeFile + pe = win32.Process32Next(hSnapshot) + + # Add all the threads + te = win32.Thread32First(hSnapshot) + while te is not None: + dwProcessId = te.th32OwnerProcessID + if dwProcessId != our_pid: + if dwProcessId in dead_pids: + dead_pids.remove(dwProcessId) + if dwProcessId in self.__processDict: + aProcess = self.get_process(dwProcessId) + else: + aProcess = Process(dwProcessId) + self._add_process(aProcess) + dwThreadId = te.th32ThreadID + found_tids.add(dwThreadId) + if not aProcess._has_thread_id(dwThreadId): + aThread = Thread(dwThreadId, process = aProcess) + aProcess._add_thread(aThread) + te = win32.Thread32Next(hSnapshot) + + # Remove dead processes + for pid in dead_pids: + self._del_process(pid) + + # Remove dead threads + for aProcess in compat.itervalues(self.__processDict): + dead_tids = set( aProcess._get_thread_ids() ) + dead_tids.difference_update(found_tids) + for tid in dead_tids: + aProcess._del_thread(tid) + + def scan_modules(self): + """ + Populates the snapshot with loaded modules. + + Tipically you don't need to call this method directly, if unsure use + L{scan} instead. + + @note: This method uses the Toolhelp API. 
+ + @see: L{scan_processes_and_threads} + + @rtype: bool + @return: C{True} if the snapshot is complete, C{False} if the debugger + doesn't have permission to scan some processes. In either case, the + snapshot is complete for all processes the debugger has access to. + """ + complete = True + for aProcess in compat.itervalues(self.__processDict): + try: + aProcess.scan_modules() + except WindowsError: + complete = False + return complete + + def scan_processes(self): + """ + Populates the snapshot with running processes. + + Tipically you don't need to call this method directly, if unsure use + L{scan} instead. + + @note: This method uses the Remote Desktop API instead of the Toolhelp + API. It might give slightly different results, especially if the + current process does not have full privileges. + + @note: This method will only retrieve process filenames. To get the + process pathnames instead, B{after} this method call + L{scan_process_filenames}. + + @raise WindowsError: An error occured while updating the snapshot. + The snapshot was not modified. + """ + + # Get the previous list of PIDs. + # We'll be removing live PIDs from it as we find them. + our_pid = win32.GetCurrentProcessId() + dead_pids = set( compat.iterkeys(self.__processDict) ) + + # Ignore our own PID. + if our_pid in dead_pids: + dead_pids.remove(our_pid) + + # Get the list of processes from the Remote Desktop API. + pProcessInfo = None + try: + pProcessInfo, dwCount = win32.WTSEnumerateProcesses( + win32.WTS_CURRENT_SERVER_HANDLE) + + # For each process found... + for index in compat.xrange(dwCount): + sProcessInfo = pProcessInfo[index] + +## # Ignore processes belonging to other sessions. +## if sProcessInfo.SessionId != win32.WTS_CURRENT_SESSION: +## continue + + # Ignore our own PID. + pid = sProcessInfo.ProcessId + if pid == our_pid: + continue + + # Remove the PID from the dead PIDs list. + if pid in dead_pids: + dead_pids.remove(pid) + + # Get the "process name". + # Empirically, this seems to be the filename without the path. + # (The MSDN docs aren't very clear about this API call). + fileName = sProcessInfo.pProcessName + + # If the process is new, add a new Process object. + if pid not in self.__processDict: + aProcess = Process(pid, fileName = fileName) + self._add_process(aProcess) + + # If the process was already in the snapshot, and the + # filename is missing, update the Process object. + elif fileName: + aProcess = self.__processDict.get(pid) + if not aProcess.fileName: + aProcess.fileName = fileName + + # Free the memory allocated by the Remote Desktop API. + finally: + if pProcessInfo is not None: + try: + win32.WTSFreeMemory(pProcessInfo) + except WindowsError: + pass + + # At this point the only remaining PIDs from the old list are dead. + # Remove them from the snapshot. + for pid in dead_pids: + self._del_process(pid) + + def scan_processes_fast(self): + """ + Populates the snapshot with running processes. + Only the PID is retrieved for each process. + + Dead processes are removed. + Threads and modules of living processes are ignored. + + Tipically you don't need to call this method directly, if unsure use + L{scan} instead. + + @note: This method uses the PSAPI. It may be faster for scanning, + but some information may be missing, outdated or slower to obtain. + This could be a good tradeoff under some circumstances. 
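A minimal sketch of the snapshot-scanning API described above (illustrative only, not part of the vendored file; assumes the System class behaves as documented):

# Sketch only: take a best-effort snapshot and list the visible processes.
from winappdbg import System

system = System()
complete = system.scan()   # best-effort scan of processes, threads and modules
if not complete:
    print("some processes could not be fully scanned (insufficient privileges)")
for process in system.iter_processes():
    print("%6d %s" % (process.get_pid(), process.get_filename() or "<unknown>"))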
+ """ + + # Get the new and old list of pids + new_pids = set( win32.EnumProcesses() ) + old_pids = set( compat.iterkeys(self.__processDict) ) + + # Ignore our own pid + our_pid = win32.GetCurrentProcessId() + if our_pid in new_pids: + new_pids.remove(our_pid) + if our_pid in old_pids: + old_pids.remove(our_pid) + + # Add newly found pids + for pid in new_pids.difference(old_pids): + self._add_process( Process(pid) ) + + # Remove missing pids + for pid in old_pids.difference(new_pids): + self._del_process(pid) + + def scan_process_filenames(self): + """ + Update the filename for each process in the snapshot when possible. + + @note: Tipically you don't need to call this method. It's called + automatically by L{scan} to get the full pathname for each process + when possible, since some scan methods only get filenames without + the path component. + + If unsure, use L{scan} instead. + + @see: L{scan}, L{Process.get_filename} + + @rtype: bool + @return: C{True} if all the pathnames were retrieved, C{False} if the + debugger doesn't have permission to scan some processes. In either + case, all processes the debugger has access to have a full pathname + instead of just a filename. + """ + complete = True + for aProcess in self.__processDict.values(): + try: + new_name = None + old_name = aProcess.fileName + try: + aProcess.fileName = None + new_name = aProcess.get_filename() + finally: + if not new_name: + aProcess.fileName = old_name + complete = False + except Exception: + complete = False + return complete + +#------------------------------------------------------------------------------ + + def clear_dead_processes(self): + """ + Removes Process objects from the snapshot + referring to processes no longer running. + """ + for pid in self.get_process_ids(): + aProcess = self.get_process(pid) + if not aProcess.is_alive(): + self._del_process(aProcess) + + def clear_unattached_processes(self): + """ + Removes Process objects from the snapshot + referring to processes not being debugged. + """ + for pid in self.get_process_ids(): + aProcess = self.get_process(pid) + if not aProcess.is_being_debugged(): + self._del_process(aProcess) + + def close_process_handles(self): + """ + Closes all open handles to processes in this snapshot. + """ + for pid in self.get_process_ids(): + aProcess = self.get_process(pid) + try: + aProcess.close_handle() + except Exception: + e = sys.exc_info()[1] + try: + msg = "Cannot close process handle %s, reason: %s" + msg %= (aProcess.hProcess.value, str(e)) + warnings.warn(msg) + except Exception: + pass + + def close_process_and_thread_handles(self): + """ + Closes all open handles to processes and threads in this snapshot. + """ + for aProcess in self.iter_processes(): + aProcess.close_thread_handles() + try: + aProcess.close_handle() + except Exception: + e = sys.exc_info()[1] + try: + msg = "Cannot close process handle %s, reason: %s" + msg %= (aProcess.hProcess.value, str(e)) + warnings.warn(msg) + except Exception: + pass + + def clear_processes(self): + """ + Removes all L{Process}, L{Thread} and L{Module} objects in this snapshot. + """ + #self.close_process_and_thread_handles() + for aProcess in self.iter_processes(): + aProcess.clear() + self.__processDict = dict() + + def clear(self): + """ + Clears this snapshot. + + @see: L{clear_processes} + """ + self.clear_processes() + +#------------------------------------------------------------------------------ + + # Docs for these methods are taken from the _ThreadContainer class. 
+ + def has_thread(self, dwThreadId): + dwProcessId = self.get_pid_from_tid(dwThreadId) + if dwProcessId is None: + return False + return self.has_process(dwProcessId) + + def get_thread(self, dwThreadId): + dwProcessId = self.get_pid_from_tid(dwThreadId) + if dwProcessId is None: + msg = "Unknown thread ID %d" % dwThreadId + raise KeyError(msg) + return self.get_process(dwProcessId).get_thread(dwThreadId) + + def get_thread_ids(self): + ids = list() + for aProcess in self.iter_processes(): + ids += aProcess.get_thread_ids() + return ids + + def get_thread_count(self): + count = 0 + for aProcess in self.iter_processes(): + count += aProcess.get_thread_count() + return count + + has_thread.__doc__ = _ThreadContainer.has_thread.__doc__ + get_thread.__doc__ = _ThreadContainer.get_thread.__doc__ + get_thread_ids.__doc__ = _ThreadContainer.get_thread_ids.__doc__ + get_thread_count.__doc__ = _ThreadContainer.get_thread_count.__doc__ + +#------------------------------------------------------------------------------ + + # Docs for these methods are taken from the _ModuleContainer class. + + def get_module_count(self): + count = 0 + for aProcess in self.iter_processes(): + count += aProcess.get_module_count() + return count + + get_module_count.__doc__ = _ModuleContainer.get_module_count.__doc__ + +#------------------------------------------------------------------------------ + + def find_modules_by_base(self, lpBaseOfDll): + """ + @rtype: list( L{Module}... ) + @return: List of Module objects with the given base address. + """ + found = list() + for aProcess in self.iter_processes(): + if aProcess.has_module(lpBaseOfDll): + aModule = aProcess.get_module(lpBaseOfDll) + found.append( (aProcess, aModule) ) + return found + + def find_modules_by_name(self, fileName): + """ + @rtype: list( L{Module}... ) + @return: List of Module objects found. + """ + found = list() + for aProcess in self.iter_processes(): + aModule = aProcess.get_module_by_name(fileName) + if aModule is not None: + found.append( (aProcess, aModule) ) + return found + + def find_modules_by_address(self, address): + """ + @rtype: list( L{Module}... ) + @return: List of Module objects that best match the given address. + """ + found = list() + for aProcess in self.iter_processes(): + aModule = aProcess.get_module_at_address(address) + if aModule is not None: + found.append( (aProcess, aModule) ) + return found + + def __find_processes_by_filename(self, filename): + """ + Internally used by L{find_processes_by_filename}. + """ + found = list() + filename = filename.lower() + if PathOperations.path_is_absolute(filename): + for aProcess in self.iter_processes(): + imagename = aProcess.get_filename() + if imagename and imagename.lower() == filename: + found.append( (aProcess, imagename) ) + else: + for aProcess in self.iter_processes(): + imagename = aProcess.get_filename() + if imagename: + imagename = PathOperations.pathname_to_filename(imagename) + if imagename.lower() == filename: + found.append( (aProcess, imagename) ) + return found + + def find_processes_by_filename(self, fileName): + """ + @type fileName: str + @param fileName: Filename to search for. + If it's a full pathname, the match must be exact. + If it's a base filename only, the file part is matched, + regardless of the directory where it's located. + + @note: If the process is not found and the file extension is not + given, this method will search again assuming a default + extension (.exe). 
+ + @rtype: list of tuple( L{Process}, str ) + @return: List of processes matching the given main module filename. + Each tuple contains a Process object and it's filename. + """ + found = self.__find_processes_by_filename(fileName) + if not found: + fn, ext = PathOperations.split_extension(fileName) + if not ext: + fileName = '%s.exe' % fn + found = self.__find_processes_by_filename(fileName) + return found + +#------------------------------------------------------------------------------ + + # XXX _notify_* methods should not trigger a scan + + def _add_process(self, aProcess): + """ + Private method to add a process object to the snapshot. + + @type aProcess: L{Process} + @param aProcess: Process object. + """ +## if not isinstance(aProcess, Process): +## if hasattr(aProcess, '__class__'): +## typename = aProcess.__class__.__name__ +## else: +## typename = str(type(aProcess)) +## msg = "Expected Process, got %s instead" % typename +## raise TypeError(msg) + dwProcessId = aProcess.dwProcessId +## if dwProcessId in self.__processDict: +## msg = "Process already exists: %d" % dwProcessId +## raise KeyError(msg) + self.__processDict[dwProcessId] = aProcess + + def _del_process(self, dwProcessId): + """ + Private method to remove a process object from the snapshot. + + @type dwProcessId: int + @param dwProcessId: Global process ID. + """ + try: + aProcess = self.__processDict[dwProcessId] + del self.__processDict[dwProcessId] + except KeyError: + aProcess = None + msg = "Unknown process ID %d" % dwProcessId + warnings.warn(msg, RuntimeWarning) + if aProcess: + aProcess.clear() # remove circular references + + # Notify the creation of a new process. + def _notify_create_process(self, event): + """ + Notify the creation of a new process. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{CreateProcessEvent} + @param event: Create process event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + dwProcessId = event.get_pid() + dwThreadId = event.get_tid() + hProcess = event.get_process_handle() +## if not self.has_process(dwProcessId): # XXX this would trigger a scan + if dwProcessId not in self.__processDict: + aProcess = Process(dwProcessId, hProcess) + self._add_process(aProcess) + aProcess.fileName = event.get_filename() + else: + aProcess = self.get_process(dwProcessId) + #if hProcess != win32.INVALID_HANDLE_VALUE: + # aProcess.hProcess = hProcess # may have more privileges + if not aProcess.fileName: + fileName = event.get_filename() + if fileName: + aProcess.fileName = fileName + return aProcess._notify_create_process(event) # pass it to the process + + def _notify_exit_process(self, event): + """ + Notify the termination of a process. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{ExitProcessEvent} + @param event: Exit process event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. 
+ """ + dwProcessId = event.get_pid() +## if self.has_process(dwProcessId): # XXX this would trigger a scan + if dwProcessId in self.__processDict: + self._del_process(dwProcessId) + return True diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/registry.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/registry.py new file mode 100644 index 00000000..5623b80a --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/registry.py @@ -0,0 +1,695 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Registry access. + +@group Instrumentation: + Registry, RegistryKey +""" + +from __future__ import with_statement + +__revision__ = "$Id$" + +__all__ = ['Registry'] + +import sys +from winappdbg import win32 +from winappdbg import compat +import collections +import warnings + +#============================================================================== + +class _RegistryContainer (object): + """ + Base class for L{Registry} and L{RegistryKey}. + """ + + # Dummy object to detect empty arguments. + class __EmptyArgument: + pass + __emptyArgument = __EmptyArgument() + + def __init__(self): + self.__default = None + + def has_key(self, name): + return name in self + + def get(self, name, default=__emptyArgument): + try: + return self[name] + except KeyError: + if default is RegistryKey.__emptyArgument: + return self.__default + return default + + def setdefault(self, default): + self.__default = default + + def __iter__(self): + return compat.iterkeys(self) + +#============================================================================== + +class RegistryKey (_RegistryContainer): + """ + Exposes a single Windows Registry key as a dictionary-like object. + + @see: L{Registry} + + @type path: str + @ivar path: Registry key path. + + @type handle: L{win32.RegistryKeyHandle} + @ivar handle: Registry key handle. + """ + + def __init__(self, path, handle): + """ + @type path: str + @param path: Registry key path. 
+ + @type handle: L{win32.RegistryKeyHandle} + @param handle: Registry key handle. + """ + super(RegistryKey, self).__init__() + if path.endswith('\\'): + path = path[:-1] + self._path = path + self._handle = handle + + @property + def path(self): + return self._path + + @property + def handle(self): + #if not self._handle: + # msg = "This Registry key handle has already been closed." + # raise RuntimeError(msg) + return self._handle + + #def close(self): + # """ + # Close the Registry key handle, freeing its resources. It cannot be + # used again after calling this method. + # + # @note: This method will be called automatically by the garbage + # collector, and upon exiting a "with" block. + # + # @raise RuntimeError: This Registry key handle has already been closed. + # """ + # self.handle.close() + # + #def __enter__(self): + # """ + # Compatibility with the "C{with}" Python statement. + # """ + # return self + # + #def __exit__(self, type, value, traceback): + # """ + # Compatibility with the "C{with}" Python statement. + # """ + # try: + # self.close() + # except Exception: + # pass + + def __contains__(self, name): + try: + win32.RegQueryValueEx(self.handle, name, False) + return True + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_FILE_NOT_FOUND: + return False + raise + + def __getitem__(self, name): + try: + return win32.RegQueryValueEx(self.handle, name)[0] + except WindowsError: + e = sys.exc_info()[1] + if e.winerror == win32.ERROR_FILE_NOT_FOUND: + raise KeyError(name) + raise + + def __setitem__(self, name, value): + win32.RegSetValueEx(self.handle, name, value) + + def __delitem__(self, name): + win32.RegDeleteValue(self.handle, name) + + def iterkeys(self): + handle = self.handle + index = 0 + while 1: + resp = win32.RegEnumValue(handle, index, False) + if resp is None: + break + yield resp[0] + index += 1 + + def itervalues(self): + handle = self.handle + index = 0 + while 1: + resp = win32.RegEnumValue(handle, index) + if resp is None: + break + yield resp[2] + index += 1 + + def iteritems(self): + handle = self.handle + index = 0 + while 1: + resp = win32.RegEnumValue(handle, index) + if resp is None: + break + yield resp[0], resp[2] + index += 1 + + def keys(self): + # return list(self.iterkeys()) # that can't be optimized by psyco + handle = self.handle + keys = list() + index = 0 + while 1: + resp = win32.RegEnumValue(handle, index, False) + if resp is None: + break + keys.append(resp[0]) + index += 1 + return keys + + def values(self): + # return list(self.itervalues()) # that can't be optimized by psyco + handle = self.handle + values = list() + index = 0 + while 1: + resp = win32.RegEnumValue(handle, index) + if resp is None: + break + values.append(resp[2]) + index += 1 + return values + + def items(self): + # return list(self.iteritems()) # that can't be optimized by psyco + handle = self.handle + items = list() + index = 0 + while 1: + resp = win32.RegEnumValue(handle, index) + if resp is None: + break + items.append( (resp[0], resp[2]) ) + index += 1 + return items + + def get_value_type(self, name): + """ + Retrieves the low-level data type for the given value. + + @type name: str + @param name: Registry value name. 
+
+        @rtype:  int
+        @return: One of the following constants:
+         - L{win32.REG_NONE} (0)
+         - L{win32.REG_SZ} (1)
+         - L{win32.REG_EXPAND_SZ} (2)
+         - L{win32.REG_BINARY} (3)
+         - L{win32.REG_DWORD} (4)
+         - L{win32.REG_DWORD_BIG_ENDIAN} (5)
+         - L{win32.REG_LINK} (6)
+         - L{win32.REG_MULTI_SZ} (7)
+         - L{win32.REG_RESOURCE_LIST} (8)
+         - L{win32.REG_FULL_RESOURCE_DESCRIPTOR} (9)
+         - L{win32.REG_RESOURCE_REQUIREMENTS_LIST} (10)
+         - L{win32.REG_QWORD} (11)
+
+        @raise KeyError: The specified value could not be found.
+        """
+        try:
+            return win32.RegQueryValueEx(self.handle, name)[1]
+        except WindowsError:
+            e = sys.exc_info()[1]
+            if e.winerror == win32.ERROR_FILE_NOT_FOUND:
+                raise KeyError(name)
+            raise
+
+    def clear(self):
+        handle = self.handle
+        while 1:
+            resp = win32.RegEnumValue(handle, 0, False)
+            if resp is None:
+                break
+            win32.RegDeleteValue(handle, resp[0])
+
+    def __str__(self):
+        default = self['']
+        return str(default)
+
+    def __unicode__(self):
+        default = self[u'']
+        return compat.unicode(default)
+
+    def __repr__(self):
+        return '<Registry key "%s">' % self._path
+
+    def iterchildren(self):
+        """
+        Iterates the subkeys for this Registry key.
+
+        @rtype:  iter of L{RegistryKey}
+        @return: Iterator of subkeys.
+        """
+        handle = self.handle
+        index = 0
+        while 1:
+            subkey = win32.RegEnumKey(handle, index)
+            if subkey is None:
+                break
+            yield self.child(subkey)
+            index += 1
+
+    def children(self):
+        """
+        Returns a list of subkeys for this Registry key.
+
+        @rtype:  list(L{RegistryKey})
+        @return: List of subkeys.
+        """
+        # return list(self.iterchildren()) # that can't be optimized by psyco
+        handle = self.handle
+        result = []
+        index = 0
+        while 1:
+            subkey = win32.RegEnumKey(handle, index)
+            if subkey is None:
+                break
+            result.append( self.child(subkey) )
+            index += 1
+        return result
+
+    def child(self, subkey):
+        """
+        Retrieves a subkey for this Registry key, given its name.
+
+        @type  subkey: str
+        @param subkey: Name of the subkey.
+
+        @rtype:  L{RegistryKey}
+        @return: Subkey.
+        """
+        path = self._path + '\\' + subkey
+        handle = win32.RegOpenKey(self.handle, subkey)
+        return RegistryKey(path, handle)
+
+    def flush(self):
+        """
+        Flushes changes immediately to disk.
+
+        This method is normally not needed, as the Registry writes changes
+        to disk by itself. This mechanism is provided to ensure the write
+        happens immediately, as opposed to whenever the OS wants to.
+
+        @warn: Calling this method too often may degrade performance.
+        """
+        win32.RegFlushKey(self.handle)
+
+#==============================================================================
+
+# TODO: possibly cache the RegistryKey objects
+# to avoid opening and closing handles many times on code sequences like this:
+#
+# r = Registry()
+# r['HKLM\\Software\\Microsoft\\Windows NT\\CurrentVersion\\Run']['Example 1'] = 'example1.exe'
+# r['HKLM\\Software\\Microsoft\\Windows NT\\CurrentVersion\\Run']['Example 2'] = 'example2.exe'
+# r['HKLM\\Software\\Microsoft\\Windows NT\\CurrentVersion\\Run']['Example 3'] = 'example3.exe'
+
+# TODO: support for access flags?
+# TODO: should be possible to disable the safety checks (see __delitem__)
+
+# TODO: workaround for an API bug described by a user in MSDN
+#
+# http://msdn.microsoft.com/en-us/library/windows/desktop/aa379776(v=vs.85).aspx
+#
+# Apparently RegDeleteTree won't work remotely from Win7 to WinXP, and the only
+# solution is to recursively call RegDeleteKey.
+
+class Registry (_RegistryContainer):
+    """
+    Exposes the Windows Registry as a Python container.
+ + @type machine: str or None + @ivar machine: For a remote Registry, the machine name. + For a local Registry, the value is C{None}. + """ + + _hives_by_name = { + + # Short names + 'HKCR' : win32.HKEY_CLASSES_ROOT, + 'HKCU' : win32.HKEY_CURRENT_USER, + 'HKLM' : win32.HKEY_LOCAL_MACHINE, + 'HKU' : win32.HKEY_USERS, + 'HKPD' : win32.HKEY_PERFORMANCE_DATA, + 'HKCC' : win32.HKEY_CURRENT_CONFIG, + + # Long names + 'HKEY_CLASSES_ROOT' : win32.HKEY_CLASSES_ROOT, + 'HKEY_CURRENT_USER' : win32.HKEY_CURRENT_USER, + 'HKEY_LOCAL_MACHINE' : win32.HKEY_LOCAL_MACHINE, + 'HKEY_USERS' : win32.HKEY_USERS, + 'HKEY_PERFORMANCE_DATA' : win32.HKEY_PERFORMANCE_DATA, + 'HKEY_CURRENT_CONFIG' : win32.HKEY_CURRENT_CONFIG, + } + + _hives_by_value = { + win32.HKEY_CLASSES_ROOT : 'HKEY_CLASSES_ROOT', + win32.HKEY_CURRENT_USER : 'HKEY_CURRENT_USER', + win32.HKEY_LOCAL_MACHINE : 'HKEY_LOCAL_MACHINE', + win32.HKEY_USERS : 'HKEY_USERS', + win32.HKEY_PERFORMANCE_DATA : 'HKEY_PERFORMANCE_DATA', + win32.HKEY_CURRENT_CONFIG : 'HKEY_CURRENT_CONFIG', + } + + _hives = sorted(compat.itervalues(_hives_by_value)) + + def __init__(self, machine = None): + """ + Opens a local or remote registry. + + @type machine: str + @param machine: Optional machine name. If C{None} it opens the local + registry. + """ + self._machine = machine + self._remote_hives = {} + + @property + def machine(self): + return self._machine + + def _split_path(self, path): + """ + Splits a Registry path and returns the hive and key. + + @type path: str + @param path: Registry path. + + @rtype: tuple( int, str ) + @return: Tuple containing the hive handle and the subkey path. + The hive handle is always one of the following integer constants: + - L{win32.HKEY_CLASSES_ROOT} + - L{win32.HKEY_CURRENT_USER} + - L{win32.HKEY_LOCAL_MACHINE} + - L{win32.HKEY_USERS} + - L{win32.HKEY_PERFORMANCE_DATA} + - L{win32.HKEY_CURRENT_CONFIG} + """ + if '\\' in path: + p = path.find('\\') + hive = path[:p] + path = path[p+1:] + else: + hive = path + path = None + handle = self._hives_by_name[ hive.upper() ] + return handle, path + + def _parse_path(self, path): + """ + Parses a Registry path and returns the hive and key. + + @type path: str + @param path: Registry path. + + @rtype: tuple( int, str ) + @return: Tuple containing the hive handle and the subkey path. + For a local Registry, the hive handle is an integer. + For a remote Registry, the hive handle is a L{RegistryKeyHandle}. + """ + handle, path = self._split_path(path) + if self._machine is not None: + handle = self._connect_hive(handle) + return handle, path + + def _join_path(self, hive, subkey): + """ + Joins the hive and key to make a Registry path. + + @type hive: int + @param hive: Registry hive handle. + The hive handle must be one of the following integer constants: + - L{win32.HKEY_CLASSES_ROOT} + - L{win32.HKEY_CURRENT_USER} + - L{win32.HKEY_LOCAL_MACHINE} + - L{win32.HKEY_USERS} + - L{win32.HKEY_PERFORMANCE_DATA} + - L{win32.HKEY_CURRENT_CONFIG} + + @type subkey: str + @param subkey: Subkey path. + + @rtype: str + @return: Registry path. + """ + path = self._hives_by_value[hive] + if subkey: + path = path + '\\' + subkey + return path + + def _sanitize_path(self, path): + """ + Sanitizes the given Registry path. + + @type path: str + @param path: Registry path. + + @rtype: str + @return: Registry path. + """ + return self._join_path( *self._split_path(path) ) + + def _connect_hive(self, hive): + """ + Connect to the specified hive of a remote Registry. 
+
+        @note: The connection will be cached, to close all connections and
+            erase this cache call the L{close} method.
+
+        @type  hive: int
+        @param hive: Hive to connect to.
+
+        @rtype:  L{win32.RegistryKeyHandle}
+        @return: Open handle to the remote Registry hive.
+        """
+        try:
+            handle = self._remote_hives[hive]
+        except KeyError:
+            handle = win32.RegConnectRegistry(self._machine, hive)
+            self._remote_hives[hive] = handle
+        return handle
+
+    def close(self):
+        """
+        Closes all open connections to the remote Registry.
+
+        No exceptions are raised, even if an error occurs.
+
+        This method has no effect when opening the local Registry.
+
+        The remote Registry will still be accessible after calling this method
+        (new connections will be opened automatically on access).
+        """
+        while self._remote_hives:
+            hive = self._remote_hives.popitem()[1]
+            try:
+                hive.close()
+            except Exception:
+                try:
+                    e = sys.exc_info()[1]
+                    msg = "Cannot close registry hive handle %s, reason: %s"
+                    msg %= (hive.value, str(e))
+                    warnings.warn(msg)
+                except Exception:
+                    pass
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        self.close()
+
+    def __repr__(self):
+        if self._machine:
+            return '<Remote Registry at "%s">' % self._machine
+        return '<Local Registry>'
+
+    def __contains__(self, path):
+        hive, subpath = self._parse_path(path)
+        try:
+            with win32.RegOpenKey(hive, subpath):
+                return True
+        except WindowsError:
+            e = sys.exc_info()[1]
+            if e.winerror == win32.ERROR_FILE_NOT_FOUND:
+                return False
+            raise
+
+    def __getitem__(self, path):
+        path = self._sanitize_path(path)
+        hive, subpath = self._parse_path(path)
+        try:
+            handle = win32.RegOpenKey(hive, subpath)
+        except WindowsError:
+            e = sys.exc_info()[1]
+            if e.winerror == win32.ERROR_FILE_NOT_FOUND:
+                raise KeyError(path)
+            raise
+        return RegistryKey(path, handle)
+
+    def __setitem__(self, path, value):
+        do_copy = isinstance(value, RegistryKey)
+        if not do_copy and not isinstance(value, str) \
+                and not isinstance(value, compat.unicode):
+            if isinstance(value, object):
+                t = value.__class__.__name__
+            else:
+                t = type(value)
+            raise TypeError("Expected string or RegistryKey, got %s" % t)
+        hive, subpath = self._parse_path(path)
+        with win32.RegCreateKey(hive, subpath) as handle:
+            if do_copy:
+                win32.RegCopyTree(value.handle, None, handle)
+            else:
+                win32.RegSetValueEx(handle, None, value)
+
+    # XXX FIXME currently not working!
+    # It's probably best to call RegDeleteKey recursively, even if slower.
+    def __delitem__(self, path):
+        hive, subpath = self._parse_path(path)
+        if not subpath:
+            raise TypeError(
+                "Are you SURE you want to wipe out an entire hive?!"
+                " Call win32.RegDeleteTree() directly if you must...")
+        try:
+            win32.RegDeleteTree(hive, subpath)
+        except WindowsError:
+            e = sys.exc_info()[1]
+            if e.winerror == win32.ERROR_FILE_NOT_FOUND:
+                raise KeyError(path)
+            raise
+
+    def create(self, path):
+        """
+        Creates a new Registry key.
+
+        @type  path: str
+        @param path: Registry key path.
+
+        @rtype:  L{RegistryKey}
+        @return: The newly created Registry key.
+        """
+        path = self._sanitize_path(path)
+        hive, subpath = self._parse_path(path)
+        handle = win32.RegCreateKey(hive, subpath)
+        return RegistryKey(path, handle)
+
+    def subkeys(self, path):
+        """
+        Returns a list of subkeys for the given Registry key.
+
+        @type  path: str
+        @param path: Registry key path.
+
+        @rtype:  list(str)
+        @return: List of subkey names.
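A minimal sketch of the dictionary-style access described above (illustrative only, not part of the vendored file; the key path is just an example and the import path follows the vendored module location):

# Sketch only: read values from a well-known key of the local Registry.
from winappdbg.registry import Registry

reg = Registry()   # local Registry; pass a machine name for remote access
run_key = reg['HKLM\\Software\\Microsoft\\Windows\\CurrentVersion\\Run']
for name, value in run_key.items():   # dictionary-style value access
    print("%s = %r" % (name, value))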
+ """ + result = list() + hive, subpath = self._parse_path(path) + with win32.RegOpenKey(hive, subpath) as handle: + index = 0 + while 1: + name = win32.RegEnumKey(handle, index) + if name is None: + break + result.append(name) + index += 1 + return result + + def iterate(self, path): + """ + Returns a recursive iterator on the specified key and its subkeys. + + @type path: str + @param path: Registry key path. + + @rtype: iterator + @return: Recursive iterator that returns Registry key paths. + + @raise KeyError: The specified path does not exist. + """ + if path.endswith('\\'): + path = path[:-1] + if not self.has_key(path): + raise KeyError(path) + stack = collections.deque() + stack.appendleft(path) + return self.__iterate(stack) + + def iterkeys(self): + """ + Returns an iterator that crawls the entire Windows Registry. + """ + stack = collections.deque(self._hives) + stack.reverse() + return self.__iterate(stack) + + def __iterate(self, stack): + while stack: + path = stack.popleft() + yield path + try: + subkeys = self.subkeys(path) + except WindowsError: + continue + prefix = path + '\\' + subkeys = [prefix + name for name in subkeys] + stack.extendleft(subkeys) diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/search.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/search.py new file mode 100644 index 00000000..6efaea6d --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/search.py @@ -0,0 +1,665 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Process memory finder +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Process memory search. 
+ +@group Memory search: + Search, + Pattern, + BytePattern, + TextPattern, + RegExpPattern, + HexPattern +""" + +__revision__ = "$Id$" + +__all__ = [ + 'Search', + 'Pattern', + 'BytePattern', + 'TextPattern', + 'RegExpPattern', + 'HexPattern', + ] + +from winappdbg.textio import HexInput +from winappdbg.util import StaticClass, MemoryAddresses +from winappdbg import win32 + +import warnings + +try: + # http://pypi.python.org/pypi/regex + import regex as re +except ImportError: + import re + +#============================================================================== + +class Pattern (object): + """ + Base class for search patterns. + + The following L{Pattern} subclasses are provided by WinAppDbg: + - L{BytePattern} + - L{TextPattern} + - L{RegExpPattern} + - L{HexPattern} + + @see: L{Search.search_process} + """ + + def __init__(self, pattern): + """ + Class constructor. + + The only mandatory argument should be the pattern string. + + This method B{MUST} be reimplemented by subclasses of L{Pattern}. + """ + raise NotImplementedError() + + def __len__(self): + """ + Returns the maximum expected length of the strings matched by this + pattern. Exact behavior is implementation dependent. + + Ideally it should be an exact value, but in some cases it's not + possible to calculate so an upper limit should be returned instead. + + If that's not possible either an exception must be raised. + + This value will be used to calculate the required buffer size when + doing buffered searches. + + This method B{MUST} be reimplemented by subclasses of L{Pattern}. + """ + raise NotImplementedError() + + def read(self, process, address, size): + """ + Reads the requested number of bytes from the process memory at the + given address. + + Subclasses of L{Pattern} tipically don't need to reimplement this + method. + """ + return process.read(address, size) + + def find(self, buffer, pos = None): + """ + Searches for the pattern in the given buffer, optionally starting at + the given position within the buffer. + + This method B{MUST} be reimplemented by subclasses of L{Pattern}. + + @type buffer: str + @param buffer: Buffer to search on. + + @type pos: int + @param pos: + (Optional) Position within the buffer to start searching from. + + @rtype: tuple( int, int ) + @return: Tuple containing the following: + - Position within the buffer where a match is found, or C{-1} if + no match was found. + - Length of the matched data if a match is found, or undefined if + no match was found. + """ + raise NotImplementedError() + + def found(self, address, size, data): + """ + This method gets called when a match is found. + + This allows subclasses of L{Pattern} to filter out unwanted results, + or modify the results before giving them to the caller of + L{Search.search_process}. + + If the return value is C{None} the result is skipped. + + Subclasses of L{Pattern} don't need to reimplement this method unless + filtering is needed. + + @type address: int + @param address: The memory address where the pattern was found. + + @type size: int + @param size: The size of the data that matches the pattern. + + @type data: str + @param data: The data that matches the pattern. + + @rtype: tuple( int, int, str ) + @return: Tuple containing the following: + * The memory address where the pattern was found. + * The size of the data that matches the pattern. + * The data that matches the pattern. 
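+
+        Example (an illustrative sketch of a filtering subclass; the
+        4 byte alignment rule is arbitrary)::
+            class AlignedBytePattern (BytePattern):
+                def found(self, address, size, data):
+                    if address & 3:     # drop matches not aligned to 4 bytes
+                        return None
+                    return (address, size, data)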
+ """ + return (address, size, data) + +#------------------------------------------------------------------------------ + +class BytePattern (Pattern): + """ + Fixed byte pattern. + + @type pattern: str + @ivar pattern: Byte string to search for. + + @type length: int + @ivar length: Length of the byte pattern. + """ + + def __init__(self, pattern): + """ + @type pattern: str + @param pattern: Byte string to search for. + """ + self.pattern = str(pattern) + self.length = len(pattern) + + def __len__(self): + """ + Returns the exact length of the pattern. + + @see: L{Pattern.__len__} + """ + return self.length + + def find(self, buffer, pos = None): + return buffer.find(self.pattern, pos), self.length + +#------------------------------------------------------------------------------ + +# FIXME: case insensitive compat.unicode searches are probably buggy! + +class TextPattern (BytePattern): + """ + Text pattern. + + @type isUnicode: bool + @ivar isUnicode: C{True} if the text to search for is a compat.unicode string, + C{False} otherwise. + + @type encoding: str + @ivar encoding: Encoding for the text parameter. + Only used when the text to search for is a Unicode string. + Don't change unless you know what you're doing! + + @type caseSensitive: bool + @ivar caseSensitive: C{True} of the search is case sensitive, + C{False} otherwise. + """ + + def __init__(self, text, encoding = "utf-16le", caseSensitive = False): + """ + @type text: str or compat.unicode + @param text: Text to search for. + + @type encoding: str + @param encoding: (Optional) Encoding for the text parameter. + Only used when the text to search for is a Unicode string. + Don't change unless you know what you're doing! + + @type caseSensitive: bool + @param caseSensitive: C{True} of the search is case sensitive, + C{False} otherwise. + """ + self.isUnicode = isinstance(text, compat.unicode) + self.encoding = encoding + self.caseSensitive = caseSensitive + if not self.caseSensitive: + pattern = text.lower() + if self.isUnicode: + pattern = text.encode(encoding) + super(TextPattern, self).__init__(pattern) + + def read(self, process, address, size): + data = super(TextPattern, self).read(address, size) + if not self.caseSensitive: + if self.isUnicode: + try: + encoding = self.encoding + text = data.decode(encoding, "replace") + text = text.lower() + new_data = text.encode(encoding, "replace") + if len(data) == len(new_data): + data = new_data + else: + data = data.lower() + except Exception: + data = data.lower() + else: + data = data.lower() + return data + + def found(self, address, size, data): + if self.isUnicode: + try: + data = compat.unicode(data, self.encoding) + except Exception: +## traceback.print_exc() # XXX DEBUG + return None + return (address, size, data) + +#------------------------------------------------------------------------------ + +class RegExpPattern (Pattern): + """ + Regular expression pattern. + + @type pattern: str + @ivar pattern: Regular expression in text form. + + @type flags: int + @ivar flags: Regular expression flags. + + @type regexp: re.compile + @ivar regexp: Regular expression in compiled form. + + @type maxLength: int + @ivar maxLength: + Maximum expected length of the strings matched by this regular + expression. + + This value will be used to calculate the required buffer size when + doing buffered searches. + + Ideally it should be an exact value, but in some cases it's not + possible to calculate so an upper limit should be given instead. 
+ + If that's not possible either, C{None} should be used. That will + cause an exception to be raised if this pattern is used in a + buffered search. + """ + + def __init__(self, regexp, flags = 0, maxLength = None): + """ + @type regexp: str + @param regexp: Regular expression string. + + @type flags: int + @param flags: Regular expression flags. + + @type maxLength: int + @param maxLength: Maximum expected length of the strings matched by + this regular expression. + + This value will be used to calculate the required buffer size when + doing buffered searches. + + Ideally it should be an exact value, but in some cases it's not + possible to calculate so an upper limit should be given instead. + + If that's not possible either, C{None} should be used. That will + cause an exception to be raised if this pattern is used in a + buffered search. + """ + self.pattern = regexp + self.flags = flags + self.regexp = re.compile(regexp, flags) + self.maxLength = maxLength + + def __len__(self): + """ + Returns the maximum expected length of the strings matched by this + pattern. This value is taken from the C{maxLength} argument of the + constructor if this class. + + Ideally it should be an exact value, but in some cases it's not + possible to calculate so an upper limit should be returned instead. + + If that's not possible either an exception must be raised. + + This value will be used to calculate the required buffer size when + doing buffered searches. + """ + if self.maxLength is None: + raise NotImplementedError() + return self.maxLength + + def find(self, buffer, pos = None): + if not pos: # make sure pos is an int + pos = 0 + match = self.regexp.search(buffer, pos) + if match: + start, end = match.span() + return start, end - start + return -1, 0 + +#------------------------------------------------------------------------------ + +class HexPattern (RegExpPattern): + """ + Hexadecimal pattern. + + Hex patterns must be in this form:: + "68 65 6c 6c 6f 20 77 6f 72 6c 64" # "hello world" + + Spaces are optional. Capitalization of hex digits doesn't matter. + This is exactly equivalent to the previous example:: + "68656C6C6F20776F726C64" # "hello world" + + Wildcards are allowed, in the form of a C{?} sign in any hex digit:: + "5? 5? c3" # pop register / pop register / ret + "b8 ?? ?? ?? ??" # mov eax, immediate value + + @type pattern: str + @ivar pattern: Hexadecimal pattern. + """ + + def __new__(cls, pattern): + """ + If the pattern is completely static (no wildcards are present) a + L{BytePattern} is created instead. That's because searching for a + fixed byte pattern is faster than searching for a regular expression. + """ + if '?' not in pattern: + return BytePattern( HexInput.hexadecimal(pattern) ) + return object.__new__(cls, pattern) + + def __init__(self, hexa): + """ + Hex patterns must be in this form:: + "68 65 6c 6c 6f 20 77 6f 72 6c 64" # "hello world" + + Spaces are optional. Capitalization of hex digits doesn't matter. + This is exactly equivalent to the previous example:: + "68656C6C6F20776F726C64" # "hello world" + + Wildcards are allowed, in the form of a C{?} sign in any hex digit:: + "5? 5? c3" # pop register / pop register / ret + "b8 ?? ?? ?? ??" # mov eax, immediate value + + @type hexa: str + @param hexa: Pattern to search for. 
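+
+        Example (a sketch; note that, as described in L{__new__} above, a
+        fully static pattern silently becomes a L{BytePattern})::
+            HexPattern("5? 5? c3")      # wildcards: regular expression search
+            HexPattern("68656C6C6F")    # static: plain substring search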
+ """ + maxLength = len([x for x in hexa + if x in "?0123456789ABCDEFabcdef"]) / 2 + super(HexPattern, self).__init__(HexInput.pattern(hexa), + maxLength = maxLength) + +#============================================================================== + +class Search (StaticClass): + """ + Static class to group the search functionality. + + Do not instance this class! Use its static methods instead. + """ + + # TODO: aligned searches + # TODO: method to coalesce search results + # TODO: search memory dumps + # TODO: search non-ascii C strings + + @staticmethod + def search_process(process, pattern, minAddr = None, + maxAddr = None, + bufferPages = None, + overlapping = False): + """ + Search for the given pattern within the process memory. + + @type process: L{Process} + @param process: Process to search. + + @type pattern: L{Pattern} + @param pattern: Pattern to search for. + It must be an instance of a subclass of L{Pattern}. + + The following L{Pattern} subclasses are provided by WinAppDbg: + - L{BytePattern} + - L{TextPattern} + - L{RegExpPattern} + - L{HexPattern} + + You can also write your own subclass of L{Pattern} for customized + searches. + + @type minAddr: int + @param minAddr: (Optional) Start the search at this memory address. + + @type maxAddr: int + @param maxAddr: (Optional) Stop the search at this memory address. + + @type bufferPages: int + @param bufferPages: (Optional) Number of memory pages to buffer when + performing the search. Valid values are: + - C{0} or C{None}: + Automatically determine the required buffer size. May not give + complete results for regular expressions that match variable + sized strings. + - C{> 0}: Set the buffer size, in memory pages. + - C{< 0}: Disable buffering entirely. This may give you a little + speed gain at the cost of an increased memory usage. If the + target process has very large contiguous memory regions it may + actually be slower or even fail. It's also the only way to + guarantee complete results for regular expressions that match + variable sized strings. + + @type overlapping: bool + @param overlapping: C{True} to allow overlapping results, C{False} + otherwise. + + Overlapping results yield the maximum possible number of results. + + For example, if searching for "AAAA" within "AAAAAAAA" at address + C{0x10000}, when overlapping is turned off the following matches + are yielded:: + (0x10000, 4, "AAAA") + (0x10004, 4, "AAAA") + + If overlapping is turned on, the following matches are yielded:: + (0x10000, 4, "AAAA") + (0x10001, 4, "AAAA") + (0x10002, 4, "AAAA") + (0x10003, 4, "AAAA") + (0x10004, 4, "AAAA") + + As you can see, the middle results are overlapping the last two. + + @rtype: iterator of tuple( int, int, str ) + @return: An iterator of tuples. Each tuple contains the following: + - The memory address where the pattern was found. + - The size of the data that matches the pattern. + - The data that matches the pattern. + + @raise WindowsError: An error occurred when querying or reading the + process memory. + """ + + # Do some namespace lookups of symbols we'll be using frequently. + MEM_COMMIT = win32.MEM_COMMIT + PAGE_GUARD = win32.PAGE_GUARD + page = MemoryAddresses.pageSize + read = pattern.read + find = pattern.find + + # Calculate the address range. + if minAddr is None: + minAddr = 0 + if maxAddr is None: + maxAddr = win32.LPVOID(-1).value # XXX HACK + + # Calculate the buffer size from the number of pages. 
+ if bufferPages is None: + try: + size = MemoryAddresses.\ + align_address_to_page_end(len(pattern)) + page + except NotImplementedError: + size = None + elif bufferPages > 0: + size = page * (bufferPages + 1) + else: + size = None + + # Get the memory map of the process. + memory_map = process.iter_memory_map(minAddr, maxAddr) + + # Perform search with buffering enabled. + if size: + + # Loop through all memory blocks containing data. + buffer = "" # buffer to hold the memory data + prev_addr = 0 # previous memory block address + last = 0 # position of the last match + delta = 0 # delta of last read address and start of buffer + for mbi in memory_map: + + # Skip blocks with no data to search on. + if not mbi.has_content(): + continue + + # Get the address and size of this block. + address = mbi.BaseAddress # current address to search on + block_size = mbi.RegionSize # total size of the block + if address >= maxAddr: + break + end = address + block_size # end address of the block + + # If the block is contiguous to the previous block, + # coalesce the new data in the buffer. + if delta and address == prev_addr: + buffer += read(process, address, page) + + # If not, clear the buffer and read new data. + else: + buffer = read(process, address, min(size, block_size)) + last = 0 + delta = 0 + + # Search for the pattern in this block. + while 1: + + # Yield each match of the pattern in the buffer. + pos, length = find(buffer, last) + while pos >= last: + match_addr = address + pos - delta + if minAddr <= match_addr < maxAddr: + result = pattern.found( + match_addr, length, + buffer [ pos : pos + length ] ) + if result is not None: + yield result + if overlapping: + last = pos + 1 + else: + last = pos + length + pos, length = find(buffer, last) + + # Advance to the next page. + address = address + page + block_size = block_size - page + prev_addr = address + + # Fix the position of the last match. + last = last - page + if last < 0: + last = 0 + + # Remove the first page in the buffer. + buffer = buffer[ page : ] + delta = page + + # If we haven't reached the end of the block yet, + # read the next page in the block and keep seaching. + if address < end: + buffer = buffer + read(process, address, page) + + # Otherwise, we're done searching this block. + else: + break + + # Perform search with buffering disabled. + else: + + # Loop through all memory blocks containing data. + for mbi in memory_map: + + # Skip blocks with no data to search on. + if not mbi.has_content(): + continue + + # Get the address and size of this block. + address = mbi.BaseAddress + block_size = mbi.RegionSize + if address >= maxAddr: + break; + + # Read the whole memory region. + buffer = process.read(address, block_size) + + # Search for the pattern in this region. + pos, length = find(buffer) + last = 0 + while pos >= last: + match_addr = address + pos + if minAddr <= match_addr < maxAddr: + result = pattern.found( + match_addr, length, + buffer [ pos : pos + length ] ) + if result is not None: + yield result + if overlapping: + last = pos + 1 + else: + last = pos + length + pos, length = find(buffer, last) + + @classmethod + def extract_ascii_strings(cls, process, minSize = 4, maxSize = 1024): + """ + Extract ASCII strings from the process memory. + + @type process: L{Process} + @param process: Process to search. + + @type minSize: int + @param minSize: (Optional) Minimum size of the strings to search for. + + @type maxSize: int + @param maxSize: (Optional) Maximum size of the strings to search for. 
+ + @rtype: iterator of tuple(int, int, str) + @return: Iterator of strings extracted from the process memory. + Each tuple contains the following: + - The memory address where the string was found. + - The size of the string. + - The string. + """ + regexp = r"[\s\w\!\@\#\$\%%\^\&\*\(\)\{\}\[\]\~\`\'\"\:\;\.\,\\\/\-\+\=\_\<\>]{%d,%d}\0" % (minSize, maxSize) + pattern = RegExpPattern(regexp, 0, maxSize) + return cls.search_process(process, pattern, overlapping = False) diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/sql.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/sql.py new file mode 100644 index 00000000..d9741105 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/sql.py @@ -0,0 +1,993 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +SQL database storage support. + +@group Crash reporting: + CrashDAO +""" + +__revision__ = "$Id$" + +__all__ = ['CrashDAO'] + +import sqlite3 +import datetime +import warnings + +from sqlalchemy import create_engine, Column, ForeignKey, Sequence +from sqlalchemy.engine.url import URL +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.interfaces import PoolListener +from sqlalchemy.orm import sessionmaker, deferred +from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound +from sqlalchemy.types import Integer, BigInteger, Boolean, DateTime, String, \ + LargeBinary, Enum, VARCHAR +from sqlalchemy.sql.expression import asc, desc + +from crash import Crash, Marshaller, pickle, HIGHEST_PROTOCOL +from textio import CrashDump +import win32 + +#------------------------------------------------------------------------------ + +try: + from decorator import decorator +except ImportError: + import functools + def decorator(w): + """ + The C{decorator} module was not found. 
You can install it from: + U{http://pypi.python.org/pypi/decorator/} + """ + def d(fn): + @functools.wraps(fn) + def x(*argv, **argd): + return w(fn, *argv, **argd) + return x + return d + +#------------------------------------------------------------------------------ + +@compiles(String, 'mysql') +@compiles(VARCHAR, 'mysql') +def _compile_varchar_mysql(element, compiler, **kw): + """MySQL hack to avoid the "VARCHAR requires a length" error.""" + if not element.length or element.length == 'max': + return "TEXT" + else: + return compiler.visit_VARCHAR(element, **kw) + +#------------------------------------------------------------------------------ + +class _SQLitePatch (PoolListener): + """ + Used internally by L{BaseDAO}. + + After connecting to an SQLite database, ensure that the foreign keys + support is enabled. If not, abort the connection. + + @see: U{http://sqlite.org/foreignkeys.html} + """ + def connect(dbapi_connection, connection_record): + """ + Called once by SQLAlchemy for each new SQLite DB-API connection. + + Here is where we issue some PRAGMA statements to configure how we're + going to access the SQLite database. + + @param dbapi_connection: + A newly connected raw SQLite DB-API connection. + + @param connection_record: + Unused by this method. + """ + try: + cursor = dbapi_connection.cursor() + try: + cursor.execute("PRAGMA foreign_keys = ON;") + cursor.execute("PRAGMA foreign_keys;") + if cursor.fetchone()[0] != 1: + raise Exception() + finally: + cursor.close() + except Exception: + dbapi_connection.close() + raise sqlite3.Error() + +#------------------------------------------------------------------------------ + +class BaseDTO (object): + """ + Customized declarative base for SQLAlchemy. + """ + + __table_args__ = { + + # Don't use MyISAM in MySQL. It doesn't support ON DELETE CASCADE. + 'mysql_engine': 'InnoDB', + + # Don't use BlitzDB in Drizzle. It doesn't support foreign keys. + 'drizzle_engine': 'InnoDB', + + # Collate to UTF-8. + 'mysql_charset': 'utf8', + + } + +BaseDTO = declarative_base(cls = BaseDTO) + +#------------------------------------------------------------------------------ + +# TODO: if using mssql, check it's at least SQL Server 2005 +# (LIMIT and OFFSET support is required). +# TODO: if using mysql, check it's at least MySQL 5.0.3 +# (nested transactions are required). +# TODO: maybe in mysql check the tables are not myisam? +# TODO: maybe create the database if it doesn't exist? +# TODO: maybe add a method to compact the database? +# http://stackoverflow.com/questions/1875885 +# http://www.sqlite.org/lang_vacuum.html +# http://dev.mysql.com/doc/refman/5.1/en/optimize-table.html +# http://msdn.microsoft.com/en-us/library/ms174459(v=sql.90).aspx + +class BaseDAO (object): + """ + Data Access Object base class. + + @type _url: sqlalchemy.url.URL + @ivar _url: Database connection URL. + + @type _dialect: str + @ivar _dialect: SQL dialect currently being used. + + @type _driver: str + @ivar _driver: Name of the database driver currently being used. + To get the actual Python module use L{_url}.get_driver() instead. + + @type _session: sqlalchemy.orm.Session + @ivar _session: Database session object. + + @type _new_session: class + @cvar _new_session: Custom configured Session class used to create the + L{_session} instance variable. + + @type _echo: bool + @cvar _echo: Set to C{True} to print all SQL queries to standard output. 
+ """ + + _echo = False + + _new_session = sessionmaker(autoflush = True, + autocommit = True, + expire_on_commit = True, + weak_identity_map = True) + + def __init__(self, url, creator = None): + """ + Connect to the database using the given connection URL. + + The current implementation uses SQLAlchemy and so it will support + whatever database said module supports. + + @type url: str + @param url: + URL that specifies the database to connect to. + + Some examples: + - Opening an SQLite file: + C{dao = CrashDAO("sqlite:///C:\\some\\path\\database.sqlite")} + - Connecting to a locally installed SQL Express database: + C{dao = CrashDAO("mssql://.\\SQLEXPRESS/Crashes?trusted_connection=yes")} + - Connecting to a MySQL database running locally, using the + C{oursql} library, authenticating as the "winappdbg" user with + no password: + C{dao = CrashDAO("mysql+oursql://winappdbg@localhost/Crashes")} + - Connecting to a Postgres database running locally, + authenticating with user and password: + C{dao = CrashDAO("postgresql://winappdbg:winappdbg@localhost/Crashes")} + + For more information see the C{SQLAlchemy} documentation online: + U{http://docs.sqlalchemy.org/en/latest/core/engines.html} + + Note that in all dialects except for SQLite the database + must already exist. The tables schema, however, is created + automatically when connecting for the first time. + + To create the database in MSSQL, you can use the + U{SQLCMD} + command:: + sqlcmd -Q "CREATE DATABASE Crashes" + + In MySQL you can use something like the following:: + mysql -u root -e "CREATE DATABASE Crashes;" + + And in Postgres:: + createdb Crashes -h localhost -U winappdbg -p winappdbg -O winappdbg + + Some small changes to the schema may be tolerated (for example, + increasing the maximum length of string columns, or adding new + columns with default values). Of course, it's best to test it + first before making changes in a live database. This all depends + very much on the SQLAlchemy version you're using, but it's best + to use the latest version always. + + @type creator: callable + @param creator: (Optional) Callback function that creates the SQL + database connection. + + Normally it's not necessary to use this argument. However in some + odd cases you may need to customize the database connection. + """ + + # Parse the connection URL. + parsed_url = URL(url) + schema = parsed_url.drivername + if '+' in schema: + dialect, driver = schema.split('+') + else: + dialect, driver = schema, 'base' + dialect = dialect.strip().lower() + driver = driver.strip() + + # Prepare the database engine arguments. + arguments = {'echo' : self._echo} + if dialect == 'sqlite': + arguments['module'] = sqlite3.dbapi2 + arguments['listeners'] = [_SQLitePatch()] + if creator is not None: + arguments['creator'] = creator + + # Load the database engine. + engine = create_engine(url, **arguments) + + # Create a new session. + session = self._new_session(bind = engine) + + # Create the required tables if they don't exist. + BaseDTO.metadata.create_all(engine) + # TODO: create a dialect specific index on the "signature" column. + + # Set the instance properties. + self._url = parsed_url + self._driver = driver + self._dialect = dialect + self._session = session + + def _transactional(self, method, *argv, **argd): + """ + Begins a transaction and calls the given DAO method. + + If the method executes successfully the transaction is commited. + + If the method fails, the transaction is rolled back. 
+ + @type method: callable + @param method: Bound method of this class or one of its subclasses. + The first argument will always be C{self}. + + @return: The return value of the method call. + + @raise Exception: Any exception raised by the method. + """ + self._session.begin(subtransactions = True) + try: + result = method(self, *argv, **argd) + self._session.commit() + return result + except: + self._session.rollback() + raise + +#------------------------------------------------------------------------------ + +@decorator +def Transactional(fn, self, *argv, **argd): + """ + Decorator that wraps DAO methods to handle transactions automatically. + + It may only work with subclasses of L{BaseDAO}. + """ + return self._transactional(fn, *argv, **argd) + +#============================================================================== + +# Generates all possible memory access flags. +def _gen_valid_access_flags(): + f = [] + for a1 in ("---", "R--", "RW-", "RC-", "--X", "R-X", "RWX", "RCX", "???"): + for a2 in ("G", "-"): + for a3 in ("N", "-"): + for a4 in ("W", "-"): + f.append("%s %s%s%s" % (a1, a2, a3, a4)) + return tuple(f) +_valid_access_flags = _gen_valid_access_flags() + +# Enumerated types for the memory table. +n_MEM_ACCESS_ENUM = {"name" : "MEM_ACCESS_ENUM"} +n_MEM_ALLOC_ACCESS_ENUM = {"name" : "MEM_ALLOC_ACCESS_ENUM"} +MEM_ACCESS_ENUM = Enum(*_valid_access_flags, + **n_MEM_ACCESS_ENUM) +MEM_ALLOC_ACCESS_ENUM = Enum(*_valid_access_flags, + **n_MEM_ALLOC_ACCESS_ENUM) +MEM_STATE_ENUM = Enum("Reserved", "Commited", "Free", "Unknown", + name = "MEM_STATE_ENUM") +MEM_TYPE_ENUM = Enum("Image", "Mapped", "Private", "Unknown", + name = "MEM_TYPE_ENUM") + +# Cleanup the namespace. +del _gen_valid_access_flags +del _valid_access_flags +del n_MEM_ACCESS_ENUM +del n_MEM_ALLOC_ACCESS_ENUM + +#------------------------------------------------------------------------------ + +class MemoryDTO (BaseDTO): + """ + Database mapping for memory dumps. + """ + + # Declare the table mapping. + __tablename__ = 'memory' + id = Column(Integer, Sequence(__tablename__ + '_seq'), + primary_key = True, autoincrement = True) + crash_id = Column(Integer, ForeignKey('crashes.id', + ondelete = 'CASCADE', + onupdate = 'CASCADE'), + nullable = False) + address = Column(BigInteger, nullable = False, index = True) + size = Column(BigInteger, nullable = False) + state = Column(MEM_STATE_ENUM, nullable = False) + access = Column(MEM_ACCESS_ENUM) + type = Column(MEM_TYPE_ENUM) + alloc_base = Column(BigInteger) + alloc_access = Column(MEM_ALLOC_ACCESS_ENUM) + filename = Column(String) + content = deferred(Column(LargeBinary)) + + def __init__(self, crash_id, mbi): + """ + Process a L{win32.MemoryBasicInformation} object for database storage. + """ + + # Crash ID. + self.crash_id = crash_id + + # Address. + self.address = mbi.BaseAddress + + # Size. + self.size = mbi.RegionSize + + # State (free or allocated). + if mbi.State == win32.MEM_RESERVE: + self.state = "Reserved" + elif mbi.State == win32.MEM_COMMIT: + self.state = "Commited" + elif mbi.State == win32.MEM_FREE: + self.state = "Free" + else: + self.state = "Unknown" + + # Page protection bits (R/W/X/G). + if mbi.State != win32.MEM_COMMIT: + self.access = None + else: + self.access = self._to_access(mbi.Protect) + + # Type (file mapping, executable image, or private memory). 
+ if mbi.Type == win32.MEM_IMAGE: + self.type = "Image" + elif mbi.Type == win32.MEM_MAPPED: + self.type = "Mapped" + elif mbi.Type == win32.MEM_PRIVATE: + self.type = "Private" + elif mbi.Type == 0: + self.type = None + else: + self.type = "Unknown" + + # Allocation info. + self.alloc_base = mbi.AllocationBase + if not mbi.AllocationProtect: + self.alloc_access = None + else: + self.alloc_access = self._to_access(mbi.AllocationProtect) + + # Filename (for memory mappings). + try: + self.filename = mbi.filename + except AttributeError: + self.filename = None + + # Memory contents. + try: + self.content = mbi.content + except AttributeError: + self.content = None + + def _to_access(self, protect): + if protect & win32.PAGE_NOACCESS: + access = "--- " + elif protect & win32.PAGE_READONLY: + access = "R-- " + elif protect & win32.PAGE_READWRITE: + access = "RW- " + elif protect & win32.PAGE_WRITECOPY: + access = "RC- " + elif protect & win32.PAGE_EXECUTE: + access = "--X " + elif protect & win32.PAGE_EXECUTE_READ: + access = "R-X " + elif protect & win32.PAGE_EXECUTE_READWRITE: + access = "RWX " + elif protect & win32.PAGE_EXECUTE_WRITECOPY: + access = "RCX " + else: + access = "??? " + if protect & win32.PAGE_GUARD: + access += "G" + else: + access += "-" + if protect & win32.PAGE_NOCACHE: + access += "N" + else: + access += "-" + if protect & win32.PAGE_WRITECOMBINE: + access += "W" + else: + access += "-" + return access + + def toMBI(self, getMemoryDump = False): + """ + Returns a L{win32.MemoryBasicInformation} object using the data + retrieved from the database. + + @type getMemoryDump: bool + @param getMemoryDump: (Optional) If C{True} retrieve the memory dump. + Defaults to C{False} since this may be a costly operation. + + @rtype: L{win32.MemoryBasicInformation} + @return: Memory block information. 
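+
+        Example (a sketch; C{dto} stands for a L{MemoryDTO} row previously
+        loaded through SQLAlchemy)::
+            mbi = dto.toMBI(getMemoryDump = True)
+            print("%08x %d" % (mbi.BaseAddress, mbi.RegionSize))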
+ """ + mbi = win32.MemoryBasicInformation() + mbi.BaseAddress = self.address + mbi.RegionSize = self.size + mbi.State = self._parse_state(self.state) + mbi.Protect = self._parse_access(self.access) + mbi.Type = self._parse_type(self.type) + if self.alloc_base is not None: + mbi.AllocationBase = self.alloc_base + else: + mbi.AllocationBase = mbi.BaseAddress + if self.alloc_access is not None: + mbi.AllocationProtect = self._parse_access(self.alloc_access) + else: + mbi.AllocationProtect = mbi.Protect + if self.filename is not None: + mbi.filename = self.filename + if getMemoryDump and self.content is not None: + mbi.content = self.content + return mbi + + @staticmethod + def _parse_state(state): + if state: + if state == "Reserved": + return win32.MEM_RESERVE + if state == "Commited": + return win32.MEM_COMMIT + if state == "Free": + return win32.MEM_FREE + return 0 + + @staticmethod + def _parse_type(type): + if type: + if type == "Image": + return win32.MEM_IMAGE + if type == "Mapped": + return win32.MEM_MAPPED + if type == "Private": + return win32.MEM_PRIVATE + return -1 + return 0 + + @staticmethod + def _parse_access(access): + if not access: + return 0 + perm = access[:3] + if perm == "R--": + protect = win32.PAGE_READONLY + elif perm == "RW-": + protect = win32.PAGE_READWRITE + elif perm == "RC-": + protect = win32.PAGE_WRITECOPY + elif perm == "--X": + protect = win32.PAGE_EXECUTE + elif perm == "R-X": + protect = win32.PAGE_EXECUTE_READ + elif perm == "RWX": + protect = win32.PAGE_EXECUTE_READWRITE + elif perm == "RCX": + protect = win32.PAGE_EXECUTE_WRITECOPY + else: + protect = win32.PAGE_NOACCESS + if access[5] == "G": + protect = protect | win32.PAGE_GUARD + if access[6] == "N": + protect = protect | win32.PAGE_NOCACHE + if access[7] == "W": + protect = protect | win32.PAGE_WRITECOMBINE + return protect + +#------------------------------------------------------------------------------ + +class CrashDTO (BaseDTO): + """ + Database mapping for crash dumps. + """ + + # Table name. + __tablename__ = "crashes" + + # Primary key. + id = Column(Integer, Sequence(__tablename__ + '_seq'), + primary_key = True, autoincrement = True) + + # Timestamp. + timestamp = Column(DateTime, nullable = False, index = True) + + # Exploitability test. + exploitable = Column(Integer, nullable = False) + exploitability_rule = Column(String(32), nullable = False) + exploitability_rating = Column(String(32), nullable = False) + exploitability_desc = Column(String, nullable = False) + + # Platform description. + os = Column(String(32), nullable = False) + arch = Column(String(16), nullable = False) + bits = Column(Integer, nullable = False) # Integer(4) is deprecated :( + + # Event description. + event = Column(String, nullable = False) + pid = Column(Integer, nullable = False) + tid = Column(Integer, nullable = False) + pc = Column(BigInteger, nullable = False) + sp = Column(BigInteger, nullable = False) + fp = Column(BigInteger, nullable = False) + pc_label = Column(String, nullable = False) + + # Exception description. + exception = Column(String(64)) + exception_text = Column(String(64)) + exception_address = Column(BigInteger) + exception_label = Column(String) + first_chance = Column(Boolean) + fault_type = Column(Integer) + fault_address = Column(BigInteger) + fault_label = Column(String) + fault_disasm = Column(String) + stack_trace = Column(String) + + # Environment description. + command_line = Column(String) + environment = Column(String) + + # Debug strings. 
+ debug_string = Column(String) + + # Notes. + notes = Column(String) + + # Heuristic signature. + signature = Column(String, nullable = False) + + # Pickled Crash object, minus the memory dump. + data = deferred(Column(LargeBinary, nullable = False)) + + def __init__(self, crash): + """ + @type crash: Crash + @param crash: L{Crash} object to store into the database. + """ + + # Timestamp and signature. + self.timestamp = datetime.datetime.fromtimestamp( crash.timeStamp ) + self.signature = pickle.dumps(crash.signature, protocol = 0) + + # Marshalled Crash object, minus the memory dump. + # This code is *not* thread safe! + memoryMap = crash.memoryMap + try: + crash.memoryMap = None + self.data = buffer( Marshaller.dumps(crash) ) + finally: + crash.memoryMap = memoryMap + + # Exploitability test. + self.exploitability_rating, \ + self.exploitability_rule, \ + self.exploitability_desc = crash.isExploitable() + + # Exploitability test as an integer result (for sorting). + self.exploitable = [ + "Not an exception", + "Not exploitable", + "Not likely exploitable", + "Unknown", + "Probably exploitable", + "Exploitable", + ].index(self.exploitability_rating) + + # Platform description. + self.os = crash.os + self.arch = crash.arch + self.bits = crash.bits + + # Event description. + self.event = crash.eventName + self.pid = crash.pid + self.tid = crash.tid + self.pc = crash.pc + self.sp = crash.sp + self.fp = crash.fp + self.pc_label = crash.labelPC + + # Exception description. + self.exception = crash.exceptionName + self.exception_text = crash.exceptionDescription + self.exception_address = crash.exceptionAddress + self.exception_label = crash.exceptionLabel + self.first_chance = crash.firstChance + self.fault_type = crash.faultType + self.fault_address = crash.faultAddress + self.fault_label = crash.faultLabel + self.fault_disasm = CrashDump.dump_code( crash.faultDisasm, + crash.pc ) + self.stack_trace = CrashDump.dump_stack_trace_with_labels( + crash.stackTracePretty ) + + # Command line. + self.command_line = crash.commandLine + + # Environment. + if crash.environment: + envList = crash.environment.items() + envList.sort() + environment = '' + for envKey, envVal in envList: + # Must concatenate here instead of using a substitution, + # so strings can be automatically promoted to Unicode. + environment += envKey + '=' + envVal + '\n' + if environment: + self.environment = environment + + # Debug string. + self.debug_string = crash.debugString + + # Notes. + self.notes = crash.notesReport() + + def toCrash(self, getMemoryDump = False): + """ + Returns a L{Crash} object using the data retrieved from the database. + + @type getMemoryDump: bool + @param getMemoryDump: If C{True} retrieve the memory dump. + Defaults to C{False} since this may be a costly operation. + + @rtype: L{Crash} + @return: Crash object. + """ + crash = Marshaller.loads(str(self.data)) + if not isinstance(crash, Crash): + raise TypeError( + "Expected Crash instance, got %s instead" % type(crash)) + crash._rowid = self.id + if not crash.memoryMap: + memory = getattr(self, "memory", []) + if memory: + crash.memoryMap = [dto.toMBI(getMemoryDump) for dto in memory] + return crash + +#============================================================================== + +# TODO: add a method to modify already stored crash dumps. + +class CrashDAO (BaseDAO): + """ + Data Access Object to read, write and search for L{Crash} objects in a + database. 
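+
+    Example (an illustrative sketch; the connection URL and the C{crash}
+    object are assumed to already exist)::
+        dao = CrashDAO("sqlite:///crashes.sqlite")
+        dao.add(crash, allow_duplicates = False)
+        for old_crash in dao.find(limit = 10):
+            print(old_crash.timeStamp)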
+ """ + + @Transactional + def add(self, crash, allow_duplicates = True): + """ + Add a new crash dump to the database, optionally filtering them by + signature to avoid duplicates. + + @type crash: L{Crash} + @param crash: Crash object. + + @type allow_duplicates: bool + @param allow_duplicates: (Optional) + C{True} to always add the new crash dump. + C{False} to only add the crash dump if no other crash with the + same signature is found in the database. + + Sometimes, your fuzzer turns out to be I{too} good. Then you find + youself browsing through gigabytes of crash dumps, only to find + a handful of actual bugs in them. This simple heuristic filter + saves you the trouble by discarding crashes that seem to be similar + to another one you've already found. + """ + + # Filter out duplicated crashes, if requested. + if not allow_duplicates: + signature = pickle.dumps(crash.signature, protocol = 0) + if self._session.query(CrashDTO.id) \ + .filter_by(signature = signature) \ + .count() > 0: + return + + # Fill out a new row for the crashes table. + crash_id = self.__add_crash(crash) + + # Fill out new rows for the memory dump. + self.__add_memory(crash_id, crash.memoryMap) + + # On success set the row ID for the Crash object. + # WARNING: In nested calls, make sure to delete + # this property before a session rollback! + crash._rowid = crash_id + + # Store the Crash object into the crashes table. + def __add_crash(self, crash): + session = self._session + r_crash = None + try: + + # Fill out a new row for the crashes table. + r_crash = CrashDTO(crash) + session.add(r_crash) + + # Flush and get the new row ID. + session.flush() + crash_id = r_crash.id + + finally: + try: + + # Make the ORM forget the CrashDTO object. + if r_crash is not None: + session.expire(r_crash) + + finally: + + # Delete the last reference to the CrashDTO + # object, so the Python garbage collector claims it. + del r_crash + + # Return the row ID. + return crash_id + + # Store the memory dump into the memory table. + def __add_memory(self, crash_id, memoryMap): + session = self._session + if memoryMap: + for mbi in memoryMap: + r_mem = MemoryDTO(crash_id, mbi) + session.add(r_mem) + session.flush() + + @Transactional + def find(self, + signature = None, order = 0, + since = None, until = None, + offset = None, limit = None): + """ + Retrieve all crash dumps in the database, optionally filtering them by + signature and timestamp, and/or sorting them by timestamp. + + Results can be paged to avoid consuming too much memory if the database + is large. + + @see: L{find_by_example} + + @type signature: object + @param signature: (Optional) Return only through crashes matching + this signature. See L{Crash.signature} for more details. + + @type order: int + @param order: (Optional) Sort by timestamp. + If C{== 0}, results are not sorted. + If C{> 0}, results are sorted from older to newer. + If C{< 0}, results are sorted from newer to older. + + @type since: datetime + @param since: (Optional) Return only the crashes after and + including this date and time. + + @type until: datetime + @param until: (Optional) Return only the crashes before this date + and time, not including it. + + @type offset: int + @param offset: (Optional) Skip the first I{offset} results. + + @type limit: int + @param limit: (Optional) Return at most I{limit} results. + + @rtype: list(L{Crash}) + @return: List of Crash objects. + """ + + # Validate the parameters. 
+ if since and until and since > until: + warnings.warn("CrashDAO.find() got the 'since' and 'until'" + " arguments reversed, corrected automatically.") + since, until = until, since + if limit is not None and not limit: + warnings.warn("CrashDAO.find() was set a limit of 0 results," + " returning without executing a query.") + return [] + + # Build the SQL query. + query = self._session.query(CrashDTO) + if signature is not None: + sig_pickled = pickle.dumps(signature, protocol = 0) + query = query.filter(CrashDTO.signature == sig_pickled) + if since: + query = query.filter(CrashDTO.timestamp >= since) + if until: + query = query.filter(CrashDTO.timestamp < until) + if order: + if order > 0: + query = query.order_by(asc(CrashDTO.timestamp)) + else: + query = query.order_by(desc(CrashDTO.timestamp)) + else: + # Default ordering is by row ID, to get consistent results. + # Also some database engines require ordering when using offsets. + query = query.order_by(asc(CrashDTO.id)) + if offset: + query = query.offset(offset) + if limit: + query = query.limit(limit) + + # Execute the SQL query and convert the results. + try: + return [dto.toCrash() for dto in query.all()] + except NoResultFound: + return [] + + @Transactional + def find_by_example(self, crash, offset = None, limit = None): + """ + Find all crash dumps that have common properties with the crash dump + provided. + + Results can be paged to avoid consuming too much memory if the database + is large. + + @see: L{find} + + @type crash: L{Crash} + @param crash: Crash object to compare with. Fields set to C{None} are + ignored, all other fields but the signature are used in the + comparison. + + To search for signature instead use the L{find} method. + + @type offset: int + @param offset: (Optional) Skip the first I{offset} results. + + @type limit: int + @param limit: (Optional) Return at most I{limit} results. + + @rtype: list(L{Crash}) + @return: List of similar crash dumps found. + """ + + # Validate the parameters. + if limit is not None and not limit: + warnings.warn("CrashDAO.find_by_example() was set a limit of 0" + " results, returning without executing a query.") + return [] + + # Build the query. + query = self._session.query(CrashDTO) + + # Order by row ID to get consistent results. + # Also some database engines require ordering when using offsets. + query = query.asc(CrashDTO.id) + + # Build a CrashDTO from the Crash object. + dto = CrashDTO(crash) + + # Filter all the fields in the crashes table that are present in the + # CrashDTO object and not set to None, except for the row ID. + for name, column in compat.iteritems(CrashDTO.__dict__): + if not name.startswith('__') and name not in ('id', + 'signature', + 'data'): + if isinstance(column, Column): + value = getattr(dto, name, None) + if value is not None: + query = query.filter(column == value) + + # Page the query. + if offset: + query = query.offset(offset) + if limit: + query = query.limit(limit) + + # Execute the SQL query and convert the results. + try: + return [dto.toCrash() for dto in query.all()] + except NoResultFound: + return [] + + @Transactional + def count(self, signature = None): + """ + Counts how many crash dumps have been stored in this database. + Optionally filters the count by heuristic signature. + + @type signature: object + @param signature: (Optional) Count only the crashes that match + this signature. See L{Crash.signature} for more details. + + @rtype: int + @return: Count of crash dumps stored in this database. 
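+
+        Example (a sketch; C{dao} and C{crash} are assumed to exist)::
+            total   = dao.count()
+            similar = dao.count(signature = crash.signature)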
+ """ + query = self._session.query(CrashDTO.id) + if signature: + sig_pickled = pickle.dumps(signature, protocol = 0) + query = query.filter_by(signature = sig_pickled) + return query.count() + + @Transactional + def delete(self, crash): + """ + Remove the given crash dump from the database. + + @type crash: L{Crash} + @param crash: Crash dump to remove. + """ + query = self._session.query(CrashDTO).filter_by(id = crash._rowid) + query.delete(synchronize_session = False) + del crash._rowid diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/system.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/system.py new file mode 100644 index 00000000..9ee32001 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/system.py @@ -0,0 +1,1297 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +System settings. + +@group Instrumentation: + System +""" + +from __future__ import with_statement + +__revision__ = "$Id$" + +__all__ = ['System'] + +from winappdbg import win32 +from winappdbg.registry import Registry +from winappdbg.textio import HexInput, HexDump +from winappdbg.util import Regenerator, PathOperations, MemoryAddresses, DebugRegister, \ + classproperty +from winappdbg.process import _ProcessContainer +from winappdbg.window import Window + +import sys +import os +import ctypes +import warnings + +from os import path, getenv + +#============================================================================== + +class System (_ProcessContainer): + """ + Interface to a batch of processes, plus some system wide settings. + Contains a snapshot of processes. 
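+
+    Example (an illustrative sketch; the window caption and the executable
+    path are hypothetical)::
+        System.request_debug_privileges()
+        window = System.find_window(windowName = "Untitled - Notepad")
+        info   = System.get_file_version_info("C:\\Windows\\notepad.exe")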
+ + @group Platform settings: + arch, bits, os, wow64, pageSize + + @group Instrumentation: + find_window, get_window_at, get_foreground_window, + get_desktop_window, get_shell_window + + @group Debugging: + load_dbghelp, fix_symbol_store_path, + request_debug_privileges, drop_debug_privileges + + @group Postmortem debugging: + get_postmortem_debugger, set_postmortem_debugger, + get_postmortem_exclusion_list, add_to_postmortem_exclusion_list, + remove_from_postmortem_exclusion_list + + @group System services: + get_services, get_active_services, + start_service, stop_service, + pause_service, resume_service, + get_service_display_name, get_service_from_display_name + + @group Permissions and privileges: + request_privileges, drop_privileges, adjust_privileges, is_admin + + @group Miscellaneous global settings: + set_kill_on_exit_mode, read_msr, write_msr, enable_step_on_branch_mode, + get_last_branch_location + + @type arch: str + @cvar arch: Name of the processor architecture we're running on. + For more details see L{win32.version._get_arch}. + + @type bits: int + @cvar bits: Size of the machine word in bits for the current architecture. + For more details see L{win32.version._get_bits}. + + @type os: str + @cvar os: Name of the Windows version we're runing on. + For more details see L{win32.version._get_os}. + + @type wow64: bool + @cvar wow64: C{True} if the debugger is a 32 bits process running in a 64 + bits version of Windows, C{False} otherwise. + + @type pageSize: int + @cvar pageSize: Page size in bytes. Defaults to 0x1000 but it's + automatically updated on runtime when importing the module. + + @type registry: L{Registry} + @cvar registry: Windows Registry for this machine. + """ + + arch = win32.arch + bits = win32.bits + os = win32.os + wow64 = win32.wow64 + + @classproperty + def pageSize(cls): + pageSize = MemoryAddresses.pageSize + cls.pageSize = pageSize + return pageSize + + registry = Registry() + +#------------------------------------------------------------------------------ + + @staticmethod + def find_window(className = None, windowName = None): + """ + Find the first top-level window in the current desktop to match the + given class name and/or window name. If neither are provided any + top-level window will match. + + @see: L{get_window_at} + + @type className: str + @param className: (Optional) Class name of the window to find. + If C{None} or not used any class name will match the search. + + @type windowName: str + @param windowName: (Optional) Caption text of the window to find. + If C{None} or not used any caption text will match the search. + + @rtype: L{Window} or None + @return: A window that matches the request. There may be more matching + windows, but this method only returns one. If no matching window + is found, the return value is C{None}. + + @raise WindowsError: An error occured while processing this request. + """ + # I'd love to reverse the order of the parameters + # but that might create some confusion. :( + hWnd = win32.FindWindow(className, windowName) + if hWnd: + return Window(hWnd) + + @staticmethod + def get_window_at(x, y): + """ + Get the window located at the given coordinates in the desktop. + If no such window exists an exception is raised. + + @see: L{find_window} + + @type x: int + @param x: Horizontal coordinate. + @type y: int + @param y: Vertical coordinate. + + @rtype: L{Window} + @return: Window at the requested position. If no such window + exists a C{WindowsError} exception is raised. 
+ + @raise WindowsError: An error occured while processing this request. + """ + return Window( win32.WindowFromPoint( (x, y) ) ) + + @staticmethod + def get_foreground_window(): + """ + @rtype: L{Window} + @return: Returns the foreground window. + @raise WindowsError: An error occured while processing this request. + """ + return Window( win32.GetForegroundWindow() ) + + @staticmethod + def get_desktop_window(): + """ + @rtype: L{Window} + @return: Returns the desktop window. + @raise WindowsError: An error occured while processing this request. + """ + return Window( win32.GetDesktopWindow() ) + + @staticmethod + def get_shell_window(): + """ + @rtype: L{Window} + @return: Returns the shell window. + @raise WindowsError: An error occured while processing this request. + """ + return Window( win32.GetShellWindow() ) + +#------------------------------------------------------------------------------ + + @classmethod + def request_debug_privileges(cls, bIgnoreExceptions = False): + """ + Requests debug privileges. + + This may be needed to debug processes running as SYSTEM + (such as services) since Windows XP. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when requesting debug privileges. + + @rtype: bool + @return: C{True} on success, C{False} on failure. + + @raise WindowsError: Raises an exception on error, unless + C{bIgnoreExceptions} is C{True}. + """ + try: + cls.request_privileges(win32.SE_DEBUG_NAME) + return True + except Exception: + if not bIgnoreExceptions: + raise + return False + + @classmethod + def drop_debug_privileges(cls, bIgnoreExceptions = False): + """ + Drops debug privileges. + + This may be needed to avoid being detected + by certain anti-debug tricks. + + @type bIgnoreExceptions: bool + @param bIgnoreExceptions: C{True} to ignore any exceptions that may be + raised when dropping debug privileges. + + @rtype: bool + @return: C{True} on success, C{False} on failure. + + @raise WindowsError: Raises an exception on error, unless + C{bIgnoreExceptions} is C{True}. + """ + try: + cls.drop_privileges(win32.SE_DEBUG_NAME) + return True + except Exception: + if not bIgnoreExceptions: + raise + return False + + @classmethod + def request_privileges(cls, *privileges): + """ + Requests privileges. + + @type privileges: int... + @param privileges: Privileges to request. + + @raise WindowsError: Raises an exception on error. + """ + cls.adjust_privileges(True, privileges) + + @classmethod + def drop_privileges(cls, *privileges): + """ + Drops privileges. + + @type privileges: int... + @param privileges: Privileges to drop. + + @raise WindowsError: Raises an exception on error. + """ + cls.adjust_privileges(False, privileges) + + @staticmethod + def adjust_privileges(state, privileges): + """ + Requests or drops privileges. + + @type state: bool + @param state: C{True} to request, C{False} to drop. + + @type privileges: list(int) + @param privileges: Privileges to request or drop. + + @raise WindowsError: Raises an exception on error. + """ + with win32.OpenProcessToken(win32.GetCurrentProcess(), + win32.TOKEN_ADJUST_PRIVILEGES) as hToken: + NewState = ( (priv, state) for priv in privileges ) + win32.AdjustTokenPrivileges(hToken, NewState) + + @staticmethod + def is_admin(): + """ + @rtype: bool + @return: C{True} if the current user as Administrator privileges, + C{False} otherwise. Since Windows Vista and above this means if + the current process is running with UAC elevation or not. 
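+
+        Example (a sketch)::
+            if System.is_admin():
+                System.request_debug_privileges()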
+ """ + return win32.IsUserAnAdmin() + +#------------------------------------------------------------------------------ + + __binary_types = { + win32.VFT_APP: "application", + win32.VFT_DLL: "dynamic link library", + win32.VFT_STATIC_LIB: "static link library", + win32.VFT_FONT: "font", + win32.VFT_DRV: "driver", + win32.VFT_VXD: "legacy driver", + } + + __driver_types = { + win32.VFT2_DRV_COMM: "communications driver", + win32.VFT2_DRV_DISPLAY: "display driver", + win32.VFT2_DRV_INSTALLABLE: "installable driver", + win32.VFT2_DRV_KEYBOARD: "keyboard driver", + win32.VFT2_DRV_LANGUAGE: "language driver", + win32.VFT2_DRV_MOUSE: "mouse driver", + win32.VFT2_DRV_NETWORK: "network driver", + win32.VFT2_DRV_PRINTER: "printer driver", + win32.VFT2_DRV_SOUND: "sound driver", + win32.VFT2_DRV_SYSTEM: "system driver", + win32.VFT2_DRV_VERSIONED_PRINTER: "versioned printer driver", + } + + __font_types = { + win32.VFT2_FONT_RASTER: "raster font", + win32.VFT2_FONT_TRUETYPE: "TrueType font", + win32.VFT2_FONT_VECTOR: "vector font", + } + + __months = ( + "January", + "February", + "March", + "April", + "May", + "June", + "July", + "August", + "September", + "October", + "November", + "December", + ) + + __days_of_the_week = ( + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday", + ) + + @classmethod + def get_file_version_info(cls, filename): + """ + Get the program version from an executable file, if available. + + @type filename: str + @param filename: Pathname to the executable file to query. + + @rtype: tuple(str, str, bool, bool, str, str) + @return: Tuple with version information extracted from the executable + file metadata, containing the following: + - File version number (C{"major.minor"}). + - Product version number (C{"major.minor"}). + - C{True} for debug builds, C{False} for production builds. + - C{True} for legacy OS builds (DOS, OS/2, Win16), + C{False} for modern OS builds. + - Binary file type. + May be one of the following values: + - "application" + - "dynamic link library" + - "static link library" + - "font" + - "raster font" + - "TrueType font" + - "vector font" + - "driver" + - "communications driver" + - "display driver" + - "installable driver" + - "keyboard driver" + - "language driver" + - "legacy driver" + - "mouse driver" + - "network driver" + - "printer driver" + - "sound driver" + - "system driver" + - "versioned printer driver" + - Binary creation timestamp. + Any of the fields may be C{None} if not available. + + @raise WindowsError: Raises an exception on error. + """ + + # Get the file version info structure. + pBlock = win32.GetFileVersionInfo(filename) + pBuffer, dwLen = win32.VerQueryValue(pBlock, "\\") + if dwLen != ctypes.sizeof(win32.VS_FIXEDFILEINFO): + raise ctypes.WinError(win32.ERROR_BAD_LENGTH) + pVersionInfo = ctypes.cast(pBuffer, + ctypes.POINTER(win32.VS_FIXEDFILEINFO)) + VersionInfo = pVersionInfo.contents + if VersionInfo.dwSignature != 0xFEEF04BD: + raise ctypes.WinError(win32.ERROR_BAD_ARGUMENTS) + + # File and product versions. + FileVersion = "%d.%d" % (VersionInfo.dwFileVersionMS, + VersionInfo.dwFileVersionLS) + ProductVersion = "%d.%d" % (VersionInfo.dwProductVersionMS, + VersionInfo.dwProductVersionLS) + + # Debug build? + if VersionInfo.dwFileFlagsMask & win32.VS_FF_DEBUG: + DebugBuild = (VersionInfo.dwFileFlags & win32.VS_FF_DEBUG) != 0 + else: + DebugBuild = None + + # Legacy OS build? + LegacyBuild = (VersionInfo.dwFileOS != win32.VOS_NT_WINDOWS32) + + # File type. 
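+        # dwFileType gives the broad category; for drivers and fonts the
+        # dwFileSubtype field provides a more specific description.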
+ FileType = cls.__binary_types.get(VersionInfo.dwFileType) + if VersionInfo.dwFileType == win32.VFT_DRV: + FileType = cls.__driver_types.get(VersionInfo.dwFileSubtype) + elif VersionInfo.dwFileType == win32.VFT_FONT: + FileType = cls.__font_types.get(VersionInfo.dwFileSubtype) + + # Timestamp, ex: "Monday, July 7, 2013 (12:20:50.126)". + # FIXME: how do we know the time zone? + FileDate = (VersionInfo.dwFileDateMS << 32) + VersionInfo.dwFileDateLS + if FileDate: + CreationTime = win32.FileTimeToSystemTime(FileDate) + CreationTimestamp = "%s, %s %d, %d (%d:%d:%d.%d)" % ( + cls.__days_of_the_week[CreationTime.wDayOfWeek], + cls.__months[CreationTime.wMonth], + CreationTime.wDay, + CreationTime.wYear, + CreationTime.wHour, + CreationTime.wMinute, + CreationTime.wSecond, + CreationTime.wMilliseconds, + ) + else: + CreationTimestamp = None + + # Return the file version info. + return ( + FileVersion, + ProductVersion, + DebugBuild, + LegacyBuild, + FileType, + CreationTimestamp, + ) + +#------------------------------------------------------------------------------ + + # Locations for dbghelp.dll. + # Unfortunately, Microsoft started bundling WinDbg with the + # platform SDK, so the install directories may vary across + # versions and platforms. + __dbghelp_locations = { + + # Intel 64 bits. + win32.ARCH_AMD64: set([ + + # WinDbg bundled with the SDK, version 8.0. + path.join( + getenv("ProgramFiles", "C:\\Program Files"), + "Windows Kits", + "8.0", + "Debuggers", + "x64", + "dbghelp.dll"), + path.join( + getenv("ProgramW6432", getenv("ProgramFiles", + "C:\\Program Files")), + "Windows Kits", + "8.0", + "Debuggers", + "x64", + "dbghelp.dll"), + + # Old standalone versions of WinDbg. + path.join( + getenv("ProgramFiles", "C:\\Program Files"), + "Debugging Tools for Windows (x64)", + "dbghelp.dll"), + ]), + + # Intel 32 bits. + win32.ARCH_I386 : set([ + + # WinDbg bundled with the SDK, version 8.0. + path.join( + getenv("ProgramFiles", "C:\\Program Files"), + "Windows Kits", + "8.0", + "Debuggers", + "x86", + "dbghelp.dll"), + path.join( + getenv("ProgramW6432", getenv("ProgramFiles", + "C:\\Program Files")), + "Windows Kits", + "8.0", + "Debuggers", + "x86", + "dbghelp.dll"), + + # Old standalone versions of WinDbg. + path.join( + getenv("ProgramFiles", "C:\\Program Files"), + "Debugging Tools for Windows (x86)", + "dbghelp.dll"), + + # Version shipped with Windows. + path.join( + getenv("ProgramFiles", "C:\\Program Files"), + "Debugging Tools for Windows (x86)", + "dbghelp.dll"), + ]), + } + + @classmethod + def load_dbghelp(cls, pathname = None): + """ + Load the specified version of the C{dbghelp.dll} library. + + This library is shipped with the Debugging Tools for Windows, and it's + required to load debug symbols. + + Normally you don't need to call this method, as WinAppDbg already tries + to load the latest version automatically - but it may come in handy if + the Debugging Tools are installed in a non standard folder. 
+ + Example:: + from winappdbg import Debug + + def simple_debugger( argv ): + + # Instance a Debug object, passing it the event handler callback + debug = Debug( my_event_handler ) + try: + + # Load a specific dbghelp.dll file + debug.system.load_dbghelp("C:\\Some folder\\dbghelp.dll") + + # Start a new process for debugging + debug.execv( argv ) + + # Wait for the debugee to finish + debug.loop() + + # Stop the debugger + finally: + debug.stop() + + @see: U{http://msdn.microsoft.com/en-us/library/ms679294(VS.85).aspx} + + @type pathname: str + @param pathname: + (Optional) Full pathname to the C{dbghelp.dll} library. + If not provided this method will try to autodetect it. + + @rtype: ctypes.WinDLL + @return: Loaded instance of C{dbghelp.dll}. + + @raise NotImplementedError: This feature was not implemented for the + current architecture. + + @raise WindowsError: An error occured while processing this request. + """ + + # If an explicit pathname was not given, search for the library. + if not pathname: + + # Under WOW64 we'll treat AMD64 as I386. + arch = win32.arch + if arch == win32.ARCH_AMD64 and win32.bits == 32: + arch = win32.ARCH_I386 + + # Check if the architecture is supported. + if not arch in cls.__dbghelp_locations: + msg = "Architecture %s is not currently supported." + raise NotImplementedError(msg % arch) + + # Grab all versions of the library we can find. + found = [] + for pathname in cls.__dbghelp_locations[arch]: + if path.isfile(pathname): + try: + f_ver, p_ver = cls.get_file_version_info(pathname)[:2] + except WindowsError: + msg = "Failed to parse file version metadata for: %s" + warnings.warn(msg % pathname) + if not f_ver: + f_ver = p_ver + elif p_ver and p_ver > f_ver: + f_ver = p_ver + found.append( (f_ver, pathname) ) + + # If we found any, use the newest version. + if found: + found.sort() + pathname = found.pop()[1] + + # If we didn't find any, trust the default DLL search algorithm. + else: + pathname = "dbghelp.dll" + + # Load the library. + dbghelp = ctypes.windll.LoadLibrary(pathname) + + # Set it globally as the library to be used. + ctypes.windll.dbghelp = dbghelp + + # Return the library. + return dbghelp + + @staticmethod + def fix_symbol_store_path(symbol_store_path = None, + remote = True, + force = False): + """ + Fix the symbol store path. Equivalent to the C{.symfix} command in + Microsoft WinDbg. + + If the symbol store path environment variable hasn't been set, this + method will provide a default one. + + @type symbol_store_path: str or None + @param symbol_store_path: (Optional) Symbol store path to set. + + @type remote: bool + @param remote: (Optional) Defines the symbol store path to set when the + C{symbol_store_path} is C{None}. + + If C{True} the default symbol store path is set to the Microsoft + symbol server. Debug symbols will be downloaded through HTTP. + This gives the best results but is also quite slow. + + If C{False} the default symbol store path is set to the local + cache only. This prevents debug symbols from being downloaded and + is faster, but unless you've installed the debug symbols on this + machine or downloaded them in a previous debugging session, some + symbols may be missing. + + If the C{symbol_store_path} argument is not C{None}, this argument + is ignored entirely. + + @type force: bool + @param force: (Optional) If C{True} the new symbol store path is set + always. If C{False} the new symbol store path is only set if + missing. 
+ + This allows you to call this method preventively to ensure the + symbol server is always set up correctly when running your script, + but without messing up whatever configuration the user has. + + Example:: + from winappdbg import Debug, System + + def simple_debugger( argv ): + + # Instance a Debug object + debug = Debug( MyEventHandler() ) + try: + + # Make sure the remote symbol store is set + System.fix_symbol_store_path(remote = True, + force = False) + + # Start a new process for debugging + debug.execv( argv ) + + # Wait for the debugee to finish + debug.loop() + + # Stop the debugger + finally: + debug.stop() + + @rtype: str or None + @return: The previously set symbol store path if any, + otherwise returns C{None}. + """ + try: + if symbol_store_path is None: + local_path = "C:\\SYMBOLS" + if not path.isdir(local_path): + local_path = "C:\\Windows\\Symbols" + if not path.isdir(local_path): + local_path = path.abspath(".") + if remote: + symbol_store_path = ( + "cache*;SRV*" + + local_path + + "*" + "http://msdl.microsoft.com/download/symbols" + ) + else: + symbol_store_path = "cache*;SRV*" + local_path + previous = os.environ.get("_NT_SYMBOL_PATH", None) + if not previous or force: + os.environ["_NT_SYMBOL_PATH"] = symbol_store_path + return previous + except Exception: + e = sys.exc_info()[1] + warnings.warn("Cannot fix symbol path, reason: %s" % str(e), + RuntimeWarning) + +#------------------------------------------------------------------------------ + + @staticmethod + def set_kill_on_exit_mode(bKillOnExit = False): + """ + Defines the behavior of the debugged processes when the debugging + thread dies. This method only affects the calling thread. + + Works on the following platforms: + + - Microsoft Windows XP and above. + - Wine (Windows Emulator). + + Fails on the following platforms: + + - Microsoft Windows 2000 and below. + - ReactOS. + + @type bKillOnExit: bool + @param bKillOnExit: C{True} to automatically kill processes when the + debugger thread dies. C{False} to automatically detach from + processes when the debugger thread dies. + + @rtype: bool + @return: C{True} on success, C{False} on error. + + @note: + This call will fail if a debug port was not created. That is, if + the debugger isn't attached to at least one process. For more info + see: U{http://msdn.microsoft.com/en-us/library/ms679307.aspx} + """ + try: + # won't work before calling CreateProcess or DebugActiveProcess + win32.DebugSetProcessKillOnExit(bKillOnExit) + except (AttributeError, WindowsError): + return False + return True + + @staticmethod + def read_msr(address): + """ + Read the contents of the specified MSR (Machine Specific Register). + + @type address: int + @param address: MSR to read. + + @rtype: int + @return: Value of the specified MSR. + + @raise WindowsError: + Raises an exception on error. + + @raise NotImplementedError: + Current architecture is not C{i386} or C{amd64}. + + @warning: + It could potentially brick your machine. + It works on my machine, but your mileage may vary. + """ + if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64): + raise NotImplementedError( + "MSR reading is only supported on i386 or amd64 processors.") + msr = win32.SYSDBG_MSR() + msr.Address = address + msr.Data = 0 + win32.NtSystemDebugControl(win32.SysDbgReadMsr, + InputBuffer = msr, + OutputBuffer = msr) + return msr.Data + + @staticmethod + def write_msr(address, value): + """ + Set the contents of the specified MSR (Machine Specific Register). 
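+
+        The sketch below shows the read-modify-write pattern used elsewhere
+        in this class (see L{enable_step_on_branch_mode}); the flag value is
+        illustrative only.
+
+        Example::
+            from winappdbg import System
+            from winappdbg.util import DebugRegister
+
+            # Read the debug control MSR, set a flag, and write it back.
+            # This typically requires an attached debugger and admin rights.
+            value = System.read_msr(DebugRegister.DebugCtlMSR)
+            System.write_msr(DebugRegister.DebugCtlMSR,
+                             value | DebugRegister.BranchTrapFlag)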
+ + @type address: int + @param address: MSR to write. + + @type value: int + @param value: Contents to write on the MSR. + + @raise WindowsError: + Raises an exception on error. + + @raise NotImplementedError: + Current architecture is not C{i386} or C{amd64}. + + @warning: + It could potentially brick your machine. + It works on my machine, but your mileage may vary. + """ + if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64): + raise NotImplementedError( + "MSR writing is only supported on i386 or amd64 processors.") + msr = win32.SYSDBG_MSR() + msr.Address = address + msr.Data = value + win32.NtSystemDebugControl(win32.SysDbgWriteMsr, InputBuffer = msr) + + @classmethod + def enable_step_on_branch_mode(cls): + """ + When tracing, call this on every single step event + for step on branch mode. + + @raise WindowsError: + Raises C{ERROR_DEBUGGER_INACTIVE} if the debugger is not attached + to least one process. + + @raise NotImplementedError: + Current architecture is not C{i386} or C{amd64}. + + @warning: + This method uses the processor's machine specific registers (MSR). + It could potentially brick your machine. + It works on my machine, but your mileage may vary. + + @note: + It doesn't seem to work in VMWare or VirtualBox machines. + Maybe it fails in other virtualization/emulation environments, + no extensive testing was made so far. + """ + cls.write_msr(DebugRegister.DebugCtlMSR, + DebugRegister.BranchTrapFlag | DebugRegister.LastBranchRecord) + + @classmethod + def get_last_branch_location(cls): + """ + Returns the source and destination addresses of the last taken branch. + + @rtype: tuple( int, int ) + @return: Source and destination addresses of the last taken branch. + + @raise WindowsError: + Raises an exception on error. + + @raise NotImplementedError: + Current architecture is not C{i386} or C{amd64}. + + @warning: + This method uses the processor's machine specific registers (MSR). + It could potentially brick your machine. + It works on my machine, but your mileage may vary. + + @note: + It doesn't seem to work in VMWare or VirtualBox machines. + Maybe it fails in other virtualization/emulation environments, + no extensive testing was made so far. + """ + LastBranchFromIP = cls.read_msr(DebugRegister.LastBranchFromIP) + LastBranchToIP = cls.read_msr(DebugRegister.LastBranchToIP) + return ( LastBranchFromIP, LastBranchToIP ) + +#------------------------------------------------------------------------------ + + @classmethod + def get_postmortem_debugger(cls, bits = None): + """ + Returns the postmortem debugging settings from the Registry. + + @see: L{set_postmortem_debugger} + + @type bits: int + @param bits: Set to C{32} for the 32 bits debugger, or C{64} for the + 64 bits debugger. Set to {None} for the default (L{System.bits}. + + @rtype: tuple( str, bool, int ) + @return: A tuple containing the command line string to the postmortem + debugger, a boolean specifying if user interaction is allowed + before attaching, and an integer specifying a user defined hotkey. + Any member of the tuple may be C{None}. + See L{set_postmortem_debugger} for more details. + + @raise WindowsError: + Raises an exception on error. 
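+
+        A minimal sketch of reading the current settings:
+
+        Example::
+            from winappdbg import System
+
+            cmdline, auto, hotkey = System.get_postmortem_debugger()
+            print("Postmortem debugger: %s" % cmdline)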
+ """ + if bits is None: + bits = cls.bits + elif bits not in (32, 64): + raise NotImplementedError("Unknown architecture (%r bits)" % bits) + + if bits == 32 and cls.bits == 64: + keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug' + else: + keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug' + + key = cls.registry[keyname] + + debugger = key.get('Debugger') + auto = key.get('Auto') + hotkey = key.get('UserDebuggerHotkey') + + if auto is not None: + auto = bool(auto) + + return (debugger, auto, hotkey) + + @classmethod + def get_postmortem_exclusion_list(cls, bits = None): + """ + Returns the exclusion list for the postmortem debugger. + + @see: L{get_postmortem_debugger} + + @type bits: int + @param bits: Set to C{32} for the 32 bits debugger, or C{64} for the + 64 bits debugger. Set to {None} for the default (L{System.bits}). + + @rtype: list( str ) + @return: List of excluded application filenames. + + @raise WindowsError: + Raises an exception on error. + """ + if bits is None: + bits = cls.bits + elif bits not in (32, 64): + raise NotImplementedError("Unknown architecture (%r bits)" % bits) + + if bits == 32 and cls.bits == 64: + keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList' + else: + keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList' + + try: + key = cls.registry[keyname] + except KeyError: + return [] + + return [name for (name, enabled) in key.items() if enabled] + + @classmethod + def set_postmortem_debugger(cls, cmdline, + auto = None, hotkey = None, bits = None): + """ + Sets the postmortem debugging settings in the Registry. + + @warning: This method requires administrative rights. + + @see: L{get_postmortem_debugger} + + @type cmdline: str + @param cmdline: Command line to the new postmortem debugger. + When the debugger is invoked, the first "%ld" is replaced with the + process ID and the second "%ld" is replaced with the event handle. + Don't forget to enclose the program filename in double quotes if + the path contains spaces. + + @type auto: bool + @param auto: Set to C{True} if no user interaction is allowed, C{False} + to prompt a confirmation dialog before attaching. + Use C{None} to leave this value unchanged. + + @type hotkey: int + @param hotkey: Virtual key scan code for the user defined hotkey. + Use C{0} to disable the hotkey. + Use C{None} to leave this value unchanged. + + @type bits: int + @param bits: Set to C{32} for the 32 bits debugger, or C{64} for the + 64 bits debugger. Set to {None} for the default (L{System.bits}). + + @rtype: tuple( str, bool, int ) + @return: Previously defined command line and auto flag. + + @raise WindowsError: + Raises an exception on error. + """ + if bits is None: + bits = cls.bits + elif bits not in (32, 64): + raise NotImplementedError("Unknown architecture (%r bits)" % bits) + + if bits == 32 and cls.bits == 64: + keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug' + else: + keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug' + + key = cls.registry[keyname] + + if cmdline is not None: + key['Debugger'] = cmdline + if auto is not None: + key['Auto'] = int(bool(auto)) + if hotkey is not None: + key['UserDebuggerHotkey'] = int(hotkey) + + @classmethod + def add_to_postmortem_exclusion_list(cls, pathname, bits = None): + """ + Adds the given filename to the exclusion list for postmortem debugging. 
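+
+        The pathname used below is a made-up placeholder; substitute the
+        executable you actually want to exclude.
+
+        Example::
+            from winappdbg import System
+
+            # Requires administrative rights.
+            System.add_to_postmortem_exclusion_list("myapp.exe")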
+ + @warning: This method requires administrative rights. + + @see: L{get_postmortem_exclusion_list} + + @type pathname: str + @param pathname: + Application pathname to exclude from postmortem debugging. + + @type bits: int + @param bits: Set to C{32} for the 32 bits debugger, or C{64} for the + 64 bits debugger. Set to {None} for the default (L{System.bits}). + + @raise WindowsError: + Raises an exception on error. + """ + if bits is None: + bits = cls.bits + elif bits not in (32, 64): + raise NotImplementedError("Unknown architecture (%r bits)" % bits) + + if bits == 32 and cls.bits == 64: + keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList' + else: + keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList' + + try: + key = cls.registry[keyname] + except KeyError: + key = cls.registry.create(keyname) + + key[pathname] = 1 + + @classmethod + def remove_from_postmortem_exclusion_list(cls, pathname, bits = None): + """ + Removes the given filename to the exclusion list for postmortem + debugging from the Registry. + + @warning: This method requires administrative rights. + + @warning: Don't ever delete entries you haven't created yourself! + Some entries are set by default for your version of Windows. + Deleting them might deadlock your system under some circumstances. + + For more details see: + U{http://msdn.microsoft.com/en-us/library/bb204634(v=vs.85).aspx} + + @see: L{get_postmortem_exclusion_list} + + @type pathname: str + @param pathname: Application pathname to remove from the postmortem + debugging exclusion list. + + @type bits: int + @param bits: Set to C{32} for the 32 bits debugger, or C{64} for the + 64 bits debugger. Set to {None} for the default (L{System.bits}). + + @raise WindowsError: + Raises an exception on error. + """ + if bits is None: + bits = cls.bits + elif bits not in (32, 64): + raise NotImplementedError("Unknown architecture (%r bits)" % bits) + + if bits == 32 and cls.bits == 64: + keyname = 'HKLM\\SOFTWARE\\Wow6432Node\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList' + else: + keyname = 'HKLM\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\AeDebug\\AutoExclusionList' + + try: + key = cls.registry[keyname] + except KeyError: + return + + try: + del key[pathname] + except KeyError: + return + +#------------------------------------------------------------------------------ + + @staticmethod + def get_services(): + """ + Retrieve a list of all system services. + + @see: L{get_active_services}, + L{start_service}, L{stop_service}, + L{pause_service}, L{resume_service} + + @rtype: list( L{win32.ServiceStatusProcessEntry} ) + @return: List of service status descriptors. + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE + ) as hSCManager: + try: + return win32.EnumServicesStatusEx(hSCManager) + except AttributeError: + return win32.EnumServicesStatus(hSCManager) + + @staticmethod + def get_active_services(): + """ + Retrieve a list of all active system services. + + @see: L{get_services}, + L{start_service}, L{stop_service}, + L{pause_service}, L{resume_service} + + @rtype: list( L{win32.ServiceStatusProcessEntry} ) + @return: List of service status descriptors. 
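+
+        A small sketch that lists the running services:
+
+        Example::
+            from winappdbg import System
+
+            for descriptor in System.get_active_services():
+                print("%d: %s" % (descriptor.ProcessId, descriptor.ServiceName))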
+ """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE + ) as hSCManager: + return [ entry for entry in win32.EnumServicesStatusEx(hSCManager, + dwServiceType = win32.SERVICE_WIN32, + dwServiceState = win32.SERVICE_ACTIVE) \ + if entry.ProcessId ] + + @staticmethod + def get_service(name): + """ + Get the service descriptor for the given service name. + + @see: L{start_service}, L{stop_service}, + L{pause_service}, L{resume_service} + + @type name: str + @param name: Service unique name. You can get this value from the + C{ServiceName} member of the service descriptors returned by + L{get_services} or L{get_active_services}. + + @rtype: L{win32.ServiceStatusProcess} + @return: Service status descriptor. + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE + ) as hSCManager: + with win32.OpenService(hSCManager, name, + dwDesiredAccess = win32.SERVICE_QUERY_STATUS + ) as hService: + try: + return win32.QueryServiceStatusEx(hService) + except AttributeError: + return win32.QueryServiceStatus(hService) + + @staticmethod + def get_service_display_name(name): + """ + Get the service display name for the given service name. + + @see: L{get_service} + + @type name: str + @param name: Service unique name. You can get this value from the + C{ServiceName} member of the service descriptors returned by + L{get_services} or L{get_active_services}. + + @rtype: str + @return: Service display name. + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE + ) as hSCManager: + return win32.GetServiceDisplayName(hSCManager, name) + + @staticmethod + def get_service_from_display_name(displayName): + """ + Get the service unique name given its display name. + + @see: L{get_service} + + @type displayName: str + @param displayName: Service display name. You can get this value from + the C{DisplayName} member of the service descriptors returned by + L{get_services} or L{get_active_services}. + + @rtype: str + @return: Service unique name. + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_ENUMERATE_SERVICE + ) as hSCManager: + return win32.GetServiceKeyName(hSCManager, displayName) + + @staticmethod + def start_service(name, argv = None): + """ + Start the service given by name. + + @warn: This method requires UAC elevation in Windows Vista and above. + + @see: L{stop_service}, L{pause_service}, L{resume_service} + + @type name: str + @param name: Service unique name. You can get this value from the + C{ServiceName} member of the service descriptors returned by + L{get_services} or L{get_active_services}. + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_CONNECT + ) as hSCManager: + with win32.OpenService(hSCManager, name, + dwDesiredAccess = win32.SERVICE_START + ) as hService: + win32.StartService(hService) + + @staticmethod + def stop_service(name): + """ + Stop the service given by name. + + @warn: This method requires UAC elevation in Windows Vista and above. + + @see: L{get_services}, L{get_active_services}, + L{start_service}, L{pause_service}, L{resume_service} + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_CONNECT + ) as hSCManager: + with win32.OpenService(hSCManager, name, + dwDesiredAccess = win32.SERVICE_STOP + ) as hService: + win32.ControlService(hService, win32.SERVICE_CONTROL_STOP) + + @staticmethod + def pause_service(name): + """ + Pause the service given by name. 
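+
+        The service name below is only an illustration; use a name taken
+        from L{get_services}, and keep in mind not every service supports
+        being paused.
+
+        Example::
+            from winappdbg import System
+
+            # Pause and later resume the Server service (requires elevation).
+            System.pause_service("LanmanServer")
+            # ... do some work while the service is paused ...
+            System.resume_service("LanmanServer")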
+ + @warn: This method requires UAC elevation in Windows Vista and above. + + @note: Not all services support this. + + @see: L{get_services}, L{get_active_services}, + L{start_service}, L{stop_service}, L{resume_service} + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_CONNECT + ) as hSCManager: + with win32.OpenService(hSCManager, name, + dwDesiredAccess = win32.SERVICE_PAUSE_CONTINUE + ) as hService: + win32.ControlService(hService, win32.SERVICE_CONTROL_PAUSE) + + @staticmethod + def resume_service(name): + """ + Resume the service given by name. + + @warn: This method requires UAC elevation in Windows Vista and above. + + @note: Not all services support this. + + @see: L{get_services}, L{get_active_services}, + L{start_service}, L{stop_service}, L{pause_service} + """ + with win32.OpenSCManager( + dwDesiredAccess = win32.SC_MANAGER_CONNECT + ) as hSCManager: + with win32.OpenService(hSCManager, name, + dwDesiredAccess = win32.SERVICE_PAUSE_CONTINUE + ) as hService: + win32.ControlService(hService, win32.SERVICE_CONTROL_CONTINUE) + + # TODO: create_service, delete_service diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/textio.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/textio.py new file mode 100644 index 00000000..402f631d --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/textio.py @@ -0,0 +1,1879 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Functions for text input, logging or text output. 
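+
+A small, illustrative sketch of the helpers defined here:
+
+Example::
+    from winappdbg.textio import HexDump
+
+    print(HexDump.address(0x7ffd3000))   # fixed width memory address
+    print(HexDump.integer(1024))         # fixed width integer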
+ +@group Helpers: + HexDump, + HexInput, + HexOutput, + Color, + Table, + Logger + DebugLog + CrashDump +""" + +__revision__ = "$Id$" + +__all__ = [ + 'HexDump', + 'HexInput', + 'HexOutput', + 'Color', + 'Table', + 'CrashDump', + 'DebugLog', + 'Logger', + ] + +import sys +from winappdbg import win32 +from winappdbg import compat +from winappdbg.util import StaticClass + +import re +import time +import struct +import traceback + +#------------------------------------------------------------------------------ + +class HexInput (StaticClass): + """ + Static functions for user input parsing. + The counterparts for each method are in the L{HexOutput} class. + """ + + @staticmethod + def integer(token): + """ + Convert numeric strings into integers. + + @type token: str + @param token: String to parse. + + @rtype: int + @return: Parsed integer value. + """ + token = token.strip() + neg = False + if token.startswith(compat.b('-')): + token = token[1:] + neg = True + if token.startswith(compat.b('0x')): + result = int(token, 16) # hexadecimal + elif token.startswith(compat.b('0b')): + result = int(token[2:], 2) # binary + elif token.startswith(compat.b('0o')): + result = int(token, 8) # octal + else: + try: + result = int(token) # decimal + except ValueError: + result = int(token, 16) # hexadecimal (no "0x" prefix) + if neg: + result = -result + return result + + @staticmethod + def address(token): + """ + Convert numeric strings into memory addresses. + + @type token: str + @param token: String to parse. + + @rtype: int + @return: Parsed integer value. + """ + return int(token, 16) + + @staticmethod + def hexadecimal(token): + """ + Convert a strip of hexadecimal numbers into binary data. + + @type token: str + @param token: String to parse. + + @rtype: str + @return: Parsed string value. + """ + token = ''.join([ c for c in token if c.isalnum() ]) + if len(token) % 2 != 0: + raise ValueError("Missing characters in hex data") + data = '' + for i in compat.xrange(0, len(token), 2): + x = token[i:i+2] + d = int(x, 16) + s = struct.pack('= 0: + return ('0x%%.%dx' % (integer_size - 2)) % integer + return ('-0x%%.%dx' % (integer_size - 2)) % -integer + + @classmethod + def address(cls, address, bits = None): + """ + @type address: int + @param address: Memory address. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexOutput.address_size} + + @rtype: str + @return: Text output. + """ + if bits is None: + address_size = cls.address_size + bits = win32.bits + else: + address_size = (bits / 4) + 2 + if address < 0: + address = ((2 ** bits) - 1) ^ ~address + return ('0x%%.%dx' % (address_size - 2)) % address + + @staticmethod + def hexadecimal(data): + """ + Convert binary data to a string of hexadecimal numbers. + + @type data: str + @param data: Binary data. + + @rtype: str + @return: Hexadecimal representation. + """ + return HexDump.hexadecimal(data, separator = '') + + @classmethod + def integer_list_file(cls, filename, values, bits = None): + """ + Write a list of integers to a file. + If a file of the same name exists, it's contents are replaced. + + See L{HexInput.integer_list_file} for a description of the file format. + + @type filename: str + @param filename: Name of the file to write. + + @type values: list( int ) + @param values: List of integers to write to the file. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. 
See: L{HexOutput.integer_size} + """ + fd = open(filename, 'w') + for integer in values: + print >> fd, cls.integer(integer, bits) + fd.close() + + @classmethod + def string_list_file(cls, filename, values): + """ + Write a list of strings to a file. + If a file of the same name exists, it's contents are replaced. + + See L{HexInput.string_list_file} for a description of the file format. + + @type filename: str + @param filename: Name of the file to write. + + @type values: list( int ) + @param values: List of strings to write to the file. + """ + fd = open(filename, 'w') + for string in values: + print >> fd, string + fd.close() + + @classmethod + def mixed_list_file(cls, filename, values, bits): + """ + Write a list of mixed values to a file. + If a file of the same name exists, it's contents are replaced. + + See L{HexInput.mixed_list_file} for a description of the file format. + + @type filename: str + @param filename: Name of the file to write. + + @type values: list( int ) + @param values: List of mixed values to write to the file. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexOutput.integer_size} + """ + fd = open(filename, 'w') + for original in values: + try: + parsed = cls.integer(original, bits) + except TypeError: + parsed = repr(original) + print >> fd, parsed + fd.close() + +#------------------------------------------------------------------------------ + +class HexDump (StaticClass): + """ + Static functions for hexadecimal dumps. + + @type integer_size: int + @cvar integer_size: Size in characters of an outputted integer. + This value is platform dependent. + + @type address_size: int + @cvar address_size: Size in characters of an outputted address. + This value is platform dependent. + """ + + integer_size = (win32.SIZEOF(win32.DWORD) * 2) + address_size = (win32.SIZEOF(win32.SIZE_T) * 2) + + @classmethod + def integer(cls, integer, bits = None): + """ + @type integer: int + @param integer: Integer. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.integer_size} + + @rtype: str + @return: Text output. + """ + if bits is None: + integer_size = cls.integer_size + else: + integer_size = bits / 4 + return ('%%.%dX' % integer_size) % integer + + @classmethod + def address(cls, address, bits = None): + """ + @type address: int + @param address: Memory address. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.address_size} + + @rtype: str + @return: Text output. + """ + if bits is None: + address_size = cls.address_size + bits = win32.bits + else: + address_size = bits / 4 + if address < 0: + address = ((2 ** bits) - 1) ^ ~address + return ('%%.%dX' % address_size) % address + + @staticmethod + def printable(data): + """ + Replace unprintable characters with dots. + + @type data: str + @param data: Binary data. + + @rtype: str + @return: Printable text. + """ + result = '' + for c in data: + if 32 < ord(c) < 128: + result += c + else: + result += '.' + return result + + @staticmethod + def hexadecimal(data, separator = ''): + """ + Convert binary data to a string of hexadecimal numbers. + + @type data: str + @param data: Binary data. + + @type separator: str + @param separator: + Separator between the hexadecimal representation of each character. + + @rtype: str + @return: Hexadecimal representation. 
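+
+        A quick sketch (byte strings are expected under Python 2):
+
+        Example::
+            from winappdbg.textio import HexDump
+
+            print(HexDump.hexadecimal("ABC"))         # 414243
+            print(HexDump.hexadecimal("ABC", " "))    # 41 42 43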
+ """ + return separator.join( [ '%.2x' % ord(c) for c in data ] ) + + @staticmethod + def hexa_word(data, separator = ' '): + """ + Convert binary data to a string of hexadecimal WORDs. + + @type data: str + @param data: Binary data. + + @type separator: str + @param separator: + Separator between the hexadecimal representation of each WORD. + + @rtype: str + @return: Hexadecimal representation. + """ + if len(data) & 1 != 0: + data += '\0' + return separator.join( [ '%.4x' % struct.unpack(' 0: + width.extend( len_row[ -missing : ] ) + elif missing < 0: + len_row.extend( [0] * (-missing) ) + self.__width = [ max( width[i], len_row[i] ) for i in compat.xrange(len(len_row)) ] + self.__cols.append(row) + + def justify(self, column, direction): + """ + Make the text in a column left or right justified. + + @type column: int + @param column: Index of the column. + + @type direction: int + @param direction: + C{-1} to justify left, + C{1} to justify right. + + @raise IndexError: Bad column index. + @raise ValueError: Bad direction value. + """ + if direction == -1: + self.__width[column] = abs(self.__width[column]) + elif direction == 1: + self.__width[column] = - abs(self.__width[column]) + else: + raise ValueError("Bad direction value.") + + def getWidth(self): + """ + Get the width of the text output for the table. + + @rtype: int + @return: Width in characters for the text output, + including the newline character. + """ + width = 0 + if self.__width: + width = sum( abs(x) for x in self.__width ) + width = width + len(self.__width) * len(self.__sep) + 1 + return width + + def getOutput(self): + """ + Get the text output for the table. + + @rtype: str + @return: Text output. + """ + return '%s\n' % '\n'.join( self.yieldOutput() ) + + def yieldOutput(self): + """ + Generate the text output for the table. + + @rtype: generator of str + @return: Text output. + """ + width = self.__width + if width: + num_cols = len(width) + fmt = ['%%%ds' % -w for w in width] + if width[-1] > 0: + fmt[-1] = '%s' + fmt = self.__sep.join(fmt) + for row in self.__cols: + row.extend( [''] * (num_cols - len(row)) ) + yield fmt % tuple(row) + + def show(self): + """ + Print the text output for the table. + """ + print(self.getOutput()) + +#------------------------------------------------------------------------------ + +class CrashDump (StaticClass): + """ + Static functions for crash dumps. + + @type reg_template: str + @cvar reg_template: Template for the L{dump_registers} method. + """ + + # Templates for the dump_registers method. + reg_template = { + win32.ARCH_I386 : ( + 'eax=%(Eax).8x ebx=%(Ebx).8x ecx=%(Ecx).8x edx=%(Edx).8x esi=%(Esi).8x edi=%(Edi).8x\n' + 'eip=%(Eip).8x esp=%(Esp).8x ebp=%(Ebp).8x %(efl_dump)s\n' + 'cs=%(SegCs).4x ss=%(SegSs).4x ds=%(SegDs).4x es=%(SegEs).4x fs=%(SegFs).4x gs=%(SegGs).4x efl=%(EFlags).8x\n' + ), + win32.ARCH_AMD64 : ( + 'rax=%(Rax).16x rbx=%(Rbx).16x rcx=%(Rcx).16x\n' + 'rdx=%(Rdx).16x rsi=%(Rsi).16x rdi=%(Rdi).16x\n' + 'rip=%(Rip).16x rsp=%(Rsp).16x rbp=%(Rbp).16x\n' + ' r8=%(R8).16x r9=%(R9).16x r10=%(R10).16x\n' + 'r11=%(R11).16x r12=%(R12).16x r13=%(R13).16x\n' + 'r14=%(R14).16x r15=%(R15).16x\n' + '%(efl_dump)s\n' + 'cs=%(SegCs).4x ss=%(SegSs).4x ds=%(SegDs).4x es=%(SegEs).4x fs=%(SegFs).4x gs=%(SegGs).4x efl=%(EFlags).8x\n' + ), + } + + @staticmethod + def dump_flags(efl): + """ + Dump the x86 processor flags. + The output mimics that of the WinDBG debugger. + Used by L{dump_registers}. + + @type efl: int + @param efl: Value of the eFlags register. 
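+
+        A minimal sketch; the flags value below is arbitrary and would
+        normally come from a thread context:
+
+        Example::
+            from winappdbg.textio import CrashDump
+
+            # For example: efl = thread.get_context()['EFlags']
+            print(CrashDump.dump_flags(0x246))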
+ + @rtype: str + @return: Text suitable for logging. + """ + if efl is None: + return '' + efl_dump = 'iopl=%1d' % ((efl & 0x3000) >> 12) + if efl & 0x100000: + efl_dump += ' vip' + else: + efl_dump += ' ' + if efl & 0x80000: + efl_dump += ' vif' + else: + efl_dump += ' ' + # 0x20000 ??? + if efl & 0x800: + efl_dump += ' ov' # Overflow + else: + efl_dump += ' no' # No overflow + if efl & 0x400: + efl_dump += ' dn' # Downwards + else: + efl_dump += ' up' # Upwards + if efl & 0x200: + efl_dump += ' ei' # Enable interrupts + else: + efl_dump += ' di' # Disable interrupts + # 0x100 trap flag + if efl & 0x80: + efl_dump += ' ng' # Negative + else: + efl_dump += ' pl' # Positive + if efl & 0x40: + efl_dump += ' zr' # Zero + else: + efl_dump += ' nz' # Nonzero + if efl & 0x10: + efl_dump += ' ac' # Auxiliary carry + else: + efl_dump += ' na' # No auxiliary carry + # 0x8 ??? + if efl & 0x4: + efl_dump += ' pe' # Parity odd + else: + efl_dump += ' po' # Parity even + # 0x2 ??? + if efl & 0x1: + efl_dump += ' cy' # Carry + else: + efl_dump += ' nc' # No carry + return efl_dump + + @classmethod + def dump_registers(cls, registers, arch = None): + """ + Dump the x86/x64 processor register values. + The output mimics that of the WinDBG debugger. + + @type registers: dict( str S{->} int ) + @param registers: Dictionary mapping register names to their values. + + @type arch: str + @param arch: Architecture of the machine whose registers were dumped. + Defaults to the current architecture. + Currently only the following architectures are supported: + - L{win32.ARCH_I386} + - L{win32.ARCH_AMD64} + + @rtype: str + @return: Text suitable for logging. + """ + if registers is None: + return '' + if arch is None: + if 'Eax' in registers: + arch = win32.ARCH_I386 + elif 'Rax' in registers: + arch = win32.ARCH_AMD64 + else: + arch = 'Unknown' + if arch not in cls.reg_template: + msg = "Don't know how to dump the registers for architecture: %s" + raise NotImplementedError(msg % arch) + registers = registers.copy() + registers['efl_dump'] = cls.dump_flags( registers['EFlags'] ) + return cls.reg_template[arch] % registers + + @staticmethod + def dump_registers_peek(registers, data, separator = ' ', width = 16): + """ + Dump data pointed to by the given registers, if any. + + @type registers: dict( str S{->} int ) + @param registers: Dictionary mapping register names to their values. + This value is returned by L{Thread.get_context}. + + @type data: dict( str S{->} str ) + @param data: Dictionary mapping register names to the data they point to. + This value is returned by L{Thread.peek_pointers_in_registers}. + + @rtype: str + @return: Text suitable for logging. + """ + if None in (registers, data): + return '' + names = compat.keys(data) + names.sort() + result = '' + for reg_name in names: + tag = reg_name.lower() + dumped = HexDump.hexline(data[reg_name], separator, width) + result += '%s -> %s\n' % (tag, dumped) + return result + + @staticmethod + def dump_data_peek(data, base = 0, + separator = ' ', + width = 16, + bits = None): + """ + Dump data from pointers guessed within the given binary data. + + @type data: str + @param data: Dictionary mapping offsets to the data they point to. + + @type base: int + @param base: Base offset. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.address_size} + + @rtype: str + @return: Text suitable for logging. 
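+
+        A small sketch using made-up data; normally the offsets and strings
+        come from peeking at the memory of a target process.
+
+        Example::
+            from winappdbg.textio import CrashDump
+
+            peeked = { 0: "ABCD", 8: "EFGH" }
+            print(CrashDump.dump_data_peek(peeked, base = 0x400000))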
+ """ + if data is None: + return '' + pointers = compat.keys(data) + pointers.sort() + result = '' + for offset in pointers: + dumped = HexDump.hexline(data[offset], separator, width) + address = HexDump.address(base + offset, bits) + result += '%s -> %s\n' % (address, dumped) + return result + + @staticmethod + def dump_stack_peek(data, separator = ' ', width = 16, arch = None): + """ + Dump data from pointers guessed within the given stack dump. + + @type data: str + @param data: Dictionary mapping stack offsets to the data they point to. + + @type separator: str + @param separator: + Separator between the hexadecimal representation of each character. + + @type width: int + @param width: + (Optional) Maximum number of characters to convert per text line. + This value is also used for padding. + + @type arch: str + @param arch: Architecture of the machine whose registers were dumped. + Defaults to the current architecture. + + @rtype: str + @return: Text suitable for logging. + """ + if data is None: + return '' + if arch is None: + arch = win32.arch + pointers = compat.keys(data) + pointers.sort() + result = '' + if pointers: + if arch == win32.ARCH_I386: + spreg = 'esp' + elif arch == win32.ARCH_AMD64: + spreg = 'rsp' + else: + spreg = 'STACK' # just a generic tag + tag_fmt = '[%s+0x%%.%dx]' % (spreg, len( '%x' % pointers[-1] ) ) + for offset in pointers: + dumped = HexDump.hexline(data[offset], separator, width) + tag = tag_fmt % offset + result += '%s -> %s\n' % (tag, dumped) + return result + + @staticmethod + def dump_stack_trace(stack_trace, bits = None): + """ + Dump a stack trace, as returned by L{Thread.get_stack_trace} with the + C{bUseLabels} parameter set to C{False}. + + @type stack_trace: list( int, int, str ) + @param stack_trace: Stack trace as a list of tuples of + ( return address, frame pointer, module filename ) + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.address_size} + + @rtype: str + @return: Text suitable for logging. + """ + if not stack_trace: + return '' + table = Table() + table.addRow('Frame', 'Origin', 'Module') + for (fp, ra, mod) in stack_trace: + fp_d = HexDump.address(fp, bits) + ra_d = HexDump.address(ra, bits) + table.addRow(fp_d, ra_d, mod) + return table.getOutput() + + @staticmethod + def dump_stack_trace_with_labels(stack_trace, bits = None): + """ + Dump a stack trace, + as returned by L{Thread.get_stack_trace_with_labels}. + + @type stack_trace: list( int, int, str ) + @param stack_trace: Stack trace as a list of tuples of + ( return address, frame pointer, module filename ) + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.address_size} + + @rtype: str + @return: Text suitable for logging. + """ + if not stack_trace: + return '' + table = Table() + table.addRow('Frame', 'Origin') + for (fp, label) in stack_trace: + table.addRow( HexDump.address(fp, bits), label ) + return table.getOutput() + + # TODO + # + Instead of a star when EIP points to, it would be better to show + # any register value (or other values like the exception address) that + # points to a location in the dissassembled code. + # + It'd be very useful to show some labels here. + # + It'd be very useful to show register contents for code at EIP + @staticmethod + def dump_code(disassembly, pc = None, + bLowercase = True, + bits = None): + """ + Dump a disassembly. 
Optionally mark where the program counter is. + + @type disassembly: list of tuple( int, int, str, str ) + @param disassembly: Disassembly dump as returned by + L{Process.disassemble} or L{Thread.disassemble_around_pc}. + + @type pc: int + @param pc: (Optional) Program counter. + + @type bLowercase: bool + @param bLowercase: (Optional) If C{True} convert the code to lowercase. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.address_size} + + @rtype: str + @return: Text suitable for logging. + """ + if not disassembly: + return '' + table = Table(sep = ' | ') + for (addr, size, code, dump) in disassembly: + if bLowercase: + code = code.lower() + if addr == pc: + addr = ' * %s' % HexDump.address(addr, bits) + else: + addr = ' %s' % HexDump.address(addr, bits) + table.addRow(addr, dump, code) + table.justify(1, 1) + return table.getOutput() + + @staticmethod + def dump_code_line(disassembly_line, bShowAddress = True, + bShowDump = True, + bLowercase = True, + dwDumpWidth = None, + dwCodeWidth = None, + bits = None): + """ + Dump a single line of code. To dump a block of code use L{dump_code}. + + @type disassembly_line: tuple( int, int, str, str ) + @param disassembly_line: Single item of the list returned by + L{Process.disassemble} or L{Thread.disassemble_around_pc}. + + @type bShowAddress: bool + @param bShowAddress: (Optional) If C{True} show the memory address. + + @type bShowDump: bool + @param bShowDump: (Optional) If C{True} show the hexadecimal dump. + + @type bLowercase: bool + @param bLowercase: (Optional) If C{True} convert the code to lowercase. + + @type dwDumpWidth: int or None + @param dwDumpWidth: (Optional) Width in characters of the hex dump. + + @type dwCodeWidth: int or None + @param dwCodeWidth: (Optional) Width in characters of the code. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.address_size} + + @rtype: str + @return: Text suitable for logging. + """ + if bits is None: + address_size = HexDump.address_size + else: + address_size = bits / 4 + (addr, size, code, dump) = disassembly_line + dump = dump.replace(' ', '') + result = list() + fmt = '' + if bShowAddress: + result.append( HexDump.address(addr, bits) ) + fmt += '%%%ds:' % address_size + if bShowDump: + result.append(dump) + if dwDumpWidth: + fmt += ' %%-%ds' % dwDumpWidth + else: + fmt += ' %s' + if bLowercase: + code = code.lower() + result.append(code) + if dwCodeWidth: + fmt += ' %%-%ds' % dwCodeWidth + else: + fmt += ' %s' + return fmt % tuple(result) + + @staticmethod + def dump_memory_map(memoryMap, mappedFilenames = None, bits = None): + """ + Dump the memory map of a process. Optionally show the filenames for + memory mapped files as well. + + @type memoryMap: list( L{win32.MemoryBasicInformation} ) + @param memoryMap: Memory map returned by L{Process.get_memory_map}. + + @type mappedFilenames: dict( int S{->} str ) + @param mappedFilenames: (Optional) Memory mapped filenames + returned by L{Process.get_mapped_filenames}. + + @type bits: int + @param bits: + (Optional) Number of bits of the target architecture. + The default is platform dependent. See: L{HexDump.address_size} + + @rtype: str + @return: Text suitable for logging. 
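+
+        A short sketch, assuming C{process} is an existing L{Process}
+        instance obtained elsewhere:
+
+        Example::
+            from winappdbg.textio import CrashDump
+
+            memory_map = process.get_memory_map()
+            filenames  = process.get_mapped_filenames()
+            print(CrashDump.dump_memory_map(memory_map, filenames))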
+ """ + if not memoryMap: + return '' + + table = Table() + if mappedFilenames: + table.addRow("Address", "Size", "State", "Access", "Type", "File") + else: + table.addRow("Address", "Size", "State", "Access", "Type") + + # For each memory block in the map... + for mbi in memoryMap: + + # Address and size of memory block. + BaseAddress = HexDump.address(mbi.BaseAddress, bits) + RegionSize = HexDump.address(mbi.RegionSize, bits) + + # State (free or allocated). + mbiState = mbi.State + if mbiState == win32.MEM_RESERVE: + State = "Reserved" + elif mbiState == win32.MEM_COMMIT: + State = "Commited" + elif mbiState == win32.MEM_FREE: + State = "Free" + else: + State = "Unknown" + + # Page protection bits (R/W/X/G). + if mbiState != win32.MEM_COMMIT: + Protect = "" + else: + mbiProtect = mbi.Protect + if mbiProtect & win32.PAGE_NOACCESS: + Protect = "--- " + elif mbiProtect & win32.PAGE_READONLY: + Protect = "R-- " + elif mbiProtect & win32.PAGE_READWRITE: + Protect = "RW- " + elif mbiProtect & win32.PAGE_WRITECOPY: + Protect = "RC- " + elif mbiProtect & win32.PAGE_EXECUTE: + Protect = "--X " + elif mbiProtect & win32.PAGE_EXECUTE_READ: + Protect = "R-X " + elif mbiProtect & win32.PAGE_EXECUTE_READWRITE: + Protect = "RWX " + elif mbiProtect & win32.PAGE_EXECUTE_WRITECOPY: + Protect = "RCX " + else: + Protect = "??? " + if mbiProtect & win32.PAGE_GUARD: + Protect += "G" + else: + Protect += "-" + if mbiProtect & win32.PAGE_NOCACHE: + Protect += "N" + else: + Protect += "-" + if mbiProtect & win32.PAGE_WRITECOMBINE: + Protect += "W" + else: + Protect += "-" + + # Type (file mapping, executable image, or private memory). + mbiType = mbi.Type + if mbiType == win32.MEM_IMAGE: + Type = "Image" + elif mbiType == win32.MEM_MAPPED: + Type = "Mapped" + elif mbiType == win32.MEM_PRIVATE: + Type = "Private" + elif mbiType == 0: + Type = "" + else: + Type = "Unknown" + + # Output a row in the table. + if mappedFilenames: + FileName = mappedFilenames.get(mbi.BaseAddress, '') + table.addRow( BaseAddress, RegionSize, State, Protect, Type, FileName ) + else: + table.addRow( BaseAddress, RegionSize, State, Protect, Type ) + + # Return the table output. + return table.getOutput() + +#------------------------------------------------------------------------------ + +class DebugLog (StaticClass): + 'Static functions for debug logging.' + + @staticmethod + def log_text(text): + """ + Log lines of text, inserting a timestamp. + + @type text: str + @param text: Text to log. + + @rtype: str + @return: Log line. + """ + if text.endswith('\n'): + text = text[:-len('\n')] + #text = text.replace('\n', '\n\t\t') # text CSV + ltime = time.strftime("%X") + msecs = (time.time() % 1) * 1000 + return '[%s.%04d] %s' % (ltime, msecs, text) + #return '[%s.%04d]\t%s' % (ltime, msecs, text) # text CSV + + @classmethod + def log_event(cls, event, text = None): + """ + Log lines of text associated with a debug event. + + @type event: L{Event} + @param event: Event object. + + @type text: str + @param text: (Optional) Text to log. If no text is provided the default + is to show a description of the event itself. + + @rtype: str + @return: Log line. 
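+
+        A minimal sketch, assuming this is called from a debug event
+        handler that received an L{Event} object:
+
+        Example::
+            from winappdbg.textio import DebugLog
+
+            def my_event_handler(event):
+                print(DebugLog.log_event(event))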
+ """ + if not text: + if event.get_event_code() == win32.EXCEPTION_DEBUG_EVENT: + what = event.get_exception_description() + if event.is_first_chance(): + what = '%s (first chance)' % what + else: + what = '%s (second chance)' % what + try: + address = event.get_fault_address() + except NotImplementedError: + address = event.get_exception_address() + else: + what = event.get_event_name() + address = event.get_thread().get_pc() + process = event.get_process() + label = process.get_label_at_address(address) + address = HexDump.address(address, process.get_bits()) + if label: + where = '%s (%s)' % (address, label) + else: + where = address + text = '%s at %s' % (what, where) + text = 'pid %d tid %d: %s' % (event.get_pid(), event.get_tid(), text) + #text = 'pid %d tid %d:\t%s' % (event.get_pid(), event.get_tid(), text) # text CSV + return cls.log_text(text) + +#------------------------------------------------------------------------------ + +class Logger(object): + """ + Logs text to standard output and/or a text file. + + @type logfile: str or None + @ivar logfile: Append messages to this text file. + + @type verbose: bool + @ivar verbose: C{True} to print messages to standard output. + + @type fd: file + @ivar fd: File object where log messages are printed to. + C{None} if no log file is used. + """ + + def __init__(self, logfile = None, verbose = True): + """ + @type logfile: str or None + @param logfile: Append messages to this text file. + + @type verbose: bool + @param verbose: C{True} to print messages to standard output. + """ + self.verbose = verbose + self.logfile = logfile + if self.logfile: + self.fd = open(self.logfile, 'a+') + + def __logfile_error(self, e): + """ + Shows an error message to standard error + if the log file can't be written to. + + Used internally. + + @type e: Exception + @param e: Exception raised when trying to write to the log file. + """ + from sys import stderr + msg = "Warning, error writing log file %s: %s\n" + msg = msg % (self.logfile, str(e)) + stderr.write(DebugLog.log_text(msg)) + self.logfile = None + self.fd = None + + def __do_log(self, text): + """ + Writes the given text verbatim into the log file (if any) + and/or standard input (if the verbose flag is turned on). + + Used internally. + + @type text: str + @param text: Text to print. + """ + if isinstance(text, compat.unicode): + text = text.encode('cp1252') + if self.verbose: + print(text) + if self.logfile: + try: + self.fd.writelines('%s\n' % text) + except IOError: + e = sys.exc_info()[1] + self.__logfile_error(e) + + def log_text(self, text): + """ + Log lines of text, inserting a timestamp. + + @type text: str + @param text: Text to log. + """ + self.__do_log( DebugLog.log_text(text) ) + + def log_event(self, event, text = None): + """ + Log lines of text associated with a debug event. + + @type event: L{Event} + @param event: Event object. + + @type text: str + @param text: (Optional) Text to log. If no text is provided the default + is to show a description of the event itself. + """ + self.__do_log( DebugLog.log_event(event, text) ) + + def log_exc(self): + """ + Log lines of text associated with the last Python exception. + """ + self.__do_log( 'Exception raised: %s' % traceback.format_exc() ) + + def is_enabled(self): + """ + Determines if the logger will actually print anything when the log_* + methods are called. + + This may save some processing if the log text requires a lengthy + calculation to prepare. 
If no log file is set and stdout logging + is disabled, there's no point in preparing a log text that won't + be shown to anyone. + + @rtype: bool + @return: C{True} if a log file was set and/or standard output logging + is enabled, or C{False} otherwise. + """ + return self.verbose or self.logfile diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/thread.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/thread.py new file mode 100644 index 00000000..9307c421 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/thread.py @@ -0,0 +1,2127 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Thread instrumentation. + +@group Instrumentation: + Thread +""" + +from __future__ import with_statement + +__revision__ = "$Id$" + +__all__ = ['Thread'] + +from winappdbg import win32 +from winappdbg import compat +from winappdbg.textio import HexDump +from winappdbg.util import DebugRegister +from winappdbg.window import Window + +import sys +import struct +import warnings + +# delayed imports +Process = None + +#============================================================================== + +# TODO +# + fetch special registers (MMX, XMM, 3DNow!, etc) + +class Thread (object): + """ + Interface to a thread in another process. 
+ + @group Properties: + get_tid, get_pid, get_process, set_process, get_exit_code, is_alive, + get_name, set_name, get_windows, get_teb, get_teb_address, is_wow64, + get_arch, get_bits, get_handle, open_handle, close_handle + + @group Instrumentation: + suspend, resume, kill, wait + + @group Debugging: + get_seh_chain_pointer, set_seh_chain_pointer, + get_seh_chain, get_wait_chain, is_hidden + + @group Disassembly: + disassemble, disassemble_around, disassemble_around_pc, + disassemble_string, disassemble_instruction, disassemble_current + + @group Stack: + get_stack_frame, get_stack_frame_range, get_stack_range, + get_stack_trace, get_stack_trace_with_labels, + read_stack_data, read_stack_dwords, read_stack_qwords, + peek_stack_data, peek_stack_dwords, peek_stack_qwords, + read_stack_structure, read_stack_frame + + @group Registers: + get_context, + get_register, + get_flags, get_flag_value, + get_pc, get_sp, get_fp, + get_cf, get_df, get_sf, get_tf, get_zf, + set_context, + set_register, + set_flags, set_flag_value, + set_pc, set_sp, set_fp, + set_cf, set_df, set_sf, set_tf, set_zf, + clear_cf, clear_df, clear_sf, clear_tf, clear_zf, + Flags + + @group Threads snapshot: + clear + + @group Miscellaneous: + read_code_bytes, peek_code_bytes, + peek_pointers_in_data, peek_pointers_in_registers, + get_linear_address, get_label_at_pc + + @type dwThreadId: int + @ivar dwThreadId: Global thread ID. Use L{get_tid} instead. + + @type hThread: L{ThreadHandle} + @ivar hThread: Handle to the thread. Use L{get_handle} instead. + + @type process: L{Process} + @ivar process: Parent process object. Use L{get_process} instead. + + @type pInjectedMemory: int + @ivar pInjectedMemory: If the thread was created by L{Process.inject_code}, + this member contains a pointer to the memory buffer for the injected + code. Otherwise it's C{None}. + + The L{kill} method uses this member to free the buffer + when the injected thread is killed. + """ + + def __init__(self, dwThreadId, hThread = None, process = None): + """ + @type dwThreadId: int + @param dwThreadId: Global thread ID. + + @type hThread: L{ThreadHandle} + @param hThread: (Optional) Handle to the thread. + + @type process: L{Process} + @param process: (Optional) Parent Process object. + """ + self.dwProcessId = None + self.dwThreadId = dwThreadId + self.hThread = hThread + self.pInjectedMemory = None + self.set_name(None) + self.set_process(process) + + # Not really sure if it's a good idea... +## def __eq__(self, aThread): +## """ +## Compare two Thread objects. The comparison is made using the IDs. +## +## @warning: +## If you have two Thread instances with different handles the +## equality operator still returns C{True}, so be careful! +## +## @type aThread: L{Thread} +## @param aThread: Another Thread object. +## +## @rtype: bool +## @return: C{True} if the two thread IDs are equal, +## C{False} otherwise. +## """ +## return isinstance(aThread, Thread) and \ +## self.get_tid() == aThread.get_tid() + + def __load_Process_class(self): + global Process # delayed import + if Process is None: + from winappdbg.process import Process + + def get_process(self): + """ + @rtype: L{Process} + @return: Parent Process object. + Returns C{None} if unknown. + """ + if self.__process is not None: + return self.__process + self.__load_Process_class() + self.__process = Process(self.get_pid()) + return self.__process + + def set_process(self, process = None): + """ + Manually set the parent Process object. Use with care! 
+ + @type process: L{Process} + @param process: (Optional) Process object. Use C{None} for no process. + """ + if process is None: + self.dwProcessId = None + self.__process = None + else: + self.__load_Process_class() + if not isinstance(process, Process): + msg = "Parent process must be a Process instance, " + msg += "got %s instead" % type(process) + raise TypeError(msg) + self.dwProcessId = process.get_pid() + self.__process = process + + process = property(get_process, set_process, doc="") + + def get_pid(self): + """ + @rtype: int + @return: Parent process global ID. + + @raise WindowsError: An error occured when calling a Win32 API function. + @raise RuntimeError: The parent process ID can't be found. + """ + if self.dwProcessId is None: + if self.__process is not None: + # Infinite loop if self.__process is None + self.dwProcessId = self.get_process().get_pid() + else: + try: + # I wish this had been implemented before Vista... + # XXX TODO find the real ntdll call under this api + hThread = self.get_handle( + win32.THREAD_QUERY_LIMITED_INFORMATION) + self.dwProcessId = win32.GetProcessIdOfThread(hThread) + except AttributeError: + # This method really sucks :P + self.dwProcessId = self.__get_pid_by_scanning() + return self.dwProcessId + + def __get_pid_by_scanning(self): + 'Internally used by get_pid().' + dwProcessId = None + dwThreadId = self.get_tid() + with win32.CreateToolhelp32Snapshot(win32.TH32CS_SNAPTHREAD) as hSnapshot: + te = win32.Thread32First(hSnapshot) + while te is not None: + if te.th32ThreadID == dwThreadId: + dwProcessId = te.th32OwnerProcessID + break + te = win32.Thread32Next(hSnapshot) + if dwProcessId is None: + msg = "Cannot find thread ID %d in any process" % dwThreadId + raise RuntimeError(msg) + return dwProcessId + + def get_tid(self): + """ + @rtype: int + @return: Thread global ID. + """ + return self.dwThreadId + + def get_name(self): + """ + @rtype: str + @return: Thread name, or C{None} if the thread is nameless. + """ + return self.name + + def set_name(self, name = None): + """ + Sets the thread's name. + + @type name: str + @param name: Thread name, or C{None} if the thread is nameless. + """ + self.name = name + +#------------------------------------------------------------------------------ + + def open_handle(self, dwDesiredAccess = win32.THREAD_ALL_ACCESS): + """ + Opens a new handle to the thread, closing the previous one. + + The new handle is stored in the L{hThread} property. + + @warn: Normally you should call L{get_handle} instead, since it's much + "smarter" and tries to reuse handles and merge access rights. + + @type dwDesiredAccess: int + @param dwDesiredAccess: Desired access rights. + Defaults to L{win32.THREAD_ALL_ACCESS}. + See: U{http://msdn.microsoft.com/en-us/library/windows/desktop/ms686769(v=vs.85).aspx} + + @raise WindowsError: It's not possible to open a handle to the thread + with the requested access rights. This tipically happens because + the target thread belongs to system process and the debugger is not + runnning with administrative rights. + """ + hThread = win32.OpenThread(dwDesiredAccess, win32.FALSE, self.dwThreadId) + + # In case hThread was set to an actual handle value instead of a Handle + # object. This shouldn't happen unless the user tinkered with it. + if not hasattr(self.hThread, '__del__'): + self.close_handle() + + self.hThread = hThread + + def close_handle(self): + """ + Closes the handle to the thread. + + @note: Normally you don't need to call this method. 
All handles + created by I{WinAppDbg} are automatically closed when the garbage + collector claims them. + """ + try: + if hasattr(self.hThread, 'close'): + self.hThread.close() + elif self.hThread not in (None, win32.INVALID_HANDLE_VALUE): + win32.CloseHandle(self.hThread) + finally: + self.hThread = None + + def get_handle(self, dwDesiredAccess = win32.THREAD_ALL_ACCESS): + """ + Returns a handle to the thread with I{at least} the access rights + requested. + + @note: + If a handle was previously opened and has the required access + rights, it's reused. If not, a new handle is opened with the + combination of the old and new access rights. + + @type dwDesiredAccess: int + @param dwDesiredAccess: Desired access rights. + See: U{http://msdn.microsoft.com/en-us/library/windows/desktop/ms686769(v=vs.85).aspx} + + @rtype: ThreadHandle + @return: Handle to the thread. + + @raise WindowsError: It's not possible to open a handle to the thread + with the requested access rights. This tipically happens because + the target thread belongs to system process and the debugger is not + runnning with administrative rights. + """ + if self.hThread in (None, win32.INVALID_HANDLE_VALUE): + self.open_handle(dwDesiredAccess) + else: + dwAccess = self.hThread.dwAccess + if (dwAccess | dwDesiredAccess) != dwAccess: + self.open_handle(dwAccess | dwDesiredAccess) + return self.hThread + + def clear(self): + """ + Clears the resources held by this object. + """ + try: + self.set_process(None) + finally: + self.close_handle() + +#------------------------------------------------------------------------------ + + def wait(self, dwTimeout = None): + """ + Waits for the thread to finish executing. + + @type dwTimeout: int + @param dwTimeout: (Optional) Timeout value in milliseconds. + Use C{INFINITE} or C{None} for no timeout. + """ + self.get_handle(win32.SYNCHRONIZE).wait(dwTimeout) + + def kill(self, dwExitCode = 0): + """ + Terminates the thread execution. + + @note: If the C{lpInjectedMemory} member contains a valid pointer, + the memory is freed. + + @type dwExitCode: int + @param dwExitCode: (Optional) Thread exit code. + """ + hThread = self.get_handle(win32.THREAD_TERMINATE) + win32.TerminateThread(hThread, dwExitCode) + + # Ugliest hack ever, won't work if many pieces of code are injected. + # Seriously, what was I thinking? Lame! :( + if self.pInjectedMemory is not None: + try: + self.get_process().free(self.pInjectedMemory) + self.pInjectedMemory = None + except Exception: +## raise # XXX DEBUG + pass + + # XXX TODO + # suspend() and resume() should have a counter of how many times a thread + # was suspended, so on debugger exit they could (optionally!) be restored + + def suspend(self): + """ + Suspends the thread execution. + + @rtype: int + @return: Suspend count. If zero, the thread is running. + """ + hThread = self.get_handle(win32.THREAD_SUSPEND_RESUME) + if self.is_wow64(): + # FIXME this will be horribly slow on XP 64 + # since it'll try to resolve a missing API every time + try: + return win32.Wow64SuspendThread(hThread) + except AttributeError: + pass + return win32.SuspendThread(hThread) + + def resume(self): + """ + Resumes the thread execution. + + @rtype: int + @return: Suspend count. If zero, the thread is running. + """ + hThread = self.get_handle(win32.THREAD_SUSPEND_RESUME) + return win32.ResumeThread(hThread) + + def is_alive(self): + """ + @rtype: bool + @return: C{True} if the thread if currently running. 
+ @raise WindowsError: + The debugger doesn't have enough privileges to perform this action. + """ + try: + self.wait(0) + except WindowsError: + e = sys.exc_info()[1] + error = e.winerror + if error == win32.ERROR_ACCESS_DENIED: + raise + return error == win32.WAIT_TIMEOUT + return True + + def get_exit_code(self): + """ + @rtype: int + @return: Thread exit code, or C{STILL_ACTIVE} if it's still alive. + """ + if win32.THREAD_ALL_ACCESS == win32.THREAD_ALL_ACCESS_VISTA: + dwAccess = win32.THREAD_QUERY_LIMITED_INFORMATION + else: + dwAccess = win32.THREAD_QUERY_INFORMATION + return win32.GetExitCodeThread( self.get_handle(dwAccess) ) + +#------------------------------------------------------------------------------ + + # XXX TODO + # Support for string searches on the window captions. + + def get_windows(self): + """ + @rtype: list of L{Window} + @return: Returns a list of windows handled by this thread. + """ + try: + process = self.get_process() + except Exception: + process = None + return [ + Window( hWnd, process, self ) \ + for hWnd in win32.EnumThreadWindows( self.get_tid() ) + ] + +#------------------------------------------------------------------------------ + + # TODO + # A registers cache could be implemented here. + def get_context(self, ContextFlags = None, bSuspend = False): + """ + Retrieves the execution context (i.e. the registers values) for this + thread. + + @type ContextFlags: int + @param ContextFlags: Optional, specify which registers to retrieve. + Defaults to C{win32.CONTEXT_ALL} which retrieves all registes + for the current platform. + + @type bSuspend: bool + @param bSuspend: C{True} to automatically suspend the thread before + getting its context, C{False} otherwise. + + Defaults to C{False} because suspending the thread during some + debug events (like thread creation or destruction) may lead to + strange errors. + + Note that WinAppDbg 1.4 used to suspend the thread automatically + always. This behavior was changed in version 1.5. + + @rtype: dict( str S{->} int ) + @return: Dictionary mapping register names to their values. + + @see: L{set_context} + """ + + # Some words on the "strange errors" that lead to the bSuspend + # parameter. Peter Van Eeckhoutte and I were working on a fix + # for some bugs he found in the 1.5 betas when we stumbled upon + # what seemed to be a deadlock in the debug API that caused the + # GetThreadContext() call never to return. Since removing the + # call to SuspendThread() solved the problem, and a few Google + # searches showed a handful of problems related to these two + # APIs and Wow64 environments, I decided to break compatibility. + # + # Here are some pages about the weird behavior of SuspendThread: + # http://zachsaw.blogspot.com.es/2010/11/wow64-bug-getthreadcontext-may-return.html + # http://stackoverflow.com/questions/3444190/windows-suspendthread-doesnt-getthreadcontext-fails + + # Get the thread handle. + dwAccess = win32.THREAD_GET_CONTEXT + if bSuspend: + dwAccess = dwAccess | win32.THREAD_SUSPEND_RESUME + hThread = self.get_handle(dwAccess) + + # Suspend the thread if requested. + if bSuspend: + try: + self.suspend() + except WindowsError: + # Threads can't be suspended when the exit process event + # arrives, but you can still get the context. + bSuspend = False + + # If an exception is raised, make sure the thread execution is resumed. + try: + + if win32.bits == self.get_bits(): + + # 64 bit debugger attached to 64 bit process, or + # 32 bit debugger attached to 32 bit process. 
+ ctx = win32.GetThreadContext(hThread, + ContextFlags = ContextFlags) + + else: + if self.is_wow64(): + + # 64 bit debugger attached to 32 bit process. + if ContextFlags is not None: + ContextFlags &= ~win32.ContextArchMask + ContextFlags |= win32.WOW64_CONTEXT_i386 + ctx = win32.Wow64GetThreadContext(hThread, ContextFlags) + + else: + + # 32 bit debugger attached to 64 bit process. + # XXX only i386/AMD64 is supported in this particular case + if win32.arch not in (win32.ARCH_I386, win32.ARCH_AMD64): + raise NotImplementedError() + if ContextFlags is not None: + ContextFlags &= ~win32.ContextArchMask + ContextFlags |= win32.context_amd64.CONTEXT_AMD64 + ctx = win32.context_amd64.GetThreadContext(hThread, + ContextFlags = ContextFlags) + + finally: + + # Resume the thread if we suspended it. + if bSuspend: + self.resume() + + # Return the context. + return ctx + + def set_context(self, context, bSuspend = False): + """ + Sets the values of the registers. + + @see: L{get_context} + + @type context: dict( str S{->} int ) + @param context: Dictionary mapping register names to their values. + + @type bSuspend: bool + @param bSuspend: C{True} to automatically suspend the thread before + setting its context, C{False} otherwise. + + Defaults to C{False} because suspending the thread during some + debug events (like thread creation or destruction) may lead to + strange errors. + + Note that WinAppDbg 1.4 used to suspend the thread automatically + always. This behavior was changed in version 1.5. + """ + + # Get the thread handle. + dwAccess = win32.THREAD_SET_CONTEXT + if bSuspend: + dwAccess = dwAccess | win32.THREAD_SUSPEND_RESUME + hThread = self.get_handle(dwAccess) + + # Suspend the thread if requested. + if bSuspend: + self.suspend() + # No fix for the exit process event bug. + # Setting the context of a dead thread is pointless anyway. + + # Set the thread context. + try: + if win32.bits == 64 and self.is_wow64(): + win32.Wow64SetThreadContext(hThread, context) + else: + win32.SetThreadContext(hThread, context) + + # Resume the thread if we suspended it. + finally: + if bSuspend: + self.resume() + + def get_register(self, register): + """ + @type register: str + @param register: Register name. + + @rtype: int + @return: Value of the requested register. + """ + 'Returns the value of a specific register.' + context = self.get_context() + return context[register] + + def set_register(self, register, value): + """ + Sets the value of a specific register. + + @type register: str + @param register: Register name. + + @rtype: int + @return: Register value. + """ + context = self.get_context() + context[register] = value + self.set_context(context) + +#------------------------------------------------------------------------------ + + # TODO: a metaclass would do a better job instead of checking the platform + # during module import, also would support mixing 32 and 64 bits + + if win32.arch in (win32.ARCH_I386, win32.ARCH_AMD64): + + def get_pc(self): + """ + @rtype: int + @return: Value of the program counter register. + """ + context = self.get_context(win32.CONTEXT_CONTROL) + return context.pc + + def set_pc(self, pc): + """ + Sets the value of the program counter register. + + @type pc: int + @param pc: Value of the program counter register. + """ + context = self.get_context(win32.CONTEXT_CONTROL) + context.pc = pc + self.set_context(context) + + def get_sp(self): + """ + @rtype: int + @return: Value of the stack pointer register. 
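+
+            A brief usage sketch (illustrative; C{t} is assumed to be a
+            L{Thread} the debugger can query)::
+
+                sp  = t.get_sp()
+                top = t.peek_stack_data(16)    # 16 bytes at the stack top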
+ """ + context = self.get_context(win32.CONTEXT_CONTROL) + return context.sp + + def set_sp(self, sp): + """ + Sets the value of the stack pointer register. + + @type sp: int + @param sp: Value of the stack pointer register. + """ + context = self.get_context(win32.CONTEXT_CONTROL) + context.sp = sp + self.set_context(context) + + def get_fp(self): + """ + @rtype: int + @return: Value of the frame pointer register. + """ + flags = win32.CONTEXT_CONTROL | win32.CONTEXT_INTEGER + context = self.get_context(flags) + return context.fp + + def set_fp(self, fp): + """ + Sets the value of the frame pointer register. + + @type fp: int + @param fp: Value of the frame pointer register. + """ + flags = win32.CONTEXT_CONTROL | win32.CONTEXT_INTEGER + context = self.get_context(flags) + context.fp = fp + self.set_context(context) + +#------------------------------------------------------------------------------ + + if win32.arch in (win32.ARCH_I386, win32.ARCH_AMD64): + + class Flags (object): + 'Commonly used processor flags' + Overflow = 0x800 + Direction = 0x400 + Interrupts = 0x200 + Trap = 0x100 + Sign = 0x80 + Zero = 0x40 + # 0x20 ??? + Auxiliary = 0x10 + # 0x8 ??? + Parity = 0x4 + # 0x2 ??? + Carry = 0x1 + + def get_flags(self, FlagMask = 0xFFFFFFFF): + """ + @type FlagMask: int + @param FlagMask: (Optional) Bitwise-AND mask. + + @rtype: int + @return: Flags register contents, optionally masking out some bits. + """ + context = self.get_context(win32.CONTEXT_CONTROL) + return context['EFlags'] & FlagMask + + def set_flags(self, eflags, FlagMask = 0xFFFFFFFF): + """ + Sets the flags register, optionally masking some bits. + + @type eflags: int + @param eflags: Flags register contents. + + @type FlagMask: int + @param FlagMask: (Optional) Bitwise-AND mask. + """ + context = self.get_context(win32.CONTEXT_CONTROL) + context['EFlags'] = (context['EFlags'] & FlagMask) | eflags + self.set_context(context) + + def get_flag_value(self, FlagBit): + """ + @type FlagBit: int + @param FlagBit: One of the L{Flags}. + + @rtype: bool + @return: Boolean value of the requested flag. + """ + return bool( self.get_flags(FlagBit) ) + + def set_flag_value(self, FlagBit, FlagValue): + """ + Sets a single flag, leaving the others intact. + + @type FlagBit: int + @param FlagBit: One of the L{Flags}. + + @type FlagValue: bool + @param FlagValue: Boolean value of the flag. + """ + if FlagValue: + eflags = FlagBit + else: + eflags = 0 + FlagMask = 0xFFFFFFFF ^ FlagBit + self.set_flags(eflags, FlagMask) + + def get_zf(self): + """ + @rtype: bool + @return: Boolean value of the Zero flag. + """ + return self.get_flag_value(self.Flags.Zero) + + def get_cf(self): + """ + @rtype: bool + @return: Boolean value of the Carry flag. + """ + return self.get_flag_value(self.Flags.Carry) + + def get_sf(self): + """ + @rtype: bool + @return: Boolean value of the Sign flag. + """ + return self.get_flag_value(self.Flags.Sign) + + def get_df(self): + """ + @rtype: bool + @return: Boolean value of the Direction flag. + """ + return self.get_flag_value(self.Flags.Direction) + + def get_tf(self): + """ + @rtype: bool + @return: Boolean value of the Trap flag. + """ + return self.get_flag_value(self.Flags.Trap) + + def clear_zf(self): + 'Clears the Zero flag.' + self.set_flag_value(self.Flags.Zero, False) + + def clear_cf(self): + 'Clears the Carry flag.' + self.set_flag_value(self.Flags.Carry, False) + + def clear_sf(self): + 'Clears the Sign flag.' 
+ self.set_flag_value(self.Flags.Sign, False) + + def clear_df(self): + 'Clears the Direction flag.' + self.set_flag_value(self.Flags.Direction, False) + + def clear_tf(self): + 'Clears the Trap flag.' + self.set_flag_value(self.Flags.Trap, False) + + def set_zf(self): + 'Sets the Zero flag.' + self.set_flag_value(self.Flags.Zero, True) + + def set_cf(self): + 'Sets the Carry flag.' + self.set_flag_value(self.Flags.Carry, True) + + def set_sf(self): + 'Sets the Sign flag.' + self.set_flag_value(self.Flags.Sign, True) + + def set_df(self): + 'Sets the Direction flag.' + self.set_flag_value(self.Flags.Direction, True) + + def set_tf(self): + 'Sets the Trap flag.' + self.set_flag_value(self.Flags.Trap, True) + +#------------------------------------------------------------------------------ + + def is_wow64(self): + """ + Determines if the thread is running under WOW64. + + @rtype: bool + @return: + C{True} if the thread is running under WOW64. That is, it belongs + to a 32-bit application running in a 64-bit Windows. + + C{False} if the thread belongs to either a 32-bit application + running in a 32-bit Windows, or a 64-bit application running in a + 64-bit Windows. + + @raise WindowsError: On error an exception is raised. + + @see: U{http://msdn.microsoft.com/en-us/library/aa384249(VS.85).aspx} + """ + try: + wow64 = self.__wow64 + except AttributeError: + if (win32.bits == 32 and not win32.wow64): + wow64 = False + else: + wow64 = self.get_process().is_wow64() + self.__wow64 = wow64 + return wow64 + + def get_arch(self): + """ + @rtype: str + @return: The architecture in which this thread believes to be running. + For example, if running a 32 bit binary in a 64 bit machine, the + architecture returned by this method will be L{win32.ARCH_I386}, + but the value of L{System.arch} will be L{win32.ARCH_AMD64}. + """ + if win32.bits == 32 and not win32.wow64: + return win32.arch + return self.get_process().get_arch() + + def get_bits(self): + """ + @rtype: str + @return: The number of bits in which this thread believes to be + running. For example, if running a 32 bit binary in a 64 bit + machine, the number of bits returned by this method will be C{32}, + but the value of L{System.arch} will be C{64}. + """ + if win32.bits == 32 and not win32.wow64: + return 32 + return self.get_process().get_bits() + + def is_hidden(self): + """ + Determines if the thread has been hidden from debuggers. + + Some binary packers hide their own threads to thwart debugging. + + @rtype: bool + @return: C{True} if the thread is hidden from debuggers. + This means the thread's execution won't be stopped for debug + events, and thus said events won't be sent to the debugger. + """ + return win32.NtQueryInformationThread( + self.get_handle(), # XXX what permissions do I need? + win32.ThreadHideFromDebugger) + + def get_teb(self): + """ + Returns a copy of the TEB. + To dereference pointers in it call L{Process.read_structure}. + + @rtype: L{TEB} + @return: TEB structure. + @raise WindowsError: An exception is raised on error. + """ + return self.get_process().read_structure( self.get_teb_address(), + win32.TEB ) + + def get_teb_address(self): + """ + Returns a remote pointer to the TEB. + + @rtype: int + @return: Remote pointer to the L{TEB} structure. + @raise WindowsError: An exception is raised on error. 
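+
+        For example (illustrative; C{t} is assumed to be a L{Thread})::
+
+            teb_ptr = t.get_teb_address()
+            teb     = t.get_teb()    # TEB structure read at that address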
+ """ + try: + return self._teb_ptr + except AttributeError: + try: + hThread = self.get_handle(win32.THREAD_QUERY_INFORMATION) + tbi = win32.NtQueryInformationThread( hThread, + win32.ThreadBasicInformation) + address = tbi.TebBaseAddress + except WindowsError: + address = self.get_linear_address('SegFs', 0) # fs:[0] + if not address: + raise + self._teb_ptr = address + return address + + def get_linear_address(self, segment, address): + """ + Translates segment-relative addresses to linear addresses. + + Linear addresses can be used to access a process memory, + calling L{Process.read} and L{Process.write}. + + @type segment: str + @param segment: Segment register name. + + @type address: int + @param address: Segment relative memory address. + + @rtype: int + @return: Linear memory address. + + @raise ValueError: Address is too large for selector. + + @raise WindowsError: + The current architecture does not support selectors. + Selectors only exist in x86-based systems. + """ + hThread = self.get_handle(win32.THREAD_QUERY_INFORMATION) + selector = self.get_register(segment) + ldt = win32.GetThreadSelectorEntry(hThread, selector) + BaseLow = ldt.BaseLow + BaseMid = ldt.HighWord.Bytes.BaseMid << 16 + BaseHi = ldt.HighWord.Bytes.BaseHi << 24 + Base = BaseLow | BaseMid | BaseHi + LimitLow = ldt.LimitLow + LimitHi = ldt.HighWord.Bits.LimitHi << 16 + Limit = LimitLow | LimitHi + if address > Limit: + msg = "Address %s too large for segment %s (selector %d)" + msg = msg % (HexDump.address(address, self.get_bits()), + segment, selector) + raise ValueError(msg) + return Base + address + + def get_label_at_pc(self): + """ + @rtype: str + @return: Label that points to the instruction currently being executed. + """ + return self.get_process().get_label_at_address( self.get_pc() ) + + def get_seh_chain_pointer(self): + """ + Get the pointer to the first structured exception handler block. + + @rtype: int + @return: Remote pointer to the first block of the structured exception + handlers linked list. If the list is empty, the returned value is + C{0xFFFFFFFF}. + + @raise NotImplementedError: + This method is only supported in 32 bits versions of Windows. + """ + if win32.arch != win32.ARCH_I386: + raise NotImplementedError( + "SEH chain parsing is only supported in 32-bit Windows.") + + process = self.get_process() + address = self.get_linear_address( 'SegFs', 0 ) + return process.read_pointer( address ) + + def set_seh_chain_pointer(self, value): + """ + Change the pointer to the first structured exception handler block. + + @type value: int + @param value: Value of the remote pointer to the first block of the + structured exception handlers linked list. To disable SEH set the + value C{0xFFFFFFFF}. + + @raise NotImplementedError: + This method is only supported in 32 bits versions of Windows. + """ + if win32.arch != win32.ARCH_I386: + raise NotImplementedError( + "SEH chain parsing is only supported in 32-bit Windows.") + + process = self.get_process() + address = self.get_linear_address( 'SegFs', 0 ) + process.write_pointer( address, value ) + + def get_seh_chain(self): + """ + @rtype: list of tuple( int, int ) + @return: List of structured exception handlers. + Each SEH is represented as a tuple of two addresses: + - Address of this SEH block + - Address of the SEH callback function + Do not confuse this with the contents of the SEH block itself, + where the first member is a pointer to the B{next} block instead. 
+ + @raise NotImplementedError: + This method is only supported in 32 bits versions of Windows. + """ + seh_chain = list() + try: + process = self.get_process() + seh = self.get_seh_chain_pointer() + while seh != 0xFFFFFFFF: + seh_func = process.read_pointer( seh + 4 ) + seh_chain.append( (seh, seh_func) ) + seh = process.read_pointer( seh ) + except WindowsError: + seh_chain.append( (seh, None) ) + return seh_chain + + def get_wait_chain(self): + """ + @rtype: + tuple of ( + list of L{win32.WaitChainNodeInfo} structures, + bool) + @return: + Wait chain for the thread. + The boolean indicates if there's a cycle in the chain (a deadlock). + @raise AttributeError: + This method is only suppported in Windows Vista and above. + @see: + U{http://msdn.microsoft.com/en-us/library/ms681622%28VS.85%29.aspx} + """ + with win32.OpenThreadWaitChainSession() as hWct: + return win32.GetThreadWaitChain(hWct, ThreadId = self.get_tid()) + + def get_stack_range(self): + """ + @rtype: tuple( int, int ) + @return: Stack beginning and end pointers, in memory addresses order. + That is, the first pointer is the stack top, and the second pointer + is the stack bottom, since the stack grows towards lower memory + addresses. + @raise WindowsError: Raises an exception on error. + """ + # TODO use teb.DeallocationStack too (max. possible stack size) + teb = self.get_teb() + tib = teb.NtTib + return ( tib.StackLimit, tib.StackBase ) # top, bottom + + def __get_stack_trace(self, depth = 16, bUseLabels = True, + bMakePretty = True): + """ + Tries to get a stack trace for the current function using the debug + helper API (dbghelp.dll). + + @type depth: int + @param depth: Maximum depth of stack trace. + + @type bUseLabels: bool + @param bUseLabels: C{True} to use labels, C{False} to use addresses. + + @type bMakePretty: bool + @param bMakePretty: + C{True} for user readable labels, + C{False} for labels that can be passed to L{Process.resolve_label}. + + "Pretty" labels look better when producing output for the user to + read, while pure labels are more useful programatically. + + @rtype: tuple of tuple( int, int, str ) + @return: Stack trace of the thread as a tuple of + ( return address, frame pointer address, module filename ) + when C{bUseLabels} is C{True}, or a tuple of + ( return address, frame pointer label ) + when C{bUseLabels} is C{False}. + + @raise WindowsError: Raises an exception on error. 
+ """ + + aProcess = self.get_process() + arch = aProcess.get_arch() + bits = aProcess.get_bits() + + if arch == win32.ARCH_I386: + MachineType = win32.IMAGE_FILE_MACHINE_I386 + elif arch == win32.ARCH_AMD64: + MachineType = win32.IMAGE_FILE_MACHINE_AMD64 + elif arch == win32.ARCH_IA64: + MachineType = win32.IMAGE_FILE_MACHINE_IA64 + else: + msg = "Stack walking is not available for this architecture: %s" + raise NotImplementedError(msg % arch) + + hProcess = aProcess.get_handle( win32.PROCESS_VM_READ | + win32.PROCESS_QUERY_INFORMATION ) + hThread = self.get_handle( win32.THREAD_GET_CONTEXT | + win32.THREAD_QUERY_INFORMATION ) + + StackFrame = win32.STACKFRAME64() + StackFrame.AddrPC = win32.ADDRESS64( self.get_pc() ) + StackFrame.AddrFrame = win32.ADDRESS64( self.get_fp() ) + StackFrame.AddrStack = win32.ADDRESS64( self.get_sp() ) + + trace = list() + while win32.StackWalk64(MachineType, hProcess, hThread, StackFrame): + if depth <= 0: + break + fp = StackFrame.AddrFrame.Offset + ra = aProcess.peek_pointer(fp + 4) + if ra == 0: + break + lib = aProcess.get_module_at_address(ra) + if lib is None: + lib = "" + else: + if lib.fileName: + lib = lib.fileName + else: + lib = "%s" % HexDump.address(lib.lpBaseOfDll, bits) + if bUseLabels: + label = aProcess.get_label_at_address(ra) + if bMakePretty: + label = '%s (%s)' % (HexDump.address(ra, bits), label) + trace.append( (fp, label) ) + else: + trace.append( (fp, ra, lib) ) + fp = aProcess.peek_pointer(fp) + return tuple(trace) + + def __get_stack_trace_manually(self, depth = 16, bUseLabels = True, + bMakePretty = True): + """ + Tries to get a stack trace for the current function. + Only works for functions with standard prologue and epilogue. + + @type depth: int + @param depth: Maximum depth of stack trace. + + @type bUseLabels: bool + @param bUseLabels: C{True} to use labels, C{False} to use addresses. + + @type bMakePretty: bool + @param bMakePretty: + C{True} for user readable labels, + C{False} for labels that can be passed to L{Process.resolve_label}. + + "Pretty" labels look better when producing output for the user to + read, while pure labels are more useful programatically. + + @rtype: tuple of tuple( int, int, str ) + @return: Stack trace of the thread as a tuple of + ( return address, frame pointer address, module filename ) + when C{bUseLabels} is C{True}, or a tuple of + ( return address, frame pointer label ) + when C{bUseLabels} is C{False}. + + @raise WindowsError: Raises an exception on error. + """ + aProcess = self.get_process() + st, sb = self.get_stack_range() # top, bottom + fp = self.get_fp() + trace = list() + if aProcess.get_module_count() == 0: + aProcess.scan_modules() + bits = aProcess.get_bits() + while depth > 0: + if fp == 0: + break + if not st <= fp < sb: + break + ra = aProcess.peek_pointer(fp + 4) + if ra == 0: + break + lib = aProcess.get_module_at_address(ra) + if lib is None: + lib = "" + else: + if lib.fileName: + lib = lib.fileName + else: + lib = "%s" % HexDump.address(lib.lpBaseOfDll, bits) + if bUseLabels: + label = aProcess.get_label_at_address(ra) + if bMakePretty: + label = '%s (%s)' % (HexDump.address(ra, bits), label) + trace.append( (fp, label) ) + else: + trace.append( (fp, ra, lib) ) + fp = aProcess.peek_pointer(fp) + return tuple(trace) + + def get_stack_trace(self, depth = 16): + """ + Tries to get a stack trace for the current function. + Only works for functions with standard prologue and epilogue. + + @type depth: int + @param depth: Maximum depth of stack trace. 
+ + @rtype: tuple of tuple( int, int, str ) + @return: Stack trace of the thread as a tuple of + ( return address, frame pointer address, module filename ). + + @raise WindowsError: Raises an exception on error. + """ + try: + trace = self.__get_stack_trace(depth, False) + except Exception: + import traceback + traceback.print_exc() + trace = () + if not trace: + trace = self.__get_stack_trace_manually(depth, False) + return trace + + def get_stack_trace_with_labels(self, depth = 16, bMakePretty = True): + """ + Tries to get a stack trace for the current function. + Only works for functions with standard prologue and epilogue. + + @type depth: int + @param depth: Maximum depth of stack trace. + + @type bMakePretty: bool + @param bMakePretty: + C{True} for user readable labels, + C{False} for labels that can be passed to L{Process.resolve_label}. + + "Pretty" labels look better when producing output for the user to + read, while pure labels are more useful programatically. + + @rtype: tuple of tuple( int, int, str ) + @return: Stack trace of the thread as a tuple of + ( return address, frame pointer label ). + + @raise WindowsError: Raises an exception on error. + """ + try: + trace = self.__get_stack_trace(depth, True, bMakePretty) + except Exception: + trace = () + if not trace: + trace = self.__get_stack_trace_manually(depth, True, bMakePretty) + return trace + + def get_stack_frame_range(self): + """ + Returns the starting and ending addresses of the stack frame. + Only works for functions with standard prologue and epilogue. + + @rtype: tuple( int, int ) + @return: Stack frame range. + May not be accurate, depending on the compiler used. + + @raise RuntimeError: The stack frame is invalid, + or the function doesn't have a standard prologue + and epilogue. + + @raise WindowsError: An error occured when getting the thread context. + """ + st, sb = self.get_stack_range() # top, bottom + sp = self.get_sp() + fp = self.get_fp() + size = fp - sp + if not st <= sp < sb: + raise RuntimeError('Stack pointer lies outside the stack') + if not st <= fp < sb: + raise RuntimeError('Frame pointer lies outside the stack') + if sp > fp: + raise RuntimeError('No valid stack frame found') + return (sp, fp) + + def get_stack_frame(self, max_size = None): + """ + Reads the contents of the current stack frame. + Only works for functions with standard prologue and epilogue. + + @type max_size: int + @param max_size: (Optional) Maximum amount of bytes to read. + + @rtype: str + @return: Stack frame data. + May not be accurate, depending on the compiler used. + May return an empty string. + + @raise RuntimeError: The stack frame is invalid, + or the function doesn't have a standard prologue + and epilogue. + + @raise WindowsError: An error occured when getting the thread context + or reading data from the process memory. + """ + sp, fp = self.get_stack_frame_range() + size = fp - sp + if max_size and size > max_size: + size = max_size + return self.get_process().peek(sp, size) + + def read_stack_data(self, size = 128, offset = 0): + """ + Reads the contents of the top of the stack. + + @type size: int + @param size: Number of bytes to read. + + @type offset: int + @param offset: Offset from the stack pointer to begin reading. + + @rtype: str + @return: Stack data. + + @raise WindowsError: Could not read the requested data. 
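+
+        A minimal sketch (illustrative; C{t} is assumed to be a L{Thread}
+        whose process memory can be read)::
+
+            first = t.read_stack_data(16)               # 16 bytes at SP
+            later = t.read_stack_data(16, offset = 16)  # the next 16 bytes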
+ """ + aProcess = self.get_process() + return aProcess.read(self.get_sp() + offset, size) + + def peek_stack_data(self, size = 128, offset = 0): + """ + Tries to read the contents of the top of the stack. + + @type size: int + @param size: Number of bytes to read. + + @type offset: int + @param offset: Offset from the stack pointer to begin reading. + + @rtype: str + @return: Stack data. + Returned data may be less than the requested size. + """ + aProcess = self.get_process() + return aProcess.peek(self.get_sp() + offset, size) + + def read_stack_dwords(self, count, offset = 0): + """ + Reads DWORDs from the top of the stack. + + @type count: int + @param count: Number of DWORDs to read. + + @type offset: int + @param offset: Offset from the stack pointer to begin reading. + + @rtype: tuple( int... ) + @return: Tuple of integers read from the stack. + + @raise WindowsError: Could not read the requested data. + """ + if count > 0: + stackData = self.read_stack_data(count * 4, offset) + return struct.unpack('<'+('L'*count), stackData) + return () + + def peek_stack_dwords(self, count, offset = 0): + """ + Tries to read DWORDs from the top of the stack. + + @type count: int + @param count: Number of DWORDs to read. + + @type offset: int + @param offset: Offset from the stack pointer to begin reading. + + @rtype: tuple( int... ) + @return: Tuple of integers read from the stack. + May be less than the requested number of DWORDs. + """ + stackData = self.peek_stack_data(count * 4, offset) + if len(stackData) & 3: + stackData = stackData[:-len(stackData) & 3] + if not stackData: + return () + return struct.unpack('<'+('L'*count), stackData) + + def read_stack_qwords(self, count, offset = 0): + """ + Reads QWORDs from the top of the stack. + + @type count: int + @param count: Number of QWORDs to read. + + @type offset: int + @param offset: Offset from the stack pointer to begin reading. + + @rtype: tuple( int... ) + @return: Tuple of integers read from the stack. + + @raise WindowsError: Could not read the requested data. + """ + stackData = self.read_stack_data(count * 8, offset) + return struct.unpack('<'+('Q'*count), stackData) + + def peek_stack_qwords(self, count, offset = 0): + """ + Tries to read QWORDs from the top of the stack. + + @type count: int + @param count: Number of QWORDs to read. + + @type offset: int + @param offset: Offset from the stack pointer to begin reading. + + @rtype: tuple( int... ) + @return: Tuple of integers read from the stack. + May be less than the requested number of QWORDs. + """ + stackData = self.peek_stack_data(count * 8, offset) + if len(stackData) & 7: + stackData = stackData[:-len(stackData) & 7] + if not stackData: + return () + return struct.unpack('<'+('Q'*count), stackData) + + def read_stack_structure(self, structure, offset = 0): + """ + Reads the given structure at the top of the stack. + + @type structure: ctypes.Structure + @param structure: Structure of the data to read from the stack. + + @type offset: int + @param offset: Offset from the stack pointer to begin reading. + The stack pointer is the same returned by the L{get_sp} method. + + @rtype: tuple + @return: Tuple of elements read from the stack. The type of each + element matches the types in the stack frame structure. 
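+
+        Illustrative sketch; the C{Params} structure below is a made-up
+        example and not part of this module::
+
+            from ctypes import Structure, c_uint32
+
+            class Params (Structure):
+                _fields_ = [('argc', c_uint32), ('argv', c_uint32)]
+
+            argc, argv = t.read_stack_structure(Params)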
+ """ + aProcess = self.get_process() + stackData = aProcess.read_structure(self.get_sp() + offset, structure) + return tuple([ stackData.__getattribute__(name) + for (name, type) in stackData._fields_ ]) + + def read_stack_frame(self, structure, offset = 0): + """ + Reads the stack frame of the thread. + + @type structure: ctypes.Structure + @param structure: Structure of the stack frame. + + @type offset: int + @param offset: Offset from the frame pointer to begin reading. + The frame pointer is the same returned by the L{get_fp} method. + + @rtype: tuple + @return: Tuple of elements read from the stack frame. The type of each + element matches the types in the stack frame structure. + """ + aProcess = self.get_process() + stackData = aProcess.read_structure(self.get_fp() + offset, structure) + return tuple([ stackData.__getattribute__(name) + for (name, type) in stackData._fields_ ]) + + def read_code_bytes(self, size = 128, offset = 0): + """ + Tries to read some bytes of the code currently being executed. + + @type size: int + @param size: Number of bytes to read. + + @type offset: int + @param offset: Offset from the program counter to begin reading. + + @rtype: str + @return: Bytes read from the process memory. + + @raise WindowsError: Could not read the requested data. + """ + return self.get_process().read(self.get_pc() + offset, size) + + def peek_code_bytes(self, size = 128, offset = 0): + """ + Tries to read some bytes of the code currently being executed. + + @type size: int + @param size: Number of bytes to read. + + @type offset: int + @param offset: Offset from the program counter to begin reading. + + @rtype: str + @return: Bytes read from the process memory. + May be less than the requested number of bytes. + """ + return self.get_process().peek(self.get_pc() + offset, size) + + def peek_pointers_in_registers(self, peekSize = 16, context = None): + """ + Tries to guess which values in the registers are valid pointers, + and reads some data from them. + + @type peekSize: int + @param peekSize: Number of bytes to read from each pointer found. + + @type context: dict( str S{->} int ) + @param context: (Optional) + Dictionary mapping register names to their values. + If not given, the current thread context will be used. + + @rtype: dict( str S{->} str ) + @return: Dictionary mapping register names to the data they point to. + """ + peekable_registers = ( + 'Eax', 'Ebx', 'Ecx', 'Edx', 'Esi', 'Edi', 'Ebp' + ) + if not context: + context = self.get_context(win32.CONTEXT_CONTROL | \ + win32.CONTEXT_INTEGER) + aProcess = self.get_process() + data = dict() + for (reg_name, reg_value) in compat.iteritems(context): + if reg_name not in peekable_registers: + continue +## if reg_name == 'Ebp': +## stack_begin, stack_end = self.get_stack_range() +## print hex(stack_end), hex(reg_value), hex(stack_begin) +## if stack_begin and stack_end and stack_end < stack_begin and \ +## stack_begin <= reg_value <= stack_end: +## continue + reg_data = aProcess.peek(reg_value, peekSize) + if reg_data: + data[reg_name] = reg_data + return data + + # TODO + # try to avoid reading the same page twice by caching it + def peek_pointers_in_data(self, data, peekSize = 16, peekStep = 1): + """ + Tries to guess which values in the given data are valid pointers, + and reads some data from them. + + @type data: str + @param data: Binary data to find pointers in. + + @type peekSize: int + @param peekSize: Number of bytes to read from each pointer found. 
+ + @type peekStep: int + @param peekStep: Expected data alignment. + Tipically you specify 1 when data alignment is unknown, + or 4 when you expect data to be DWORD aligned. + Any other value may be specified. + + @rtype: dict( str S{->} str ) + @return: Dictionary mapping stack offsets to the data they point to. + """ + aProcess = self.get_process() + return aProcess.peek_pointers_in_data(data, peekSize, peekStep) + +#------------------------------------------------------------------------------ + + # TODO + # The disassemble_around and disassemble_around_pc methods + # should take as parameter instruction counts rather than sizes + + def disassemble_string(self, lpAddress, code): + """ + Disassemble instructions from a block of binary code. + + @type lpAddress: int + @param lpAddress: Memory address where the code was read from. + + @type code: str + @param code: Binary code to disassemble. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + aProcess = self.get_process() + return aProcess.disassemble_string(lpAddress, code) + + def disassemble(self, lpAddress, dwSize): + """ + Disassemble instructions from the address space of the process. + + @type lpAddress: int + @param lpAddress: Memory address where to read the code from. + + @type dwSize: int + @param dwSize: Size of binary code to disassemble. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + aProcess = self.get_process() + return aProcess.disassemble(lpAddress, dwSize) + + def disassemble_around(self, lpAddress, dwSize = 64): + """ + Disassemble around the given address. + + @type lpAddress: int + @param lpAddress: Memory address where to read the code from. + + @type dwSize: int + @param dwSize: Delta offset. + Code will be read from lpAddress - dwSize to lpAddress + dwSize. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + aProcess = self.get_process() + return aProcess.disassemble_around(lpAddress, dwSize) + + def disassemble_around_pc(self, dwSize = 64): + """ + Disassemble around the program counter of the given thread. + + @type dwSize: int + @param dwSize: Delta offset. + Code will be read from pc - dwSize to pc + dwSize. + + @rtype: list of tuple( long, int, str, str ) + @return: List of tuples. Each tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + aProcess = self.get_process() + return aProcess.disassemble_around(self.get_pc(), dwSize) + + def disassemble_instruction(self, lpAddress): + """ + Disassemble the instruction at the given memory address. + + @type lpAddress: int + @param lpAddress: Memory address where to read the code from. 
+ + @rtype: tuple( long, int, str, str ) + @return: The tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + aProcess = self.get_process() + return aProcess.disassemble(lpAddress, 15)[0] + + def disassemble_current(self): + """ + Disassemble the instruction at the program counter of the given thread. + + @rtype: tuple( long, int, str, str ) + @return: The tuple represents an assembly instruction + and contains: + - Memory address of instruction. + - Size of instruction in bytes. + - Disassembly line of instruction. + - Hexadecimal dump of instruction. + """ + return self.disassemble_instruction( self.get_pc() ) + +#============================================================================== + +class _ThreadContainer (object): + """ + Encapsulates the capability to contain Thread objects. + + @group Instrumentation: + start_thread + + @group Threads snapshot: + scan_threads, + get_thread, get_thread_count, get_thread_ids, + has_thread, iter_threads, iter_thread_ids, + find_threads_by_name, get_windows, + clear_threads, clear_dead_threads, close_thread_handles + """ + + def __init__(self): + self.__threadDict = dict() + + def __initialize_snapshot(self): + """ + Private method to automatically initialize the snapshot + when you try to use it without calling any of the scan_* + methods first. You don't need to call this yourself. + """ + if not self.__threadDict: + self.scan_threads() + + def __contains__(self, anObject): + """ + @type anObject: L{Thread}, int + @param anObject: + - C{int}: Global ID of the thread to look for. + - C{Thread}: Thread object to look for. + + @rtype: bool + @return: C{True} if the snapshot contains + a L{Thread} object with the same ID. + """ + if isinstance(anObject, Thread): + anObject = anObject.dwThreadId + return self.has_thread(anObject) + + def __iter__(self): + """ + @see: L{iter_threads} + @rtype: dictionary-valueiterator + @return: Iterator of L{Thread} objects in this snapshot. + """ + return self.iter_threads() + + def __len__(self): + """ + @see: L{get_thread_count} + @rtype: int + @return: Count of L{Thread} objects in this snapshot. + """ + return self.get_thread_count() + + def has_thread(self, dwThreadId): + """ + @type dwThreadId: int + @param dwThreadId: Global ID of the thread to look for. + + @rtype: bool + @return: C{True} if the snapshot contains a + L{Thread} object with the given global ID. + """ + self.__initialize_snapshot() + return dwThreadId in self.__threadDict + + def get_thread(self, dwThreadId): + """ + @type dwThreadId: int + @param dwThreadId: Global ID of the thread to look for. + + @rtype: L{Thread} + @return: Thread object with the given global ID. + """ + self.__initialize_snapshot() + if dwThreadId not in self.__threadDict: + msg = "Unknown thread ID: %d" % dwThreadId + raise KeyError(msg) + return self.__threadDict[dwThreadId] + + def iter_thread_ids(self): + """ + @see: L{iter_threads} + @rtype: dictionary-keyiterator + @return: Iterator of global thread IDs in this snapshot. + """ + self.__initialize_snapshot() + return compat.iterkeys(self.__threadDict) + + def iter_threads(self): + """ + @see: L{iter_thread_ids} + @rtype: dictionary-valueiterator + @return: Iterator of L{Thread} objects in this snapshot. 
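+
+        For example (illustrative; C{snapshot} is assumed to be an object
+        derived from this container, such as a process snapshot)::
+
+            for thread in snapshot.iter_threads():
+                print( thread.get_tid() )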
+ """ + self.__initialize_snapshot() + return compat.itervalues(self.__threadDict) + + def get_thread_ids(self): + """ + @rtype: list( int ) + @return: List of global thread IDs in this snapshot. + """ + self.__initialize_snapshot() + return compat.keys(self.__threadDict) + + def get_thread_count(self): + """ + @rtype: int + @return: Count of L{Thread} objects in this snapshot. + """ + self.__initialize_snapshot() + return len(self.__threadDict) + +#------------------------------------------------------------------------------ + + def find_threads_by_name(self, name, bExactMatch = True): + """ + Find threads by name, using different search methods. + + @type name: str, None + @param name: Name to look for. Use C{None} to find nameless threads. + + @type bExactMatch: bool + @param bExactMatch: C{True} if the name must be + B{exactly} as given, C{False} if the name can be + loosely matched. + + This parameter is ignored when C{name} is C{None}. + + @rtype: list( L{Thread} ) + @return: All threads matching the given name. + """ + found_threads = list() + + # Find threads with no name. + if name is None: + for aThread in self.iter_threads(): + if aThread.get_name() is None: + found_threads.append(aThread) + + # Find threads matching the given name exactly. + elif bExactMatch: + for aThread in self.iter_threads(): + if aThread.get_name() == name: + found_threads.append(aThread) + + # Find threads whose names match the given substring. + else: + for aThread in self.iter_threads(): + t_name = aThread.get_name() + if t_name is not None and name in t_name: + found_threads.append(aThread) + + return found_threads + +#------------------------------------------------------------------------------ + + # XXX TODO + # Support for string searches on the window captions. + + def get_windows(self): + """ + @rtype: list of L{Window} + @return: Returns a list of windows handled by this process. + """ + window_list = list() + for thread in self.iter_threads(): + window_list.extend( thread.get_windows() ) + return window_list + +#------------------------------------------------------------------------------ + + def start_thread(self, lpStartAddress, lpParameter=0, bSuspended = False): + """ + Remotely creates a new thread in the process. + + @type lpStartAddress: int + @param lpStartAddress: Start address for the new thread. + + @type lpParameter: int + @param lpParameter: Optional argument for the new thread. + + @type bSuspended: bool + @param bSuspended: C{True} if the new thread should be suspended. + In that case use L{Thread.resume} to start execution. + """ + if bSuspended: + dwCreationFlags = win32.CREATE_SUSPENDED + else: + dwCreationFlags = 0 + hProcess = self.get_handle( win32.PROCESS_CREATE_THREAD | + win32.PROCESS_QUERY_INFORMATION | + win32.PROCESS_VM_OPERATION | + win32.PROCESS_VM_WRITE | + win32.PROCESS_VM_READ ) + hThread, dwThreadId = win32.CreateRemoteThread( + hProcess, 0, 0, lpStartAddress, lpParameter, dwCreationFlags) + aThread = Thread(dwThreadId, hThread, self) + self._add_thread(aThread) + return aThread + +#------------------------------------------------------------------------------ + + # TODO + # maybe put all the toolhelp code into their own set of classes? + # + # XXX this method musn't end up calling __initialize_snapshot by accident! + def scan_threads(self): + """ + Populates the snapshot with running threads. + """ + + # Ignore special process IDs. + # PID 0: System Idle Process. Also has a special meaning to the + # toolhelp APIs (current process). 
+ # PID 4: System Integrity Group. See this forum post for more info: + # http://tinyurl.com/ycza8jo + # (points to social.technet.microsoft.com) + # Only on XP and above + # PID 8: System (?) only in Windows 2000 and below AFAIK. + # It's probably the same as PID 4 in XP and above. + dwProcessId = self.get_pid() + if dwProcessId in (0, 4, 8): + return + +## dead_tids = set( self.get_thread_ids() ) # XXX triggers a scan + dead_tids = self._get_thread_ids() + dwProcessId = self.get_pid() + hSnapshot = win32.CreateToolhelp32Snapshot(win32.TH32CS_SNAPTHREAD, + dwProcessId) + try: + te = win32.Thread32First(hSnapshot) + while te is not None: + if te.th32OwnerProcessID == dwProcessId: + dwThreadId = te.th32ThreadID + if dwThreadId in dead_tids: + dead_tids.remove(dwThreadId) +## if not self.has_thread(dwThreadId): # XXX triggers a scan + if not self._has_thread_id(dwThreadId): + aThread = Thread(dwThreadId, process = self) + self._add_thread(aThread) + te = win32.Thread32Next(hSnapshot) + finally: + win32.CloseHandle(hSnapshot) + for tid in dead_tids: + self._del_thread(tid) + + def clear_dead_threads(self): + """ + Remove Thread objects from the snapshot + referring to threads no longer running. + """ + for tid in self.get_thread_ids(): + aThread = self.get_thread(tid) + if not aThread.is_alive(): + self._del_thread(aThread) + + def clear_threads(self): + """ + Clears the threads snapshot. + """ + for aThread in compat.itervalues(self.__threadDict): + aThread.clear() + self.__threadDict = dict() + + def close_thread_handles(self): + """ + Closes all open handles to threads in the snapshot. + """ + for aThread in self.iter_threads(): + try: + aThread.close_handle() + except Exception: + try: + e = sys.exc_info()[1] + msg = "Cannot close thread handle %s, reason: %s" + msg %= (aThread.hThread.value, str(e)) + warnings.warn(msg) + except Exception: + pass + +#------------------------------------------------------------------------------ + + # XXX _notify_* methods should not trigger a scan + + def _add_thread(self, aThread): + """ + Private method to add a thread object to the snapshot. + + @type aThread: L{Thread} + @param aThread: Thread object. + """ +## if not isinstance(aThread, Thread): +## if hasattr(aThread, '__class__'): +## typename = aThread.__class__.__name__ +## else: +## typename = str(type(aThread)) +## msg = "Expected Thread, got %s instead" % typename +## raise TypeError(msg) + dwThreadId = aThread.dwThreadId +## if dwThreadId in self.__threadDict: +## msg = "Already have a Thread object with ID %d" % dwThreadId +## raise KeyError(msg) + aThread.set_process(self) + self.__threadDict[dwThreadId] = aThread + + def _del_thread(self, dwThreadId): + """ + Private method to remove a thread object from the snapshot. + + @type dwThreadId: int + @param dwThreadId: Global thread ID. + """ + try: + aThread = self.__threadDict[dwThreadId] + del self.__threadDict[dwThreadId] + except KeyError: + aThread = None + msg = "Unknown thread ID %d" % dwThreadId + warnings.warn(msg, RuntimeWarning) + if aThread: + aThread.clear() # remove circular references + + def _has_thread_id(self, dwThreadId): + """ + Private method to test for a thread in the snapshot without triggering + an automatic scan. + """ + return dwThreadId in self.__threadDict + + def _get_thread_ids(self): + """ + Private method to get the list of thread IDs currently in the snapshot + without triggering an automatic scan. 
+ """ + return compat.keys(self.__threadDict) + + def __add_created_thread(self, event): + """ + Private method to automatically add new thread objects from debug events. + + @type event: L{Event} + @param event: Event object. + """ + dwThreadId = event.get_tid() + hThread = event.get_thread_handle() +## if not self.has_thread(dwThreadId): # XXX this would trigger a scan + if not self._has_thread_id(dwThreadId): + aThread = Thread(dwThreadId, hThread, self) + teb_ptr = event.get_teb() # remember the TEB pointer + if teb_ptr: + aThread._teb_ptr = teb_ptr + self._add_thread(aThread) + #else: + # aThread = self.get_thread(dwThreadId) + # if hThread != win32.INVALID_HANDLE_VALUE: + # aThread.hThread = hThread # may have more privileges + + def _notify_create_process(self, event): + """ + Notify the creation of the main thread of this process. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{CreateProcessEvent} + @param event: Create process event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + self.__add_created_thread(event) + return True + + def _notify_create_thread(self, event): + """ + Notify the creation of a new thread in this process. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{CreateThreadEvent} + @param event: Create thread event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + self.__add_created_thread(event) + return True + + def _notify_exit_thread(self, event): + """ + Notify the termination of a thread. + + This is done automatically by the L{Debug} class, you shouldn't need + to call it yourself. + + @type event: L{ExitThreadEvent} + @param event: Exit thread event. + + @rtype: bool + @return: C{True} to call the user-defined handle, C{False} otherwise. + """ + dwThreadId = event.get_tid() +## if self.has_thread(dwThreadId): # XXX this would trigger a scan + if self._has_thread_id(dwThreadId): + self._del_thread(dwThreadId) + return True diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/util.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/util.py new file mode 100644 index 00000000..4a9a9842 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/util.py @@ -0,0 +1,1038 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Miscellaneous utility classes and functions. + +@group Helpers: + PathOperations, + MemoryAddresses, + CustomAddressIterator, + DataAddressIterator, + ImageAddressIterator, + MappedAddressIterator, + ExecutableAddressIterator, + ReadableAddressIterator, + WriteableAddressIterator, + ExecutableAndWriteableAddressIterator, + DebugRegister, + Regenerator, + BannerHelpFormatter, + StaticClass, + classproperty +""" + +__revision__ = "$Id$" + +__all__ = [ + + # Filename and pathname manipulation + 'PathOperations', + + # Memory address operations + 'MemoryAddresses', + 'CustomAddressIterator', + 'DataAddressIterator', + 'ImageAddressIterator', + 'MappedAddressIterator', + 'ExecutableAddressIterator', + 'ReadableAddressIterator', + 'WriteableAddressIterator', + 'ExecutableAndWriteableAddressIterator', + + # Debug registers manipulation + 'DebugRegister', + + # Miscellaneous + 'Regenerator', + ] + +import sys +import os +import ctypes +import optparse + +from winappdbg import win32 +from winappdbg import compat + +#============================================================================== + +class classproperty(property): + """ + Class property method. + + Only works for getting properties, if you set them + the symbol gets overwritten in the class namespace. + + Inspired on: U{http://stackoverflow.com/a/7864317/426293} + """ + def __init__(self, fget=None, fset=None, fdel=None, doc=""): + if fset is not None or fdel is not None: + raise NotImplementedError() + super(classproperty, self).__init__(fget=classmethod(fget), doc=doc) + def __get__(self, cls, owner): + return self.fget.__get__(None, owner)() + +class BannerHelpFormatter(optparse.IndentedHelpFormatter): + "Just a small tweak to optparse to be able to print a banner." + def __init__(self, banner, *argv, **argd): + self.banner = banner + optparse.IndentedHelpFormatter.__init__(self, *argv, **argd) + def format_usage(self, usage): + msg = optparse.IndentedHelpFormatter.format_usage(self, usage) + return '%s\n%s' % (self.banner, msg) + +# See Process.generate_memory_snapshot() +class Regenerator(object): + """ + Calls a generator and iterates it. When it's finished iterating, the + generator is called again. This allows you to iterate a generator more + than once (well, sort of). + """ + + def __init__(self, g_function, *v_args, **d_args): + """ + @type g_function: function + @param g_function: Function that when called returns a generator. + + @type v_args: tuple + @param v_args: Variable arguments to pass to the generator function. + + @type d_args: dict + @param d_args: Variable arguments to pass to the generator function. 
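+
+        A minimal sketch of typical use under the Python 2 iterator
+        protocol (C{make_numbers} is only an illustrative generator
+        function, not part of this module):
+
+            >>> def make_numbers():
+            ...     for i in (1, 2, 3):
+            ...         yield i
+            >>> numbers = Regenerator(make_numbers)
+            >>> [x for x in numbers]
+            [1, 2, 3]
+            >>> [x for x in numbers]    # the generator is silently recreated
+            [1, 2, 3]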
+ """ + self.__g_function = g_function + self.__v_args = v_args + self.__d_args = d_args + self.__g_object = None + + def __iter__(self): + 'x.__iter__() <==> iter(x)' + return self + + def next(self): + 'x.next() -> the next value, or raise StopIteration' + if self.__g_object is None: + self.__g_object = self.__g_function( *self.__v_args, **self.__d_args ) + try: + return self.__g_object.next() + except StopIteration: + self.__g_object = None + raise + +class StaticClass (object): + def __new__(cls, *argv, **argd): + "Don't try to instance this class, just use the static methods." + raise NotImplementedError( + "Cannot instance static class %s" % cls.__name__) + +#============================================================================== + +class PathOperations (StaticClass): + """ + Static methods for filename and pathname manipulation. + """ + + @staticmethod + def path_is_relative(path): + """ + @see: L{path_is_absolute} + + @type path: str + @param path: Absolute or relative path. + + @rtype: bool + @return: C{True} if the path is relative, C{False} if it's absolute. + """ + return win32.PathIsRelative(path) + + @staticmethod + def path_is_absolute(path): + """ + @see: L{path_is_relative} + + @type path: str + @param path: Absolute or relative path. + + @rtype: bool + @return: C{True} if the path is absolute, C{False} if it's relative. + """ + return not win32.PathIsRelative(path) + + @staticmethod + def make_relative(path, current = None): + """ + @type path: str + @param path: Absolute path. + + @type current: str + @param current: (Optional) Path to the current directory. + + @rtype: str + @return: Relative path. + + @raise WindowsError: It's impossible to make the path relative. + This happens when the path and the current path are not on the + same disk drive or network share. + """ + return win32.PathRelativePathTo(pszFrom = current, pszTo = path) + + @staticmethod + def make_absolute(path): + """ + @type path: str + @param path: Relative path. + + @rtype: str + @return: Absolute path. + """ + return win32.GetFullPathName(path)[0] + + @staticmethod + def split_extension(pathname): + """ + @type pathname: str + @param pathname: Absolute path. + + @rtype: tuple( str, str ) + @return: + Tuple containing the file and extension components of the filename. + """ + filepart = win32.PathRemoveExtension(pathname) + extpart = win32.PathFindExtension(pathname) + return (filepart, extpart) + + @staticmethod + def split_filename(pathname): + """ + @type pathname: str + @param pathname: Absolute path. + + @rtype: tuple( str, str ) + @return: Tuple containing the path to the file and the base filename. + """ + filepart = win32.PathFindFileName(pathname) + pathpart = win32.PathRemoveFileSpec(pathname) + return (pathpart, filepart) + + @staticmethod + def split_path(path): + """ + @see: L{join_path} + + @type path: str + @param path: Absolute or relative path. + + @rtype: list( str... ) + @return: List of path components. + """ + components = list() + while path: + next = win32.PathFindNextComponent(path) + if next: + prev = path[ : -len(next) ] + components.append(prev) + path = next + return components + + @staticmethod + def join_path(*components): + """ + @see: L{split_path} + + @type components: tuple( str... ) + @param components: Path components. + + @rtype: str + @return: Absolute or relative path. 
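+
+        For example, C{join_path("C:\\", "Windows", "System32")} would
+        return C{"C:\\Windows\\System32"}; the actual separator handling
+        is delegated to C{win32.PathAppend}.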
+ """ + if components: + path = components[0] + for next in components[1:]: + path = win32.PathAppend(path, next) + else: + path = "" + return path + + @staticmethod + def native_to_win32_pathname(name): + """ + @type name: str + @param name: Native (NT) absolute pathname. + + @rtype: str + @return: Win32 absolute pathname. + """ + # XXX TODO + # There are probably some native paths that + # won't be converted by this naive approach. + if name.startswith(compat.b("\\")): + if name.startswith(compat.b("\\??\\")): + name = name[4:] + elif name.startswith(compat.b("\\SystemRoot\\")): + system_root_path = os.environ['SYSTEMROOT'] + if system_root_path.endswith('\\'): + system_root_path = system_root_path[:-1] + name = system_root_path + name[11:] + else: + for drive_number in compat.xrange(ord('A'), ord('Z') + 1): + drive_letter = '%c:' % drive_number + try: + device_native_path = win32.QueryDosDevice(drive_letter) + except WindowsError: + e = sys.exc_info()[1] + if e.winerror in (win32.ERROR_FILE_NOT_FOUND, \ + win32.ERROR_PATH_NOT_FOUND): + continue + raise + if not device_native_path.endswith(compat.b('\\')): + device_native_path += compat.b('\\') + if name.startswith(device_native_path): + name = drive_letter + compat.b('\\') + \ + name[ len(device_native_path) : ] + break + return name + + @staticmethod + def pathname_to_filename(pathname): + """ + Equivalent to: C{PathOperations.split_filename(pathname)[0]} + + @note: This function is preserved for backwards compatibility with + WinAppDbg 1.4 and earlier. It may be removed in future versions. + + @type pathname: str + @param pathname: Absolute path to a file. + + @rtype: str + @return: Filename component of the path. + """ + return win32.PathFindFileName(pathname) + +#============================================================================== + +class MemoryAddresses (StaticClass): + """ + Class to manipulate memory addresses. + + @type pageSize: int + @cvar pageSize: Page size in bytes. Defaults to 0x1000 but it's + automatically updated on runtime when importing the module. + """ + + @classproperty + def pageSize(cls): + """ + Try to get the pageSize value on runtime. + """ + try: + try: + pageSize = win32.GetSystemInfo().dwPageSize + except WindowsError: + pageSize = 0x1000 + except NameError: + pageSize = 0x1000 + cls.pageSize = pageSize # now this function won't be called again + return pageSize + + @classmethod + def align_address_to_page_start(cls, address): + """ + Align the given address to the start of the page it occupies. + + @type address: int + @param address: Memory address. + + @rtype: int + @return: Aligned memory address. + """ + return address - ( address % cls.pageSize ) + + @classmethod + def align_address_to_page_end(cls, address): + """ + Align the given address to the end of the page it occupies. + That is, to point to the start of the next page. + + @type address: int + @param address: Memory address. + + @rtype: int + @return: Aligned memory address. + """ + return address + cls.pageSize - ( address % cls.pageSize ) + + @classmethod + def align_address_range(cls, begin, end): + """ + Align the given address range to the start and end of the page(s) it occupies. + + @type begin: int + @param begin: Memory address of the beginning of the buffer. + Use C{None} for the first legal address in the address space. + + @type end: int + @param end: Memory address of the end of the buffer. + Use C{None} for the last legal address in the address space. + + @rtype: tuple( int, int ) + @return: Aligned memory addresses. 
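+
+        For example, with the default page size of 0x1000,
+        C{align_address_range(0x10FF, 0x3001)} would return
+        C{(0x1000, 0x4000)}: the start is rounded down to its page
+        boundary and the end is rounded up to the next one.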
+ """ + if begin is None: + begin = 0 + if end is None: + end = win32.LPVOID(-1).value # XXX HACK + if end < begin: + begin, end = end, begin + begin = cls.align_address_to_page_start(begin) + if end != cls.align_address_to_page_start(end): + end = cls.align_address_to_page_end(end) + return (begin, end) + + @classmethod + def get_buffer_size_in_pages(cls, address, size): + """ + Get the number of pages in use by the given buffer. + + @type address: int + @param address: Aligned memory address. + + @type size: int + @param size: Buffer size. + + @rtype: int + @return: Buffer size in number of pages. + """ + if size < 0: + size = -size + address = address - size + begin, end = cls.align_address_range(address, address + size) + # XXX FIXME + # I think this rounding fails at least for address 0xFFFFFFFF size 1 + return int(float(end - begin) / float(cls.pageSize)) + + @staticmethod + def do_ranges_intersect(begin, end, old_begin, old_end): + """ + Determine if the two given memory address ranges intersect. + + @type begin: int + @param begin: Start address of the first range. + + @type end: int + @param end: End address of the first range. + + @type old_begin: int + @param old_begin: Start address of the second range. + + @type old_end: int + @param old_end: End address of the second range. + + @rtype: bool + @return: C{True} if the two ranges intersect, C{False} otherwise. + """ + return (old_begin <= begin < old_end) or \ + (old_begin < end <= old_end) or \ + (begin <= old_begin < end) or \ + (begin < old_end <= end) + +#============================================================================== + +def CustomAddressIterator(memory_map, condition): + """ + Generator function that iterates through a memory map, filtering memory + region blocks by any given condition. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @type condition: function + @param condition: Callback function that returns C{True} if the memory + block should be returned, or C{False} if it should be filtered. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. + """ + for mbi in memory_map: + if condition(mbi): + address = mbi.BaseAddress + max_addr = address + mbi.RegionSize + while address < max_addr: + yield address + address = address + 1 + +def DataAddressIterator(memory_map): + """ + Generator function that iterates through a memory map, returning only those + memory blocks that contain data. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. + """ + return CustomAddressIterator(memory_map, + win32.MemoryBasicInformation.has_content) + +def ImageAddressIterator(memory_map): + """ + Generator function that iterates through a memory map, returning only those + memory blocks that belong to executable images. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. 
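+
+    A minimal usage sketch, assuming C{aProcess} is a L{Process} instance.
+    Note that the generator yields every individual address inside the
+    matching regions, one address at a time:
+
+        >>> for address in ImageAddressIterator(aProcess.get_memory_map()):
+        ...     pass    # each address belongs to a loaded module image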
+ """ + return CustomAddressIterator(memory_map, + win32.MemoryBasicInformation.is_image) + +def MappedAddressIterator(memory_map): + """ + Generator function that iterates through a memory map, returning only those + memory blocks that belong to memory mapped files. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. + """ + return CustomAddressIterator(memory_map, + win32.MemoryBasicInformation.is_mapped) + +def ReadableAddressIterator(memory_map): + """ + Generator function that iterates through a memory map, returning only those + memory blocks that are readable. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. + """ + return CustomAddressIterator(memory_map, + win32.MemoryBasicInformation.is_readable) + +def WriteableAddressIterator(memory_map): + """ + Generator function that iterates through a memory map, returning only those + memory blocks that are writeable. + + @note: Writeable memory is always readable too. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. + """ + return CustomAddressIterator(memory_map, + win32.MemoryBasicInformation.is_writeable) + +def ExecutableAddressIterator(memory_map): + """ + Generator function that iterates through a memory map, returning only those + memory blocks that are executable. + + @note: Executable memory is always readable too. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. + """ + return CustomAddressIterator(memory_map, + win32.MemoryBasicInformation.is_executable) + +def ExecutableAndWriteableAddressIterator(memory_map): + """ + Generator function that iterates through a memory map, returning only those + memory blocks that are executable and writeable. + + @note: The presence of such pages make memory corruption vulnerabilities + much easier to exploit. + + @type memory_map: list( L{win32.MemoryBasicInformation} ) + @param memory_map: List of memory region information objects. + Returned by L{Process.get_memory_map}. + + @rtype: generator of L{win32.MemoryBasicInformation} + @return: Generator object to iterate memory blocks. + """ + return CustomAddressIterator(memory_map, + win32.MemoryBasicInformation.is_executable_and_writeable) + +#============================================================================== +try: + _registerMask = win32.SIZE_T(-1).value +except TypeError: + if win32.SIZEOF(win32.SIZE_T) == 4: + _registerMask = 0xFFFFFFFF + elif win32.SIZEOF(win32.SIZE_T) == 8: + _registerMask = 0xFFFFFFFFFFFFFFFF + else: + raise + +class DebugRegister (StaticClass): + """ + Class to manipulate debug registers. + Used by L{HardwareBreakpoint}. 
+ + @group Trigger flags used by HardwareBreakpoint: + BREAK_ON_EXECUTION, BREAK_ON_WRITE, BREAK_ON_ACCESS, BREAK_ON_IO_ACCESS + @group Size flags used by HardwareBreakpoint: + WATCH_BYTE, WATCH_WORD, WATCH_DWORD, WATCH_QWORD + @group Bitwise masks for Dr7: + enableMask, disableMask, triggerMask, watchMask, clearMask, + generalDetectMask + @group Bitwise masks for Dr6: + hitMask, hitMaskAll, debugAccessMask, singleStepMask, taskSwitchMask, + clearDr6Mask, clearHitMask + @group Debug control MSR definitions: + DebugCtlMSR, LastBranchRecord, BranchTrapFlag, PinControl, + LastBranchToIP, LastBranchFromIP, + LastExceptionToIP, LastExceptionFromIP + + @type BREAK_ON_EXECUTION: int + @cvar BREAK_ON_EXECUTION: Break on execution. + + @type BREAK_ON_WRITE: int + @cvar BREAK_ON_WRITE: Break on write. + + @type BREAK_ON_ACCESS: int + @cvar BREAK_ON_ACCESS: Break on read or write. + + @type BREAK_ON_IO_ACCESS: int + @cvar BREAK_ON_IO_ACCESS: Break on I/O port access. + Not supported by any hardware. + + @type WATCH_BYTE: int + @cvar WATCH_BYTE: Watch a byte. + + @type WATCH_WORD: int + @cvar WATCH_WORD: Watch a word. + + @type WATCH_DWORD: int + @cvar WATCH_DWORD: Watch a double word. + + @type WATCH_QWORD: int + @cvar WATCH_QWORD: Watch one quad word. + + @type enableMask: 4-tuple of integers + @cvar enableMask: + Enable bit on C{Dr7} for each slot. + Works as a bitwise-OR mask. + + @type disableMask: 4-tuple of integers + @cvar disableMask: + Mask of the enable bit on C{Dr7} for each slot. + Works as a bitwise-AND mask. + + @type triggerMask: 4-tuple of 2-tuples of integers + @cvar triggerMask: + Trigger bits on C{Dr7} for each trigger flag value. + Each 2-tuple has the bitwise-OR mask and the bitwise-AND mask. + + @type watchMask: 4-tuple of 2-tuples of integers + @cvar watchMask: + Watch bits on C{Dr7} for each watch flag value. + Each 2-tuple has the bitwise-OR mask and the bitwise-AND mask. + + @type clearMask: 4-tuple of integers + @cvar clearMask: + Mask of all important bits on C{Dr7} for each slot. + Works as a bitwise-AND mask. + + @type generalDetectMask: integer + @cvar generalDetectMask: + General detect mode bit. It enables the processor to notify the + debugger when the debugee is trying to access one of the debug + registers. + + @type hitMask: 4-tuple of integers + @cvar hitMask: + Hit bit on C{Dr6} for each slot. + Works as a bitwise-AND mask. + + @type hitMaskAll: integer + @cvar hitMaskAll: + Bitmask for all hit bits in C{Dr6}. Useful to know if at least one + hardware breakpoint was hit, or to clear the hit bits only. + + @type clearHitMask: integer + @cvar clearHitMask: + Bitmask to clear all the hit bits in C{Dr6}. + + @type debugAccessMask: integer + @cvar debugAccessMask: + The debugee tried to access a debug register. Needs bit + L{generalDetectMask} enabled in C{Dr7}. + + @type singleStepMask: integer + @cvar singleStepMask: + A single step exception was raised. Needs the trap flag enabled. + + @type taskSwitchMask: integer + @cvar taskSwitchMask: + A task switch has occurred. Needs the TSS T-bit set to 1. + + @type clearDr6Mask: integer + @cvar clearDr6Mask: + Bitmask to clear all meaningful bits in C{Dr6}. 
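+
+    A minimal usage sketch, assuming C{ctx} is a thread context dictionary
+    that includes the debug registers (for example obtained through
+    C{Thread.get_context} with C{CONTEXT_DEBUG_REGISTERS}) and C{address}
+    is the location to watch:
+
+        >>> slot = DebugRegister.find_slot(ctx)
+        >>> if slot is not None:
+        ...     DebugRegister.set_bp(ctx, slot, address,
+        ...                          DebugRegister.BREAK_ON_WRITE,
+        ...                          DebugRegister.WATCH_DWORD)
+
+    The modified context must be written back to the thread (for example
+    with C{Thread.set_context}) before the breakpoint takes effect, and
+    L{clear_bp} undoes the change on the same slot.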
+ """ + + BREAK_ON_EXECUTION = 0 + BREAK_ON_WRITE = 1 + BREAK_ON_ACCESS = 3 + BREAK_ON_IO_ACCESS = 2 + + WATCH_BYTE = 0 + WATCH_WORD = 1 + WATCH_DWORD = 3 + WATCH_QWORD = 2 + + registerMask = _registerMask + +#------------------------------------------------------------------------------ + + ########################################################################### + # http://en.wikipedia.org/wiki/Debug_register + # + # DR7 - Debug control + # + # The low-order eight bits of DR7 (0,2,4,6 and 1,3,5,7) selectively enable + # the four address breakpoint conditions. There are two levels of enabling: + # the local (0,2,4,6) and global (1,3,5,7) levels. The local enable bits + # are automatically reset by the processor at every task switch to avoid + # unwanted breakpoint conditions in the new task. The global enable bits + # are not reset by a task switch; therefore, they can be used for + # conditions that are global to all tasks. + # + # Bits 16-17 (DR0), 20-21 (DR1), 24-25 (DR2), 28-29 (DR3), define when + # breakpoints trigger. Each breakpoint has a two-bit entry that specifies + # whether they break on execution (00b), data write (01b), data read or + # write (11b). 10b is defined to mean break on IO read or write but no + # hardware supports it. Bits 18-19 (DR0), 22-23 (DR1), 26-27 (DR2), 30-31 + # (DR3), define how large area of memory is watched by breakpoints. Again + # each breakpoint has a two-bit entry that specifies whether they watch + # one (00b), two (01b), eight (10b) or four (11b) bytes. + ########################################################################### + + # Dr7 |= enableMask[register] + enableMask = ( + 1 << 0, # Dr0 (bit 0) + 1 << 2, # Dr1 (bit 2) + 1 << 4, # Dr2 (bit 4) + 1 << 6, # Dr3 (bit 6) + ) + + # Dr7 &= disableMask[register] + disableMask = tuple( [_registerMask ^ x for x in enableMask] ) # The registerMask from the class is not there in py3 + try: + del x # It's not there in py3 + except: + pass + + # orMask, andMask = triggerMask[register][trigger] + # Dr7 = (Dr7 & andMask) | orMask # to set + # Dr7 = Dr7 & andMask # to remove + triggerMask = ( + # Dr0 (bits 16-17) + ( + ((0 << 16), (3 << 16) ^ registerMask), # execute + ((1 << 16), (3 << 16) ^ registerMask), # write + ((2 << 16), (3 << 16) ^ registerMask), # io read + ((3 << 16), (3 << 16) ^ registerMask), # access + ), + # Dr1 (bits 20-21) + ( + ((0 << 20), (3 << 20) ^ registerMask), # execute + ((1 << 20), (3 << 20) ^ registerMask), # write + ((2 << 20), (3 << 20) ^ registerMask), # io read + ((3 << 20), (3 << 20) ^ registerMask), # access + ), + # Dr2 (bits 24-25) + ( + ((0 << 24), (3 << 24) ^ registerMask), # execute + ((1 << 24), (3 << 24) ^ registerMask), # write + ((2 << 24), (3 << 24) ^ registerMask), # io read + ((3 << 24), (3 << 24) ^ registerMask), # access + ), + # Dr3 (bits 28-29) + ( + ((0 << 28), (3 << 28) ^ registerMask), # execute + ((1 << 28), (3 << 28) ^ registerMask), # write + ((2 << 28), (3 << 28) ^ registerMask), # io read + ((3 << 28), (3 << 28) ^ registerMask), # access + ), + ) + + # orMask, andMask = watchMask[register][watch] + # Dr7 = (Dr7 & andMask) | orMask # to set + # Dr7 = Dr7 & andMask # to remove + watchMask = ( + # Dr0 (bits 18-19) + ( + ((0 << 18), (3 << 18) ^ registerMask), # byte + ((1 << 18), (3 << 18) ^ registerMask), # word + ((2 << 18), (3 << 18) ^ registerMask), # qword + ((3 << 18), (3 << 18) ^ registerMask), # dword + ), + # Dr1 (bits 22-23) + ( + ((0 << 23), (3 << 23) ^ registerMask), # byte + ((1 << 23), (3 << 23) ^ registerMask), # word + ((2 << 23), 
(3 << 23) ^ registerMask), # qword + ((3 << 23), (3 << 23) ^ registerMask), # dword + ), + # Dr2 (bits 26-27) + ( + ((0 << 26), (3 << 26) ^ registerMask), # byte + ((1 << 26), (3 << 26) ^ registerMask), # word + ((2 << 26), (3 << 26) ^ registerMask), # qword + ((3 << 26), (3 << 26) ^ registerMask), # dword + ), + # Dr3 (bits 30-31) + ( + ((0 << 30), (3 << 31) ^ registerMask), # byte + ((1 << 30), (3 << 31) ^ registerMask), # word + ((2 << 30), (3 << 31) ^ registerMask), # qword + ((3 << 30), (3 << 31) ^ registerMask), # dword + ), + ) + + # Dr7 = Dr7 & clearMask[register] + clearMask = ( + registerMask ^ ( (1 << 0) + (3 << 16) + (3 << 18) ), # Dr0 + registerMask ^ ( (1 << 2) + (3 << 20) + (3 << 22) ), # Dr1 + registerMask ^ ( (1 << 4) + (3 << 24) + (3 << 26) ), # Dr2 + registerMask ^ ( (1 << 6) + (3 << 28) + (3 << 30) ), # Dr3 + ) + + # Dr7 = Dr7 | generalDetectMask + generalDetectMask = (1 << 13) + + ########################################################################### + # http://en.wikipedia.org/wiki/Debug_register + # + # DR6 - Debug status + # + # The debug status register permits the debugger to determine which debug + # conditions have occurred. When the processor detects an enabled debug + # exception, it sets the low-order bits of this register (0,1,2,3) before + # entering the debug exception handler. + # + # Note that the bits of DR6 are never cleared by the processor. To avoid + # any confusion in identifying the next debug exception, the debug handler + # should move zeros to DR6 immediately before returning. + ########################################################################### + + # bool(Dr6 & hitMask[register]) + hitMask = ( + (1 << 0), # Dr0 + (1 << 1), # Dr1 + (1 << 2), # Dr2 + (1 << 3), # Dr3 + ) + + # bool(Dr6 & anyHitMask) + hitMaskAll = hitMask[0] | hitMask[1] | hitMask[2] | hitMask[3] + + # Dr6 = Dr6 & clearHitMask + clearHitMask = registerMask ^ hitMaskAll + + # bool(Dr6 & debugAccessMask) + debugAccessMask = (1 << 13) + + # bool(Dr6 & singleStepMask) + singleStepMask = (1 << 14) + + # bool(Dr6 & taskSwitchMask) + taskSwitchMask = (1 << 15) + + # Dr6 = Dr6 & clearDr6Mask + clearDr6Mask = registerMask ^ (hitMaskAll | \ + debugAccessMask | singleStepMask | taskSwitchMask) + +#------------------------------------------------------------------------------ + +############################################################################### +# +# (from the AMD64 manuals) +# +# The fields within the DebugCtlMSR register are: +# +# Last-Branch Record (LBR) - Bit 0, read/write. Software sets this bit to 1 +# to cause the processor to record the source and target addresses of the +# last control transfer taken before a debug exception occurs. The recorded +# control transfers include branch instructions, interrupts, and exceptions. +# +# Branch Single Step (BTF) - Bit 1, read/write. Software uses this bit to +# change the behavior of the rFLAGS.TF bit. When this bit is cleared to 0, +# the rFLAGS.TF bit controls instruction single stepping, (normal behavior). +# When this bit is set to 1, the rFLAGS.TF bit controls single stepping on +# control transfers. The single-stepped control transfers include branch +# instructions, interrupts, and exceptions. Control-transfer single stepping +# requires both BTF=1 and rFLAGS.TF=1. +# +# Performance-Monitoring/Breakpoint Pin-Control (PBi) - Bits 5-2, read/write. +# Software uses these bits to control the type of information reported by +# the four external performance-monitoring/breakpoint pins on the processor. 
+# When a PBi bit is cleared to 0, the corresponding external pin (BPi) +# reports performance-monitor information. When a PBi bit is set to 1, the +# corresponding external pin (BPi) reports breakpoint information. +# +# All remaining bits in the DebugCtlMSR register are reserved. +# +# Software can enable control-transfer single stepping by setting +# DebugCtlMSR.BTF to 1 and rFLAGS.TF to 1. The processor automatically +# disables control-transfer single stepping when a debug exception (#DB) +# occurs by clearing DebugCtlMSR.BTF to 0. rFLAGS.TF is also cleared when a +# #DB exception occurs. Before exiting the debug-exception handler, software +# must set both DebugCtlMSR.BTF and rFLAGS.TF to 1 to restart single +# stepping. +# +############################################################################### + + DebugCtlMSR = 0x1D9 + LastBranchRecord = (1 << 0) + BranchTrapFlag = (1 << 1) + PinControl = ( + (1 << 2), # PB1 + (1 << 3), # PB2 + (1 << 4), # PB3 + (1 << 5), # PB4 + ) + +############################################################################### +# +# (from the AMD64 manuals) +# +# Control-transfer recording MSRs: LastBranchToIP, LastBranchFromIP, +# LastExceptionToIP, and LastExceptionFromIP. These registers are loaded +# automatically by the processor when the DebugCtlMSR.LBR bit is set to 1. +# These MSRs are read-only. +# +# The processor automatically disables control-transfer recording when a +# debug exception (#DB) occurs by clearing DebugCtlMSR.LBR to 0. The +# contents of the control-transfer recording MSRs are not altered by the +# processor when the #DB occurs. Before exiting the debug-exception handler, +# software can set DebugCtlMSR.LBR to 1 to re-enable the recording mechanism. +# +############################################################################### + + LastBranchToIP = 0x1DC + LastBranchFromIP = 0x1DB + LastExceptionToIP = 0x1DE + LastExceptionFromIP = 0x1DD + +#------------------------------------------------------------------------------ + + @classmethod + def clear_bp(cls, ctx, register): + """ + Clears a hardware breakpoint. + + @see: find_slot, set_bp + + @type ctx: dict( str S{->} int ) + @param ctx: Thread context dictionary. + + @type register: int + @param register: Slot (debug register) for hardware breakpoint. + """ + ctx['Dr7'] &= cls.clearMask[register] + ctx['Dr%d' % register] = 0 + + @classmethod + def set_bp(cls, ctx, register, address, trigger, watch): + """ + Sets a hardware breakpoint. + + @see: clear_bp, find_slot + + @type ctx: dict( str S{->} int ) + @param ctx: Thread context dictionary. + + @type register: int + @param register: Slot (debug register). + + @type address: int + @param address: Memory address. + + @type trigger: int + @param trigger: Trigger flag. See L{HardwareBreakpoint.validTriggers}. + + @type watch: int + @param watch: Watch flag. See L{HardwareBreakpoint.validWatchSizes}. + """ + Dr7 = ctx['Dr7'] + Dr7 |= cls.enableMask[register] + orMask, andMask = cls.triggerMask[register][trigger] + Dr7 &= andMask + Dr7 |= orMask + orMask, andMask = cls.watchMask[register][watch] + Dr7 &= andMask + Dr7 |= orMask + ctx['Dr7'] = Dr7 + ctx['Dr%d' % register] = address + + @classmethod + def find_slot(cls, ctx): + """ + Finds an empty slot to set a hardware breakpoint. + + @see: clear_bp, set_bp + + @type ctx: dict( str S{->} int ) + @param ctx: Thread context dictionary. + + @rtype: int + @return: Slot (debug register) for hardware breakpoint. 
+ """ + Dr7 = ctx['Dr7'] + slot = 0 + for m in cls.enableMask: + if (Dr7 & m) == 0: + return slot + slot += 1 + return None diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py new file mode 100644 index 00000000..b5536c17 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/__init__.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Debugging API wrappers in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32 import defines +from winappdbg.win32 import kernel32 +from winappdbg.win32 import user32 +from winappdbg.win32 import advapi32 +from winappdbg.win32 import wtsapi32 +from winappdbg.win32 import shell32 +from winappdbg.win32 import shlwapi +from winappdbg.win32 import psapi +from winappdbg.win32 import dbghelp +from winappdbg.win32 import ntdll + +from winappdbg.win32.defines import * +from winappdbg.win32.kernel32 import * +from winappdbg.win32.user32 import * +from winappdbg.win32.advapi32 import * +from winappdbg.win32.wtsapi32 import * +from winappdbg.win32.shell32 import * +from winappdbg.win32.shlwapi import * +from winappdbg.win32.psapi import * +from winappdbg.win32.dbghelp import * +from winappdbg.win32.ntdll import * + +# This calculates the list of exported symbols. 
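+# The resulting namespace is deliberately flat: an illustrative consumer can
+# simply do "from winappdbg import win32" and then call, for instance,
+# win32.OpenProcess(win32.PROCESS_VM_READ, False, dwProcessId) without knowing
+# which submodule defines each name. The computation below keeps __all__ in
+# sync with whatever the submodules export.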
+_all = set() +_all.update(defines._all) +_all.update(kernel32._all) +_all.update(user32._all) +_all.update(advapi32._all) +_all.update(wtsapi32._all) +_all.update(shell32._all) +_all.update(shlwapi._all) +_all.update(psapi._all) +_all.update(dbghelp._all) +_all.update(ntdll._all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py new file mode 100644 index 00000000..4e49889e --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/advapi32.py @@ -0,0 +1,3209 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for advapi32.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.kernel32 import * + +# XXX TODO +# + add transacted registry operations + +#============================================================================== +# This is used later on to calculate the list of exported symbols. 
+_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- Constants ---------------------------------------------------------------- + +# Privilege constants +SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege" +SE_AUDIT_NAME = "SeAuditPrivilege" +SE_BACKUP_NAME = "SeBackupPrivilege" +SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege" +SE_CREATE_GLOBAL_NAME = "SeCreateGlobalPrivilege" +SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege" +SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege" +SE_CREATE_SYMBOLIC_LINK_NAME = "SeCreateSymbolicLinkPrivilege" +SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege" +SE_DEBUG_NAME = "SeDebugPrivilege" +SE_ENABLE_DELEGATION_NAME = "SeEnableDelegationPrivilege" +SE_IMPERSONATE_NAME = "SeImpersonatePrivilege" +SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege" +SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege" +SE_INC_WORKING_SET_NAME = "SeIncreaseWorkingSetPrivilege" +SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege" +SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege" +SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege" +SE_MANAGE_VOLUME_NAME = "SeManageVolumePrivilege" +SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege" +SE_RELABEL_NAME = "SeRelabelPrivilege" +SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege" +SE_RESTORE_NAME = "SeRestorePrivilege" +SE_SECURITY_NAME = "SeSecurityPrivilege" +SE_SHUTDOWN_NAME = "SeShutdownPrivilege" +SE_SYNC_AGENT_NAME = "SeSyncAgentPrivilege" +SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege" +SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege" +SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege" +SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege" +SE_TCB_NAME = "SeTcbPrivilege" +SE_TIME_ZONE_NAME = "SeTimeZonePrivilege" +SE_TRUSTED_CREDMAN_ACCESS_NAME = "SeTrustedCredManAccessPrivilege" +SE_UNDOCK_NAME = "SeUndockPrivilege" +SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege" + +SE_PRIVILEGE_ENABLED_BY_DEFAULT = 0x00000001 +SE_PRIVILEGE_ENABLED = 0x00000002 +SE_PRIVILEGE_REMOVED = 0x00000004 +SE_PRIVILEGE_USED_FOR_ACCESS = 0x80000000 + +TOKEN_ADJUST_PRIVILEGES = 0x00000020 + +LOGON_WITH_PROFILE = 0x00000001 +LOGON_NETCREDENTIALS_ONLY = 0x00000002 + +# Token access rights +TOKEN_ASSIGN_PRIMARY = 0x0001 +TOKEN_DUPLICATE = 0x0002 +TOKEN_IMPERSONATE = 0x0004 +TOKEN_QUERY = 0x0008 +TOKEN_QUERY_SOURCE = 0x0010 +TOKEN_ADJUST_PRIVILEGES = 0x0020 +TOKEN_ADJUST_GROUPS = 0x0040 +TOKEN_ADJUST_DEFAULT = 0x0080 +TOKEN_ADJUST_SESSIONID = 0x0100 +TOKEN_READ = (STANDARD_RIGHTS_READ | TOKEN_QUERY) +TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED | TOKEN_ASSIGN_PRIMARY | + TOKEN_DUPLICATE | TOKEN_IMPERSONATE | TOKEN_QUERY | TOKEN_QUERY_SOURCE | + TOKEN_ADJUST_PRIVILEGES | TOKEN_ADJUST_GROUPS | TOKEN_ADJUST_DEFAULT | + TOKEN_ADJUST_SESSIONID) + +# Predefined HKEY values +HKEY_CLASSES_ROOT = 0x80000000 +HKEY_CURRENT_USER = 0x80000001 +HKEY_LOCAL_MACHINE = 0x80000002 +HKEY_USERS = 0x80000003 +HKEY_PERFORMANCE_DATA = 0x80000004 +HKEY_CURRENT_CONFIG = 0x80000005 + +# Registry access rights +KEY_ALL_ACCESS = 0xF003F +KEY_CREATE_LINK = 0x0020 +KEY_CREATE_SUB_KEY = 0x0004 +KEY_ENUMERATE_SUB_KEYS = 0x0008 +KEY_EXECUTE = 0x20019 +KEY_NOTIFY = 0x0010 +KEY_QUERY_VALUE = 0x0001 +KEY_READ = 0x20019 +KEY_SET_VALUE = 0x0002 +KEY_WOW64_32KEY = 0x0200 +KEY_WOW64_64KEY = 0x0100 +KEY_WRITE = 0x20006 + +# Registry value types +REG_NONE = 0 +REG_SZ = 1 +REG_EXPAND_SZ = 2 +REG_BINARY = 3 +REG_DWORD = 4 +REG_DWORD_LITTLE_ENDIAN = REG_DWORD 
+REG_DWORD_BIG_ENDIAN = 5 +REG_LINK = 6 +REG_MULTI_SZ = 7 +REG_RESOURCE_LIST = 8 +REG_FULL_RESOURCE_DESCRIPTOR = 9 +REG_RESOURCE_REQUIREMENTS_LIST = 10 +REG_QWORD = 11 +REG_QWORD_LITTLE_ENDIAN = REG_QWORD + +#--- TOKEN_PRIVILEGE structure ------------------------------------------------ + +# typedef struct _LUID { +# DWORD LowPart; +# LONG HighPart; +# } LUID, +# *PLUID; +class LUID(Structure): + _fields_ = [ + ("LowPart", DWORD), + ("HighPart", LONG), + ] + +PLUID = POINTER(LUID) + +# typedef struct _LUID_AND_ATTRIBUTES { +# LUID Luid; +# DWORD Attributes; +# } LUID_AND_ATTRIBUTES, +# *PLUID_AND_ATTRIBUTES; +class LUID_AND_ATTRIBUTES(Structure): + _fields_ = [ + ("Luid", LUID), + ("Attributes", DWORD), + ] + +# typedef struct _TOKEN_PRIVILEGES { +# DWORD PrivilegeCount; +# LUID_AND_ATTRIBUTES Privileges[ANYSIZE_ARRAY]; +# } TOKEN_PRIVILEGES, +# *PTOKEN_PRIVILEGES; +class TOKEN_PRIVILEGES(Structure): + _fields_ = [ + ("PrivilegeCount", DWORD), +## ("Privileges", LUID_AND_ATTRIBUTES * ANYSIZE_ARRAY), + ("Privileges", LUID_AND_ATTRIBUTES), + ] + # See comments on AdjustTokenPrivileges about this structure + +PTOKEN_PRIVILEGES = POINTER(TOKEN_PRIVILEGES) + +#--- GetTokenInformation enums and structures --------------------------------- + +# typedef enum _TOKEN_INFORMATION_CLASS { +# TokenUser = 1, +# TokenGroups, +# TokenPrivileges, +# TokenOwner, +# TokenPrimaryGroup, +# TokenDefaultDacl, +# TokenSource, +# TokenType, +# TokenImpersonationLevel, +# TokenStatistics, +# TokenRestrictedSids, +# TokenSessionId, +# TokenGroupsAndPrivileges, +# TokenSessionReference, +# TokenSandBoxInert, +# TokenAuditPolicy, +# TokenOrigin, +# TokenElevationType, +# TokenLinkedToken, +# TokenElevation, +# TokenHasRestrictions, +# TokenAccessInformation, +# TokenVirtualizationAllowed, +# TokenVirtualizationEnabled, +# TokenIntegrityLevel, +# TokenUIAccess, +# TokenMandatoryPolicy, +# TokenLogonSid, +# TokenIsAppContainer, +# TokenCapabilities, +# TokenAppContainerSid, +# TokenAppContainerNumber, +# TokenUserClaimAttributes, +# TokenDeviceClaimAttributes, +# TokenRestrictedUserClaimAttributes, +# TokenRestrictedDeviceClaimAttributes, +# TokenDeviceGroups, +# TokenRestrictedDeviceGroups, +# TokenSecurityAttributes, +# TokenIsRestricted, +# MaxTokenInfoClass +# } TOKEN_INFORMATION_CLASS, *PTOKEN_INFORMATION_CLASS; + +TOKEN_INFORMATION_CLASS = ctypes.c_int + +TokenUser = 1 +TokenGroups = 2 +TokenPrivileges = 3 +TokenOwner = 4 +TokenPrimaryGroup = 5 +TokenDefaultDacl = 6 +TokenSource = 7 +TokenType = 8 +TokenImpersonationLevel = 9 +TokenStatistics = 10 +TokenRestrictedSids = 11 +TokenSessionId = 12 +TokenGroupsAndPrivileges = 13 +TokenSessionReference = 14 +TokenSandBoxInert = 15 +TokenAuditPolicy = 16 +TokenOrigin = 17 +TokenElevationType = 18 +TokenLinkedToken = 19 +TokenElevation = 20 +TokenHasRestrictions = 21 +TokenAccessInformation = 22 +TokenVirtualizationAllowed = 23 +TokenVirtualizationEnabled = 24 +TokenIntegrityLevel = 25 +TokenUIAccess = 26 +TokenMandatoryPolicy = 27 +TokenLogonSid = 28 +TokenIsAppContainer = 29 +TokenCapabilities = 30 +TokenAppContainerSid = 31 +TokenAppContainerNumber = 32 +TokenUserClaimAttributes = 33 +TokenDeviceClaimAttributes = 34 +TokenRestrictedUserClaimAttributes = 35 +TokenRestrictedDeviceClaimAttributes = 36 +TokenDeviceGroups = 37 +TokenRestrictedDeviceGroups = 38 +TokenSecurityAttributes = 39 +TokenIsRestricted = 40 +MaxTokenInfoClass = 41 + +# typedef enum tagTOKEN_TYPE { +# TokenPrimary = 1, +# TokenImpersonation +# } TOKEN_TYPE, *PTOKEN_TYPE; + +TOKEN_TYPE = ctypes.c_int 
+PTOKEN_TYPE = POINTER(TOKEN_TYPE) + +TokenPrimary = 1 +TokenImpersonation = 2 + +# typedef enum { +# TokenElevationTypeDefault = 1, +# TokenElevationTypeFull, +# TokenElevationTypeLimited +# } TOKEN_ELEVATION_TYPE , *PTOKEN_ELEVATION_TYPE; + +TokenElevationTypeDefault = 1 +TokenElevationTypeFull = 2 +TokenElevationTypeLimited = 3 + +TOKEN_ELEVATION_TYPE = ctypes.c_int +PTOKEN_ELEVATION_TYPE = POINTER(TOKEN_ELEVATION_TYPE) + +# typedef enum _SECURITY_IMPERSONATION_LEVEL { +# SecurityAnonymous, +# SecurityIdentification, +# SecurityImpersonation, +# SecurityDelegation +# } SECURITY_IMPERSONATION_LEVEL, *PSECURITY_IMPERSONATION_LEVEL; + +SecurityAnonymous = 0 +SecurityIdentification = 1 +SecurityImpersonation = 2 +SecurityDelegation = 3 + +SECURITY_IMPERSONATION_LEVEL = ctypes.c_int +PSECURITY_IMPERSONATION_LEVEL = POINTER(SECURITY_IMPERSONATION_LEVEL) + +# typedef struct _SID_AND_ATTRIBUTES { +# PSID Sid; +# DWORD Attributes; +# } SID_AND_ATTRIBUTES, *PSID_AND_ATTRIBUTES; +class SID_AND_ATTRIBUTES(Structure): + _fields_ = [ + ("Sid", PSID), + ("Attributes", DWORD), + ] +PSID_AND_ATTRIBUTES = POINTER(SID_AND_ATTRIBUTES) + +# typedef struct _TOKEN_USER { +# SID_AND_ATTRIBUTES User; +# } TOKEN_USER, *PTOKEN_USER; +class TOKEN_USER(Structure): + _fields_ = [ + ("User", SID_AND_ATTRIBUTES), + ] +PTOKEN_USER = POINTER(TOKEN_USER) + +# typedef struct _TOKEN_MANDATORY_LABEL { +# SID_AND_ATTRIBUTES Label; +# } TOKEN_MANDATORY_LABEL, *PTOKEN_MANDATORY_LABEL; +class TOKEN_MANDATORY_LABEL(Structure): + _fields_ = [ + ("Label", SID_AND_ATTRIBUTES), + ] +PTOKEN_MANDATORY_LABEL = POINTER(TOKEN_MANDATORY_LABEL) + +# typedef struct _TOKEN_OWNER { +# PSID Owner; +# } TOKEN_OWNER, *PTOKEN_OWNER; +class TOKEN_OWNER(Structure): + _fields_ = [ + ("Owner", PSID), + ] +PTOKEN_OWNER = POINTER(TOKEN_OWNER) + +# typedef struct _TOKEN_PRIMARY_GROUP { +# PSID PrimaryGroup; +# } TOKEN_PRIMARY_GROUP, *PTOKEN_PRIMARY_GROUP; +class TOKEN_PRIMARY_GROUP(Structure): + _fields_ = [ + ("PrimaryGroup", PSID), + ] +PTOKEN_PRIMARY_GROUP = POINTER(TOKEN_PRIMARY_GROUP) + +# typedef struct _TOKEN_APPCONTAINER_INFORMATION { +# PSID TokenAppContainer; +# } TOKEN_APPCONTAINER_INFORMATION, *PTOKEN_APPCONTAINER_INFORMATION; +class TOKEN_APPCONTAINER_INFORMATION(Structure): + _fields_ = [ + ("TokenAppContainer", PSID), + ] +PTOKEN_APPCONTAINER_INFORMATION = POINTER(TOKEN_APPCONTAINER_INFORMATION) + +# typedef struct _TOKEN_ORIGIN { +# LUID OriginatingLogonSession; +# } TOKEN_ORIGIN, *PTOKEN_ORIGIN; +class TOKEN_ORIGIN(Structure): + _fields_ = [ + ("OriginatingLogonSession", LUID), + ] +PTOKEN_ORIGIN = POINTER(TOKEN_ORIGIN) + +# typedef struct _TOKEN_LINKED_TOKEN { +# HANDLE LinkedToken; +# } TOKEN_LINKED_TOKEN, *PTOKEN_LINKED_TOKEN; +class TOKEN_LINKED_TOKEN(Structure): + _fields_ = [ + ("LinkedToken", HANDLE), + ] +PTOKEN_LINKED_TOKEN = POINTER(TOKEN_LINKED_TOKEN) + +# typedef struct _TOKEN_STATISTICS { +# LUID TokenId; +# LUID AuthenticationId; +# LARGE_INTEGER ExpirationTime; +# TOKEN_TYPE TokenType; +# SECURITY_IMPERSONATION_LEVEL ImpersonationLevel; +# DWORD DynamicCharged; +# DWORD DynamicAvailable; +# DWORD GroupCount; +# DWORD PrivilegeCount; +# LUID ModifiedId; +# } TOKEN_STATISTICS, *PTOKEN_STATISTICS; +class TOKEN_STATISTICS(Structure): + _fields_ = [ + ("TokenId", LUID), + ("AuthenticationId", LUID), + ("ExpirationTime", LONGLONG), # LARGE_INTEGER + ("TokenType", TOKEN_TYPE), + ("ImpersonationLevel", SECURITY_IMPERSONATION_LEVEL), + ("DynamicCharged", DWORD), + ("DynamicAvailable", DWORD), + ("GroupCount", DWORD), + 
("PrivilegeCount", DWORD), + ("ModifiedId", LUID), + ] +PTOKEN_STATISTICS = POINTER(TOKEN_STATISTICS) + +#--- SID_NAME_USE enum -------------------------------------------------------- + +# typedef enum _SID_NAME_USE { +# SidTypeUser = 1, +# SidTypeGroup, +# SidTypeDomain, +# SidTypeAlias, +# SidTypeWellKnownGroup, +# SidTypeDeletedAccount, +# SidTypeInvalid, +# SidTypeUnknown, +# SidTypeComputer, +# SidTypeLabel +# } SID_NAME_USE, *PSID_NAME_USE; + +SidTypeUser = 1 +SidTypeGroup = 2 +SidTypeDomain = 3 +SidTypeAlias = 4 +SidTypeWellKnownGroup = 5 +SidTypeDeletedAccount = 6 +SidTypeInvalid = 7 +SidTypeUnknown = 8 +SidTypeComputer = 9 +SidTypeLabel = 10 + +#--- WAITCHAIN_NODE_INFO structure and types ---------------------------------- + +WCT_MAX_NODE_COUNT = 16 +WCT_OBJNAME_LENGTH = 128 +WCT_ASYNC_OPEN_FLAG = 1 +WCTP_OPEN_ALL_FLAGS = WCT_ASYNC_OPEN_FLAG +WCT_OUT_OF_PROC_FLAG = 1 +WCT_OUT_OF_PROC_COM_FLAG = 2 +WCT_OUT_OF_PROC_CS_FLAG = 4 +WCTP_GETINFO_ALL_FLAGS = WCT_OUT_OF_PROC_FLAG | WCT_OUT_OF_PROC_COM_FLAG | WCT_OUT_OF_PROC_CS_FLAG + +HWCT = LPVOID + +# typedef enum _WCT_OBJECT_TYPE +# { +# WctCriticalSectionType = 1, +# WctSendMessageType, +# WctMutexType, +# WctAlpcType, +# WctComType, +# WctThreadWaitType, +# WctProcessWaitType, +# WctThreadType, +# WctComActivationType, +# WctUnknownType, +# WctMaxType +# } WCT_OBJECT_TYPE; + +WCT_OBJECT_TYPE = DWORD + +WctCriticalSectionType = 1 +WctSendMessageType = 2 +WctMutexType = 3 +WctAlpcType = 4 +WctComType = 5 +WctThreadWaitType = 6 +WctProcessWaitType = 7 +WctThreadType = 8 +WctComActivationType = 9 +WctUnknownType = 10 +WctMaxType = 11 + +# typedef enum _WCT_OBJECT_STATUS +# { +# WctStatusNoAccess = 1, // ACCESS_DENIED for this object +# WctStatusRunning, // Thread status +# WctStatusBlocked, // Thread status +# WctStatusPidOnly, // Thread status +# WctStatusPidOnlyRpcss, // Thread status +# WctStatusOwned, // Dispatcher object status +# WctStatusNotOwned, // Dispatcher object status +# WctStatusAbandoned, // Dispatcher object status +# WctStatusUnknown, // All objects +# WctStatusError, // All objects +# WctStatusMax +# } WCT_OBJECT_STATUS; + +WCT_OBJECT_STATUS = DWORD + +WctStatusNoAccess = 1 # ACCESS_DENIED for this object +WctStatusRunning = 2 # Thread status +WctStatusBlocked = 3 # Thread status +WctStatusPidOnly = 4 # Thread status +WctStatusPidOnlyRpcss = 5 # Thread status +WctStatusOwned = 6 # Dispatcher object status +WctStatusNotOwned = 7 # Dispatcher object status +WctStatusAbandoned = 8 # Dispatcher object status +WctStatusUnknown = 9 # All objects +WctStatusError = 10 # All objects +WctStatusMax = 11 + +# typedef struct _WAITCHAIN_NODE_INFO { +# WCT_OBJECT_TYPE ObjectType; +# WCT_OBJECT_STATUS ObjectStatus; +# union { +# struct { +# WCHAR ObjectName[WCT_OBJNAME_LENGTH]; +# LARGE_INTEGER Timeout; +# BOOL Alertable; +# } LockObject; +# struct { +# DWORD ProcessId; +# DWORD ThreadId; +# DWORD WaitTime; +# DWORD ContextSwitches; +# } ThreadObject; +# } ; +# }WAITCHAIN_NODE_INFO, *PWAITCHAIN_NODE_INFO; + +class _WAITCHAIN_NODE_INFO_STRUCT_1(Structure): + _fields_ = [ + ("ObjectName", WCHAR * WCT_OBJNAME_LENGTH), + ("Timeout", LONGLONG), # LARGE_INTEGER + ("Alertable", BOOL), + ] + +class _WAITCHAIN_NODE_INFO_STRUCT_2(Structure): + _fields_ = [ + ("ProcessId", DWORD), + ("ThreadId", DWORD), + ("WaitTime", DWORD), + ("ContextSwitches", DWORD), + ] + +class _WAITCHAIN_NODE_INFO_UNION(Union): + _fields_ = [ + ("LockObject", _WAITCHAIN_NODE_INFO_STRUCT_1), + ("ThreadObject", _WAITCHAIN_NODE_INFO_STRUCT_2), + ] + +class 
WAITCHAIN_NODE_INFO(Structure): + _fields_ = [ + ("ObjectType", WCT_OBJECT_TYPE), + ("ObjectStatus", WCT_OBJECT_STATUS), + ("u", _WAITCHAIN_NODE_INFO_UNION), + ] + +PWAITCHAIN_NODE_INFO = POINTER(WAITCHAIN_NODE_INFO) + +class WaitChainNodeInfo (object): + """ + Represents a node in the wait chain. + + It's a wrapper on the L{WAITCHAIN_NODE_INFO} structure. + + The following members are defined only + if the node is of L{WctThreadType} type: + - C{ProcessId} + - C{ThreadId} + - C{WaitTime} + - C{ContextSwitches} + + @see: L{GetThreadWaitChain} + + @type ObjectName: unicode + @ivar ObjectName: Object name. May be an empty string. + + @type ObjectType: int + @ivar ObjectType: Object type. + Should be one of the following values: + - L{WctCriticalSectionType} + - L{WctSendMessageType} + - L{WctMutexType} + - L{WctAlpcType} + - L{WctComType} + - L{WctThreadWaitType} + - L{WctProcessWaitType} + - L{WctThreadType} + - L{WctComActivationType} + - L{WctUnknownType} + + @type ObjectStatus: int + @ivar ObjectStatus: Wait status. + Should be one of the following values: + - L{WctStatusNoAccess} I{(ACCESS_DENIED for this object)} + - L{WctStatusRunning} I{(Thread status)} + - L{WctStatusBlocked} I{(Thread status)} + - L{WctStatusPidOnly} I{(Thread status)} + - L{WctStatusPidOnlyRpcss} I{(Thread status)} + - L{WctStatusOwned} I{(Dispatcher object status)} + - L{WctStatusNotOwned} I{(Dispatcher object status)} + - L{WctStatusAbandoned} I{(Dispatcher object status)} + - L{WctStatusUnknown} I{(All objects)} + - L{WctStatusError} I{(All objects)} + + @type ProcessId: int + @ivar ProcessId: Process global ID. + + @type ThreadId: int + @ivar ThreadId: Thread global ID. + + @type WaitTime: int + @ivar WaitTime: Wait time. + + @type ContextSwitches: int + @ivar ContextSwitches: Number of context switches. + """ + + #@type Timeout: int + #@ivar Timeout: Currently not documented in MSDN. + # + #@type Alertable: bool + #@ivar Alertable: Currently not documented in MSDN. + + # TODO: __repr__ + + def __init__(self, aStructure): + self.ObjectType = aStructure.ObjectType + self.ObjectStatus = aStructure.ObjectStatus + if self.ObjectType == WctThreadType: + self.ProcessId = aStructure.u.ThreadObject.ProcessId + self.ThreadId = aStructure.u.ThreadObject.ThreadId + self.WaitTime = aStructure.u.ThreadObject.WaitTime + self.ContextSwitches = aStructure.u.ThreadObject.ContextSwitches + self.ObjectName = u'' + else: + self.ObjectName = aStructure.u.LockObject.ObjectName.value + #self.Timeout = aStructure.u.LockObject.Timeout + #self.Alertable = bool(aStructure.u.LockObject.Alertable) + +class ThreadWaitChainSessionHandle (Handle): + """ + Thread wait chain session handle. + + Returned by L{OpenThreadWaitChainSession}. + + @see: L{Handle} + """ + + def __init__(self, aHandle = None): + """ + @type aHandle: int + @param aHandle: Win32 handle value. 
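+
+        @note: The handle is owned by this wrapper (C{bOwnership = True}) and
+            is released through L{CloseThreadWaitChainSession}.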
+ """ + super(ThreadWaitChainSessionHandle, self).__init__(aHandle, + bOwnership = True) + + def _close(self): + if self.value is None: + raise ValueError("Handle was already closed!") + CloseThreadWaitChainSession(self.value) + + def dup(self): + raise NotImplementedError() + + def wait(self, dwMilliseconds = None): + raise NotImplementedError() + + @property + def inherit(self): + return False + + @property + def protectFromClose(self): + return False + +#--- Privilege dropping ------------------------------------------------------- + +SAFER_LEVEL_HANDLE = HANDLE + +SAFER_SCOPEID_MACHINE = 1 +SAFER_SCOPEID_USER = 2 + +SAFER_LEVEL_OPEN = 1 + +SAFER_LEVELID_DISALLOWED = 0x00000 +SAFER_LEVELID_UNTRUSTED = 0x01000 +SAFER_LEVELID_CONSTRAINED = 0x10000 +SAFER_LEVELID_NORMALUSER = 0x20000 +SAFER_LEVELID_FULLYTRUSTED = 0x40000 + +SAFER_POLICY_INFO_CLASS = DWORD +SaferPolicyLevelList = 1 +SaferPolicyEnableTransparentEnforcement = 2 +SaferPolicyDefaultLevel = 3 +SaferPolicyEvaluateUserScope = 4 +SaferPolicyScopeFlags = 5 + +SAFER_TOKEN_NULL_IF_EQUAL = 1 +SAFER_TOKEN_COMPARE_ONLY = 2 +SAFER_TOKEN_MAKE_INERT = 4 +SAFER_TOKEN_WANT_FLAGS = 8 +SAFER_TOKEN_MASK = 15 + +#--- Service Control Manager types, constants and structures ------------------ + +SC_HANDLE = HANDLE + +SERVICES_ACTIVE_DATABASEW = u"ServicesActive" +SERVICES_FAILED_DATABASEW = u"ServicesFailed" + +SERVICES_ACTIVE_DATABASEA = "ServicesActive" +SERVICES_FAILED_DATABASEA = "ServicesFailed" + +SC_GROUP_IDENTIFIERW = u'+' +SC_GROUP_IDENTIFIERA = '+' + +SERVICE_NO_CHANGE = 0xffffffff + +# enum SC_STATUS_TYPE +SC_STATUS_TYPE = ctypes.c_int +SC_STATUS_PROCESS_INFO = 0 + +# enum SC_ENUM_TYPE +SC_ENUM_TYPE = ctypes.c_int +SC_ENUM_PROCESS_INFO = 0 + +# Access rights +# http://msdn.microsoft.com/en-us/library/windows/desktop/ms685981(v=vs.85).aspx + +SERVICE_ALL_ACCESS = 0xF01FF +SERVICE_QUERY_CONFIG = 0x0001 +SERVICE_CHANGE_CONFIG = 0x0002 +SERVICE_QUERY_STATUS = 0x0004 +SERVICE_ENUMERATE_DEPENDENTS = 0x0008 +SERVICE_START = 0x0010 +SERVICE_STOP = 0x0020 +SERVICE_PAUSE_CONTINUE = 0x0040 +SERVICE_INTERROGATE = 0x0080 +SERVICE_USER_DEFINED_CONTROL = 0x0100 + +SC_MANAGER_ALL_ACCESS = 0xF003F +SC_MANAGER_CONNECT = 0x0001 +SC_MANAGER_CREATE_SERVICE = 0x0002 +SC_MANAGER_ENUMERATE_SERVICE = 0x0004 +SC_MANAGER_LOCK = 0x0008 +SC_MANAGER_QUERY_LOCK_STATUS = 0x0010 +SC_MANAGER_MODIFY_BOOT_CONFIG = 0x0020 + +# CreateService() service start type +SERVICE_BOOT_START = 0x00000000 +SERVICE_SYSTEM_START = 0x00000001 +SERVICE_AUTO_START = 0x00000002 +SERVICE_DEMAND_START = 0x00000003 +SERVICE_DISABLED = 0x00000004 + +# CreateService() error control flags +SERVICE_ERROR_IGNORE = 0x00000000 +SERVICE_ERROR_NORMAL = 0x00000001 +SERVICE_ERROR_SEVERE = 0x00000002 +SERVICE_ERROR_CRITICAL = 0x00000003 + +# EnumServicesStatusEx() service state filters +SERVICE_ACTIVE = 1 +SERVICE_INACTIVE = 2 +SERVICE_STATE_ALL = 3 + +# SERVICE_STATUS_PROCESS.dwServiceType +SERVICE_KERNEL_DRIVER = 0x00000001 +SERVICE_FILE_SYSTEM_DRIVER = 0x00000002 +SERVICE_ADAPTER = 0x00000004 +SERVICE_RECOGNIZER_DRIVER = 0x00000008 +SERVICE_WIN32_OWN_PROCESS = 0x00000010 +SERVICE_WIN32_SHARE_PROCESS = 0x00000020 +SERVICE_INTERACTIVE_PROCESS = 0x00000100 + +# EnumServicesStatusEx() service type filters (in addition to actual types) +SERVICE_DRIVER = 0x0000000B # SERVICE_KERNEL_DRIVER and SERVICE_FILE_SYSTEM_DRIVER +SERVICE_WIN32 = 0x00000030 # SERVICE_WIN32_OWN_PROCESS and SERVICE_WIN32_SHARE_PROCESS + +# SERVICE_STATUS_PROCESS.dwCurrentState +SERVICE_STOPPED = 0x00000001 +SERVICE_START_PENDING = 
0x00000002 +SERVICE_STOP_PENDING = 0x00000003 +SERVICE_RUNNING = 0x00000004 +SERVICE_CONTINUE_PENDING = 0x00000005 +SERVICE_PAUSE_PENDING = 0x00000006 +SERVICE_PAUSED = 0x00000007 + +# SERVICE_STATUS_PROCESS.dwControlsAccepted +SERVICE_ACCEPT_STOP = 0x00000001 +SERVICE_ACCEPT_PAUSE_CONTINUE = 0x00000002 +SERVICE_ACCEPT_SHUTDOWN = 0x00000004 +SERVICE_ACCEPT_PARAMCHANGE = 0x00000008 +SERVICE_ACCEPT_NETBINDCHANGE = 0x00000010 +SERVICE_ACCEPT_HARDWAREPROFILECHANGE = 0x00000020 +SERVICE_ACCEPT_POWEREVENT = 0x00000040 +SERVICE_ACCEPT_SESSIONCHANGE = 0x00000080 +SERVICE_ACCEPT_PRESHUTDOWN = 0x00000100 + +# SERVICE_STATUS_PROCESS.dwServiceFlags +SERVICE_RUNS_IN_SYSTEM_PROCESS = 0x00000001 + +# Service control flags +SERVICE_CONTROL_STOP = 0x00000001 +SERVICE_CONTROL_PAUSE = 0x00000002 +SERVICE_CONTROL_CONTINUE = 0x00000003 +SERVICE_CONTROL_INTERROGATE = 0x00000004 +SERVICE_CONTROL_SHUTDOWN = 0x00000005 +SERVICE_CONTROL_PARAMCHANGE = 0x00000006 +SERVICE_CONTROL_NETBINDADD = 0x00000007 +SERVICE_CONTROL_NETBINDREMOVE = 0x00000008 +SERVICE_CONTROL_NETBINDENABLE = 0x00000009 +SERVICE_CONTROL_NETBINDDISABLE = 0x0000000A +SERVICE_CONTROL_DEVICEEVENT = 0x0000000B +SERVICE_CONTROL_HARDWAREPROFILECHANGE = 0x0000000C +SERVICE_CONTROL_POWEREVENT = 0x0000000D +SERVICE_CONTROL_SESSIONCHANGE = 0x0000000E + +# Service control accepted bitmasks +SERVICE_ACCEPT_STOP = 0x00000001 +SERVICE_ACCEPT_PAUSE_CONTINUE = 0x00000002 +SERVICE_ACCEPT_SHUTDOWN = 0x00000004 +SERVICE_ACCEPT_PARAMCHANGE = 0x00000008 +SERVICE_ACCEPT_NETBINDCHANGE = 0x00000010 +SERVICE_ACCEPT_HARDWAREPROFILECHANGE = 0x00000020 +SERVICE_ACCEPT_POWEREVENT = 0x00000040 +SERVICE_ACCEPT_SESSIONCHANGE = 0x00000080 +SERVICE_ACCEPT_PRESHUTDOWN = 0x00000100 +SERVICE_ACCEPT_TIMECHANGE = 0x00000200 +SERVICE_ACCEPT_TRIGGEREVENT = 0x00000400 +SERVICE_ACCEPT_USERMODEREBOOT = 0x00000800 + +# enum SC_ACTION_TYPE +SC_ACTION_NONE = 0 +SC_ACTION_RESTART = 1 +SC_ACTION_REBOOT = 2 +SC_ACTION_RUN_COMMAND = 3 + +# QueryServiceConfig2 +SERVICE_CONFIG_DESCRIPTION = 1 +SERVICE_CONFIG_FAILURE_ACTIONS = 2 + +# typedef struct _SERVICE_STATUS { +# DWORD dwServiceType; +# DWORD dwCurrentState; +# DWORD dwControlsAccepted; +# DWORD dwWin32ExitCode; +# DWORD dwServiceSpecificExitCode; +# DWORD dwCheckPoint; +# DWORD dwWaitHint; +# } SERVICE_STATUS, *LPSERVICE_STATUS; +class SERVICE_STATUS(Structure): + _fields_ = [ + ("dwServiceType", DWORD), + ("dwCurrentState", DWORD), + ("dwControlsAccepted", DWORD), + ("dwWin32ExitCode", DWORD), + ("dwServiceSpecificExitCode", DWORD), + ("dwCheckPoint", DWORD), + ("dwWaitHint", DWORD), + ] +LPSERVICE_STATUS = POINTER(SERVICE_STATUS) + +# typedef struct _SERVICE_STATUS_PROCESS { +# DWORD dwServiceType; +# DWORD dwCurrentState; +# DWORD dwControlsAccepted; +# DWORD dwWin32ExitCode; +# DWORD dwServiceSpecificExitCode; +# DWORD dwCheckPoint; +# DWORD dwWaitHint; +# DWORD dwProcessId; +# DWORD dwServiceFlags; +# } SERVICE_STATUS_PROCESS, *LPSERVICE_STATUS_PROCESS; +class SERVICE_STATUS_PROCESS(Structure): + _fields_ = SERVICE_STATUS._fields_ + [ + ("dwProcessId", DWORD), + ("dwServiceFlags", DWORD), + ] +LPSERVICE_STATUS_PROCESS = POINTER(SERVICE_STATUS_PROCESS) + +# typedef struct _ENUM_SERVICE_STATUS { +# LPTSTR lpServiceName; +# LPTSTR lpDisplayName; +# SERVICE_STATUS ServiceStatus; +# } ENUM_SERVICE_STATUS, *LPENUM_SERVICE_STATUS; +class ENUM_SERVICE_STATUSA(Structure): + _fields_ = [ + ("lpServiceName", LPSTR), + ("lpDisplayName", LPSTR), + ("ServiceStatus", SERVICE_STATUS), + ] +class ENUM_SERVICE_STATUSW(Structure): + _fields_ = [ + 
("lpServiceName", LPWSTR), + ("lpDisplayName", LPWSTR), + ("ServiceStatus", SERVICE_STATUS), + ] +LPENUM_SERVICE_STATUSA = POINTER(ENUM_SERVICE_STATUSA) +LPENUM_SERVICE_STATUSW = POINTER(ENUM_SERVICE_STATUSW) + +# typedef struct _ENUM_SERVICE_STATUS_PROCESS { +# LPTSTR lpServiceName; +# LPTSTR lpDisplayName; +# SERVICE_STATUS_PROCESS ServiceStatusProcess; +# } ENUM_SERVICE_STATUS_PROCESS, *LPENUM_SERVICE_STATUS_PROCESS; +class ENUM_SERVICE_STATUS_PROCESSA(Structure): + _fields_ = [ + ("lpServiceName", LPSTR), + ("lpDisplayName", LPSTR), + ("ServiceStatusProcess", SERVICE_STATUS_PROCESS), + ] +class ENUM_SERVICE_STATUS_PROCESSW(Structure): + _fields_ = [ + ("lpServiceName", LPWSTR), + ("lpDisplayName", LPWSTR), + ("ServiceStatusProcess", SERVICE_STATUS_PROCESS), + ] +LPENUM_SERVICE_STATUS_PROCESSA = POINTER(ENUM_SERVICE_STATUS_PROCESSA) +LPENUM_SERVICE_STATUS_PROCESSW = POINTER(ENUM_SERVICE_STATUS_PROCESSW) + +class ServiceStatus(object): + """ + Wrapper for the L{SERVICE_STATUS} structure. + """ + + def __init__(self, raw): + """ + @type raw: L{SERVICE_STATUS} + @param raw: Raw structure for this service status data. + """ + self.ServiceType = raw.dwServiceType + self.CurrentState = raw.dwCurrentState + self.ControlsAccepted = raw.dwControlsAccepted + self.Win32ExitCode = raw.dwWin32ExitCode + self.ServiceSpecificExitCode = raw.dwServiceSpecificExitCode + self.CheckPoint = raw.dwCheckPoint + self.WaitHint = raw.dwWaitHint + +class ServiceStatusProcess(object): + """ + Wrapper for the L{SERVICE_STATUS_PROCESS} structure. + """ + + def __init__(self, raw): + """ + @type raw: L{SERVICE_STATUS_PROCESS} + @param raw: Raw structure for this service status data. + """ + self.ServiceType = raw.dwServiceType + self.CurrentState = raw.dwCurrentState + self.ControlsAccepted = raw.dwControlsAccepted + self.Win32ExitCode = raw.dwWin32ExitCode + self.ServiceSpecificExitCode = raw.dwServiceSpecificExitCode + self.CheckPoint = raw.dwCheckPoint + self.WaitHint = raw.dwWaitHint + self.ProcessId = raw.dwProcessId + self.ServiceFlags = raw.dwServiceFlags + +class ServiceStatusEntry(object): + """ + Service status entry returned by L{EnumServicesStatus}. + """ + + def __init__(self, raw): + """ + @type raw: L{ENUM_SERVICE_STATUSA} or L{ENUM_SERVICE_STATUSW} + @param raw: Raw structure for this service status entry. 
+ """ + self.ServiceName = raw.lpServiceName + self.DisplayName = raw.lpDisplayName + self.ServiceType = raw.ServiceStatus.dwServiceType + self.CurrentState = raw.ServiceStatus.dwCurrentState + self.ControlsAccepted = raw.ServiceStatus.dwControlsAccepted + self.Win32ExitCode = raw.ServiceStatus.dwWin32ExitCode + self.ServiceSpecificExitCode = raw.ServiceStatus.dwServiceSpecificExitCode + self.CheckPoint = raw.ServiceStatus.dwCheckPoint + self.WaitHint = raw.ServiceStatus.dwWaitHint + + def __str__(self): + output = [] + if self.ServiceType & SERVICE_INTERACTIVE_PROCESS: + output.append("Interactive service") + else: + output.append("Service") + if self.DisplayName: + output.append("\"%s\" (%s)" % (self.DisplayName, self.ServiceName)) + else: + output.append("\"%s\"" % self.ServiceName) + if self.CurrentState == SERVICE_CONTINUE_PENDING: + output.append("is about to continue.") + elif self.CurrentState == SERVICE_PAUSE_PENDING: + output.append("is pausing.") + elif self.CurrentState == SERVICE_PAUSED: + output.append("is paused.") + elif self.CurrentState == SERVICE_RUNNING: + output.append("is running.") + elif self.CurrentState == SERVICE_START_PENDING: + output.append("is starting.") + elif self.CurrentState == SERVICE_STOP_PENDING: + output.append("is stopping.") + elif self.CurrentState == SERVICE_STOPPED: + output.append("is stopped.") + return " ".join(output) + +class ServiceStatusProcessEntry(object): + """ + Service status entry returned by L{EnumServicesStatusEx}. + """ + + def __init__(self, raw): + """ + @type raw: L{ENUM_SERVICE_STATUS_PROCESSA} or L{ENUM_SERVICE_STATUS_PROCESSW} + @param raw: Raw structure for this service status entry. + """ + self.ServiceName = raw.lpServiceName + self.DisplayName = raw.lpDisplayName + self.ServiceType = raw.ServiceStatusProcess.dwServiceType + self.CurrentState = raw.ServiceStatusProcess.dwCurrentState + self.ControlsAccepted = raw.ServiceStatusProcess.dwControlsAccepted + self.Win32ExitCode = raw.ServiceStatusProcess.dwWin32ExitCode + self.ServiceSpecificExitCode = raw.ServiceStatusProcess.dwServiceSpecificExitCode + self.CheckPoint = raw.ServiceStatusProcess.dwCheckPoint + self.WaitHint = raw.ServiceStatusProcess.dwWaitHint + self.ProcessId = raw.ServiceStatusProcess.dwProcessId + self.ServiceFlags = raw.ServiceStatusProcess.dwServiceFlags + + def __str__(self): + output = [] + if self.ServiceType & SERVICE_INTERACTIVE_PROCESS: + output.append("Interactive service ") + else: + output.append("Service ") + if self.DisplayName: + output.append("\"%s\" (%s)" % (self.DisplayName, self.ServiceName)) + else: + output.append("\"%s\"" % self.ServiceName) + if self.CurrentState == SERVICE_CONTINUE_PENDING: + output.append(" is about to continue") + elif self.CurrentState == SERVICE_PAUSE_PENDING: + output.append(" is pausing") + elif self.CurrentState == SERVICE_PAUSED: + output.append(" is paused") + elif self.CurrentState == SERVICE_RUNNING: + output.append(" is running") + elif self.CurrentState == SERVICE_START_PENDING: + output.append(" is starting") + elif self.CurrentState == SERVICE_STOP_PENDING: + output.append(" is stopping") + elif self.CurrentState == SERVICE_STOPPED: + output.append(" is stopped") + if self.ProcessId: + output.append(" at process %d" % self.ProcessId) + output.append(".") + return "".join(output) + +#--- Handle wrappers ---------------------------------------------------------- + +# XXX maybe add functions related to the tokens here? +class TokenHandle (Handle): + """ + Access token handle. 
+ + @see: L{Handle} + """ + pass + +class RegistryKeyHandle (UserModeHandle): + """ + Registry key handle. + """ + + _TYPE = HKEY + + def _close(self): + RegCloseKey(self.value) + +class SaferLevelHandle (UserModeHandle): + """ + Safer level handle. + + @see: U{http://msdn.microsoft.com/en-us/library/ms722425(VS.85).aspx} + """ + + _TYPE = SAFER_LEVEL_HANDLE + + def _close(self): + SaferCloseLevel(self.value) + +class ServiceHandle (UserModeHandle): + """ + Service handle. + + @see: U{http://msdn.microsoft.com/en-us/library/windows/desktop/ms684330(v=vs.85).aspx} + """ + + _TYPE = SC_HANDLE + + def _close(self): + CloseServiceHandle(self.value) + +class ServiceControlManagerHandle (UserModeHandle): + """ + Service Control Manager (SCM) handle. + + @see: U{http://msdn.microsoft.com/en-us/library/windows/desktop/ms684323(v=vs.85).aspx} + """ + + _TYPE = SC_HANDLE + + def _close(self): + CloseServiceHandle(self.value) + +#--- advapi32.dll ------------------------------------------------------------- + +# BOOL WINAPI GetUserName( +# __out LPTSTR lpBuffer, +# __inout LPDWORD lpnSize +# ); +def GetUserNameA(): + _GetUserNameA = windll.advapi32.GetUserNameA + _GetUserNameA.argtypes = [LPSTR, LPDWORD] + _GetUserNameA.restype = bool + + nSize = DWORD(0) + _GetUserNameA(None, byref(nSize)) + error = GetLastError() + if error != ERROR_INSUFFICIENT_BUFFER: + raise ctypes.WinError(error) + lpBuffer = ctypes.create_string_buffer('', nSize.value + 1) + success = _GetUserNameA(lpBuffer, byref(nSize)) + if not success: + raise ctypes.WinError() + return lpBuffer.value + +def GetUserNameW(): + _GetUserNameW = windll.advapi32.GetUserNameW + _GetUserNameW.argtypes = [LPWSTR, LPDWORD] + _GetUserNameW.restype = bool + + nSize = DWORD(0) + _GetUserNameW(None, byref(nSize)) + error = GetLastError() + if error != ERROR_INSUFFICIENT_BUFFER: + raise ctypes.WinError(error) + lpBuffer = ctypes.create_unicode_buffer(u'', nSize.value + 1) + success = _GetUserNameW(lpBuffer, byref(nSize)) + if not success: + raise ctypes.WinError() + return lpBuffer.value + +GetUserName = DefaultStringType(GetUserNameA, GetUserNameW) + +# BOOL WINAPI LookupAccountName( +# __in_opt LPCTSTR lpSystemName, +# __in LPCTSTR lpAccountName, +# __out_opt PSID Sid, +# __inout LPDWORD cbSid, +# __out_opt LPTSTR ReferencedDomainName, +# __inout LPDWORD cchReferencedDomainName, +# __out PSID_NAME_USE peUse +# ); + +# XXX TO DO + +# BOOL WINAPI LookupAccountSid( +# __in_opt LPCTSTR lpSystemName, +# __in PSID lpSid, +# __out_opt LPTSTR lpName, +# __inout LPDWORD cchName, +# __out_opt LPTSTR lpReferencedDomainName, +# __inout LPDWORD cchReferencedDomainName, +# __out PSID_NAME_USE peUse +# ); +def LookupAccountSidA(lpSystemName, lpSid): + _LookupAccountSidA = windll.advapi32.LookupAccountSidA + _LookupAccountSidA.argtypes = [LPSTR, PSID, LPSTR, LPDWORD, LPSTR, LPDWORD, LPDWORD] + _LookupAccountSidA.restype = bool + + cchName = DWORD(0) + cchReferencedDomainName = DWORD(0) + peUse = DWORD(0) + _LookupAccountSidA(lpSystemName, lpSid, None, byref(cchName), None, byref(cchReferencedDomainName), byref(peUse)) + error = GetLastError() + if error != ERROR_INSUFFICIENT_BUFFER: + raise ctypes.WinError(error) + lpName = ctypes.create_string_buffer('', cchName + 1) + lpReferencedDomainName = ctypes.create_string_buffer('', cchReferencedDomainName + 1) + success = _LookupAccountSidA(lpSystemName, lpSid, lpName, byref(cchName), lpReferencedDomainName, byref(cchReferencedDomainName), byref(peUse)) + if not success: + raise ctypes.WinError() + return lpName.value, 
lpReferencedDomainName.value, peUse.value
+
+def LookupAccountSidW(lpSystemName, lpSid):
+    _LookupAccountSidW = windll.advapi32.LookupAccountSidW
+    _LookupAccountSidW.argtypes = [LPWSTR, PSID, LPWSTR, LPDWORD, LPWSTR, LPDWORD, LPDWORD]
+    _LookupAccountSidW.restype = bool
+
+    cchName = DWORD(0)
+    cchReferencedDomainName = DWORD(0)
+    peUse = DWORD(0)
+    _LookupAccountSidW(lpSystemName, lpSid, None, byref(cchName), None, byref(cchReferencedDomainName), byref(peUse))
+    error = GetLastError()
+    if error != ERROR_INSUFFICIENT_BUFFER:
+        raise ctypes.WinError(error)
+    lpName = ctypes.create_unicode_buffer(u'', cchName.value + 1)
+    lpReferencedDomainName = ctypes.create_unicode_buffer(u'', cchReferencedDomainName.value + 1)
+    success = _LookupAccountSidW(lpSystemName, lpSid, lpName, byref(cchName), lpReferencedDomainName, byref(cchReferencedDomainName), byref(peUse))
+    if not success:
+        raise ctypes.WinError()
+    return lpName.value, lpReferencedDomainName.value, peUse.value
+
+LookupAccountSid = GuessStringType(LookupAccountSidA, LookupAccountSidW)
+
+# BOOL ConvertSidToStringSid(
+#   __in   PSID Sid,
+#   __out  LPTSTR *StringSid
+# );
+def ConvertSidToStringSidA(Sid):
+    _ConvertSidToStringSidA = windll.advapi32.ConvertSidToStringSidA
+    _ConvertSidToStringSidA.argtypes = [PSID, LPSTR]
+    _ConvertSidToStringSidA.restype = bool
+    _ConvertSidToStringSidA.errcheck = RaiseIfZero
+
+    pStringSid = LPSTR()
+    _ConvertSidToStringSidA(Sid, byref(pStringSid))
+    try:
+        StringSid = pStringSid.value
+    finally:
+        LocalFree(pStringSid)
+    return StringSid
+
+def ConvertSidToStringSidW(Sid):
+    _ConvertSidToStringSidW = windll.advapi32.ConvertSidToStringSidW
+    _ConvertSidToStringSidW.argtypes = [PSID, LPWSTR]
+    _ConvertSidToStringSidW.restype = bool
+    _ConvertSidToStringSidW.errcheck = RaiseIfZero
+
+    pStringSid = LPWSTR()
+    _ConvertSidToStringSidW(Sid, byref(pStringSid))
+    try:
+        StringSid = pStringSid.value
+    finally:
+        LocalFree(pStringSid)
+    return StringSid
+
+ConvertSidToStringSid = DefaultStringType(ConvertSidToStringSidA, ConvertSidToStringSidW)
+
+# BOOL WINAPI ConvertStringSidToSid(
+#   __in   LPCTSTR StringSid,
+#   __out  PSID *Sid
+# );
+def ConvertStringSidToSidA(StringSid):
+    _ConvertStringSidToSidA = windll.advapi32.ConvertStringSidToSidA
+    _ConvertStringSidToSidA.argtypes = [LPSTR, PVOID]
+    _ConvertStringSidToSidA.restype = bool
+    _ConvertStringSidToSidA.errcheck = RaiseIfZero
+
+    Sid = PVOID()
+    _ConvertStringSidToSidA(StringSid, ctypes.pointer(Sid))
+    return Sid.value
+
+def ConvertStringSidToSidW(StringSid):
+    _ConvertStringSidToSidW = windll.advapi32.ConvertStringSidToSidW
+    _ConvertStringSidToSidW.argtypes = [LPWSTR, PVOID]
+    _ConvertStringSidToSidW.restype = bool
+    _ConvertStringSidToSidW.errcheck = RaiseIfZero
+
+    Sid = PVOID()
+    _ConvertStringSidToSidW(StringSid, ctypes.pointer(Sid))
+    return Sid.value
+
+ConvertStringSidToSid = GuessStringType(ConvertStringSidToSidA, ConvertStringSidToSidW)
+
+# BOOL WINAPI IsValidSid(
+#   __in  PSID pSid
+# );
+def IsValidSid(pSid):
+    _IsValidSid = windll.advapi32.IsValidSid
+    _IsValidSid.argtypes = [PSID]
+    _IsValidSid.restype = bool
+    return _IsValidSid(pSid)
+
+# BOOL WINAPI EqualSid(
+#   __in  PSID pSid1,
+#   __in  PSID pSid2
+# );
+def EqualSid(pSid1, pSid2):
+    _EqualSid = windll.advapi32.EqualSid
+    _EqualSid.argtypes = [PSID, PSID]
+    _EqualSid.restype = bool
+    return _EqualSid(pSid1, pSid2)
+
+# DWORD WINAPI GetLengthSid(
+#   __in  PSID pSid
+# );
+def GetLengthSid(pSid):
+    _GetLengthSid = windll.advapi32.GetLengthSid
+    _GetLengthSid.argtypes = [PSID]
+    _GetLengthSid.restype = DWORD
+    
return _GetLengthSid(pSid) + +# BOOL WINAPI CopySid( +# __in DWORD nDestinationSidLength, +# __out PSID pDestinationSid, +# __in PSID pSourceSid +# ); +def CopySid(pSourceSid): + _CopySid = windll.advapi32.CopySid + _CopySid.argtypes = [DWORD, PVOID, PSID] + _CopySid.restype = bool + _CopySid.errcheck = RaiseIfZero + + nDestinationSidLength = GetLengthSid(pSourceSid) + DestinationSid = ctypes.create_string_buffer('', nDestinationSidLength) + pDestinationSid = ctypes.cast(ctypes.pointer(DestinationSid), PVOID) + _CopySid(nDestinationSidLength, pDestinationSid, pSourceSid) + return ctypes.cast(pDestinationSid, PSID) + +# PVOID WINAPI FreeSid( +# __in PSID pSid +# ); +def FreeSid(pSid): + _FreeSid = windll.advapi32.FreeSid + _FreeSid.argtypes = [PSID] + _FreeSid.restype = PSID + _FreeSid.errcheck = RaiseIfNotZero + _FreeSid(pSid) + +# BOOL WINAPI OpenProcessToken( +# __in HANDLE ProcessHandle, +# __in DWORD DesiredAccess, +# __out PHANDLE TokenHandle +# ); +def OpenProcessToken(ProcessHandle, DesiredAccess = TOKEN_ALL_ACCESS): + _OpenProcessToken = windll.advapi32.OpenProcessToken + _OpenProcessToken.argtypes = [HANDLE, DWORD, PHANDLE] + _OpenProcessToken.restype = bool + _OpenProcessToken.errcheck = RaiseIfZero + + NewTokenHandle = HANDLE(INVALID_HANDLE_VALUE) + _OpenProcessToken(ProcessHandle, DesiredAccess, byref(NewTokenHandle)) + return TokenHandle(NewTokenHandle.value) + +# BOOL WINAPI OpenThreadToken( +# __in HANDLE ThreadHandle, +# __in DWORD DesiredAccess, +# __in BOOL OpenAsSelf, +# __out PHANDLE TokenHandle +# ); +def OpenThreadToken(ThreadHandle, DesiredAccess, OpenAsSelf = True): + _OpenThreadToken = windll.advapi32.OpenThreadToken + _OpenThreadToken.argtypes = [HANDLE, DWORD, BOOL, PHANDLE] + _OpenThreadToken.restype = bool + _OpenThreadToken.errcheck = RaiseIfZero + + NewTokenHandle = HANDLE(INVALID_HANDLE_VALUE) + _OpenThreadToken(ThreadHandle, DesiredAccess, OpenAsSelf, byref(NewTokenHandle)) + return TokenHandle(NewTokenHandle.value) + +# BOOL WINAPI DuplicateToken( +# _In_ HANDLE ExistingTokenHandle, +# _In_ SECURITY_IMPERSONATION_LEVEL ImpersonationLevel, +# _Out_ PHANDLE DuplicateTokenHandle +# ); +def DuplicateToken(ExistingTokenHandle, ImpersonationLevel = SecurityImpersonation): + _DuplicateToken = windll.advapi32.DuplicateToken + _DuplicateToken.argtypes = [HANDLE, SECURITY_IMPERSONATION_LEVEL, PHANDLE] + _DuplicateToken.restype = bool + _DuplicateToken.errcheck = RaiseIfZero + + DuplicateTokenHandle = HANDLE(INVALID_HANDLE_VALUE) + _DuplicateToken(ExistingTokenHandle, ImpersonationLevel, byref(DuplicateTokenHandle)) + return TokenHandle(DuplicateTokenHandle.value) + +# BOOL WINAPI DuplicateTokenEx( +# _In_ HANDLE hExistingToken, +# _In_ DWORD dwDesiredAccess, +# _In_opt_ LPSECURITY_ATTRIBUTES lpTokenAttributes, +# _In_ SECURITY_IMPERSONATION_LEVEL ImpersonationLevel, +# _In_ TOKEN_TYPE TokenType, +# _Out_ PHANDLE phNewToken +# ); +def DuplicateTokenEx(hExistingToken, dwDesiredAccess = TOKEN_ALL_ACCESS, lpTokenAttributes = None, ImpersonationLevel = SecurityImpersonation, TokenType = TokenPrimary): + _DuplicateTokenEx = windll.advapi32.DuplicateTokenEx + _DuplicateTokenEx.argtypes = [HANDLE, DWORD, LPSECURITY_ATTRIBUTES, SECURITY_IMPERSONATION_LEVEL, TOKEN_TYPE, PHANDLE] + _DuplicateTokenEx.restype = bool + _DuplicateTokenEx.errcheck = RaiseIfZero + + DuplicateTokenHandle = HANDLE(INVALID_HANDLE_VALUE) + _DuplicateTokenEx(hExistingToken, dwDesiredAccess, lpTokenAttributes, ImpersonationLevel, TokenType, byref(DuplicateTokenHandle)) + return 
TokenHandle(DuplicateTokenHandle.value) + +# BOOL WINAPI IsTokenRestricted( +# __in HANDLE TokenHandle +# ); +def IsTokenRestricted(hTokenHandle): + _IsTokenRestricted = windll.advapi32.IsTokenRestricted + _IsTokenRestricted.argtypes = [HANDLE] + _IsTokenRestricted.restype = bool + _IsTokenRestricted.errcheck = RaiseIfNotErrorSuccess + + SetLastError(ERROR_SUCCESS) + return _IsTokenRestricted(hTokenHandle) + +# BOOL WINAPI LookupPrivilegeValue( +# __in_opt LPCTSTR lpSystemName, +# __in LPCTSTR lpName, +# __out PLUID lpLuid +# ); +def LookupPrivilegeValueA(lpSystemName, lpName): + _LookupPrivilegeValueA = windll.advapi32.LookupPrivilegeValueA + _LookupPrivilegeValueA.argtypes = [LPSTR, LPSTR, PLUID] + _LookupPrivilegeValueA.restype = bool + _LookupPrivilegeValueA.errcheck = RaiseIfZero + + lpLuid = LUID() + if not lpSystemName: + lpSystemName = None + _LookupPrivilegeValueA(lpSystemName, lpName, byref(lpLuid)) + return lpLuid + +def LookupPrivilegeValueW(lpSystemName, lpName): + _LookupPrivilegeValueW = windll.advapi32.LookupPrivilegeValueW + _LookupPrivilegeValueW.argtypes = [LPWSTR, LPWSTR, PLUID] + _LookupPrivilegeValueW.restype = bool + _LookupPrivilegeValueW.errcheck = RaiseIfZero + + lpLuid = LUID() + if not lpSystemName: + lpSystemName = None + _LookupPrivilegeValueW(lpSystemName, lpName, byref(lpLuid)) + return lpLuid + +LookupPrivilegeValue = GuessStringType(LookupPrivilegeValueA, LookupPrivilegeValueW) + +# BOOL WINAPI LookupPrivilegeName( +# __in_opt LPCTSTR lpSystemName, +# __in PLUID lpLuid, +# __out_opt LPTSTR lpName, +# __inout LPDWORD cchName +# ); + +def LookupPrivilegeNameA(lpSystemName, lpLuid): + _LookupPrivilegeNameA = windll.advapi32.LookupPrivilegeNameA + _LookupPrivilegeNameA.argtypes = [LPSTR, PLUID, LPSTR, LPDWORD] + _LookupPrivilegeNameA.restype = bool + _LookupPrivilegeNameA.errcheck = RaiseIfZero + + cchName = DWORD(0) + _LookupPrivilegeNameA(lpSystemName, byref(lpLuid), NULL, byref(cchName)) + lpName = ctypes.create_string_buffer("", cchName.value) + _LookupPrivilegeNameA(lpSystemName, byref(lpLuid), byref(lpName), byref(cchName)) + return lpName.value + +def LookupPrivilegeNameW(lpSystemName, lpLuid): + _LookupPrivilegeNameW = windll.advapi32.LookupPrivilegeNameW + _LookupPrivilegeNameW.argtypes = [LPWSTR, PLUID, LPWSTR, LPDWORD] + _LookupPrivilegeNameW.restype = bool + _LookupPrivilegeNameW.errcheck = RaiseIfZero + + cchName = DWORD(0) + _LookupPrivilegeNameW(lpSystemName, byref(lpLuid), NULL, byref(cchName)) + lpName = ctypes.create_unicode_buffer(u"", cchName.value) + _LookupPrivilegeNameW(lpSystemName, byref(lpLuid), byref(lpName), byref(cchName)) + return lpName.value + +LookupPrivilegeName = GuessStringType(LookupPrivilegeNameA, LookupPrivilegeNameW) + +# BOOL WINAPI AdjustTokenPrivileges( +# __in HANDLE TokenHandle, +# __in BOOL DisableAllPrivileges, +# __in_opt PTOKEN_PRIVILEGES NewState, +# __in DWORD BufferLength, +# __out_opt PTOKEN_PRIVILEGES PreviousState, +# __out_opt PDWORD ReturnLength +# ); +def AdjustTokenPrivileges(TokenHandle, NewState = ()): + _AdjustTokenPrivileges = windll.advapi32.AdjustTokenPrivileges + _AdjustTokenPrivileges.argtypes = [HANDLE, BOOL, LPVOID, DWORD, LPVOID, LPVOID] + _AdjustTokenPrivileges.restype = bool + _AdjustTokenPrivileges.errcheck = RaiseIfZero + # + # I don't know how to allocate variable sized structures in ctypes :( + # so this hack will work by using always TOKEN_PRIVILEGES of one element + # and calling the API many times. 
This also means the PreviousState
+    # parameter won't be supported yet as it's too much hassle. In a future
+    # version I look forward to implementing this function correctly.
+    #
+    if not NewState:
+        _AdjustTokenPrivileges(TokenHandle, TRUE, NULL, 0, NULL, NULL)
+    else:
+        success = True
+        for (privilege, enabled) in NewState:
+            if not isinstance(privilege, LUID):
+                privilege = LookupPrivilegeValue(NULL, privilege)
+            if enabled == True:
+                flags = SE_PRIVILEGE_ENABLED
+            elif enabled == False:
+                flags = SE_PRIVILEGE_REMOVED
+            elif enabled == None:
+                flags = 0
+            else:
+                flags = enabled
+            laa = LUID_AND_ATTRIBUTES(privilege, flags)
+            tp  = TOKEN_PRIVILEGES(1, laa)
+            _AdjustTokenPrivileges(TokenHandle, FALSE, byref(tp), sizeof(tp), NULL, NULL)
+
+# BOOL WINAPI GetTokenInformation(
+#   __in       HANDLE TokenHandle,
+#   __in       TOKEN_INFORMATION_CLASS TokenInformationClass,
+#   __out_opt  LPVOID TokenInformation,
+#   __in       DWORD TokenInformationLength,
+#   __out      PDWORD ReturnLength
+# );
+def GetTokenInformation(hTokenHandle, TokenInformationClass):
+    if TokenInformationClass <= 0 or TokenInformationClass > MaxTokenInfoClass:
+        raise ValueError("Invalid value for TokenInformationClass (%i)" % TokenInformationClass)
+
+    # User SID.
+    if TokenInformationClass == TokenUser:
+        TokenInformation = TOKEN_USER()
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.User.Sid.value
+
+    # Owner SID.
+    if TokenInformationClass == TokenOwner:
+        TokenInformation = TOKEN_OWNER()
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.Owner.value
+
+    # Primary group SID.
+    if TokenInformationClass == TokenPrimaryGroup:
+        TokenInformation = TOKEN_PRIMARY_GROUP()
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.PrimaryGroup.value
+
+    # App container SID.
+    if TokenInformationClass == TokenAppContainerSid:
+        TokenInformation = TOKEN_APPCONTAINER_INFORMATION()
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.TokenAppContainer.value
+
+    # Integrity level SID.
+    if TokenInformationClass == TokenIntegrityLevel:
+        TokenInformation = TOKEN_MANDATORY_LABEL()
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.Label.Sid.value, TokenInformation.Label.Attributes
+
+    # Logon session LUID.
+    if TokenInformationClass == TokenOrigin:
+        TokenInformation = TOKEN_ORIGIN()
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.OriginatingLogonSession
+
+    # Primary or impersonation token.
+    if TokenInformationClass == TokenType:
+        TokenInformation = TOKEN_TYPE(0)
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.value
+
+    # Elevated token.
+    if TokenInformationClass == TokenElevation:
+        TokenInformation = TOKEN_ELEVATION(0)
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.value
+
+    # Security impersonation level.
+    if TokenInformationClass == TokenImpersonationLevel:
+        TokenInformation = SECURITY_IMPERSONATION_LEVEL(0)
+        _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation)
+        return TokenInformation.value
+
+    # Session ID and other DWORD values.
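+    # (TokenSessionId is the Terminal Services session identifier associated
+    #  with the token; TokenAppContainerNumber is the app container number.)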
+ if TokenInformationClass in (TokenSessionId, TokenAppContainerNumber): + TokenInformation = DWORD(0) + _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation) + return TokenInformation.value + + # Various boolean flags. + if TokenInformationClass in (TokenSandBoxInert, TokenHasRestrictions, TokenUIAccess, + TokenVirtualizationAllowed, TokenVirtualizationEnabled): + TokenInformation = DWORD(0) + _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation) + return bool(TokenInformation.value) + + # Linked token. + if TokenInformationClass == TokenLinkedToken: + TokenInformation = TOKEN_LINKED_TOKEN(0) + _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation) + return TokenHandle(TokenInformation.LinkedToken.value, bOwnership = True) + + # Token statistics. + if TokenInformationClass == TokenStatistics: + TokenInformation = TOKEN_STATISTICS() + _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation) + return TokenInformation # TODO add a class wrapper? + + # Currently unsupported flags. + raise NotImplementedError("TokenInformationClass(%i) not yet supported!" % TokenInformationClass) + +def _internal_GetTokenInformation(hTokenHandle, TokenInformationClass, TokenInformation): + _GetTokenInformation = windll.advapi32.GetTokenInformation + _GetTokenInformation.argtypes = [HANDLE, TOKEN_INFORMATION_CLASS, LPVOID, DWORD, PDWORD] + _GetTokenInformation.restype = bool + _GetTokenInformation.errcheck = RaiseIfZero + + ReturnLength = DWORD(0) + TokenInformationLength = SIZEOF(TokenInformation) + _GetTokenInformation(hTokenHandle, TokenInformationClass, byref(TokenInformation), TokenInformationLength, byref(ReturnLength)) + if ReturnLength.value != TokenInformationLength: + raise ctypes.WinError(ERROR_INSUFFICIENT_BUFFER) + return TokenInformation + +# BOOL WINAPI SetTokenInformation( +# __in HANDLE TokenHandle, +# __in TOKEN_INFORMATION_CLASS TokenInformationClass, +# __in LPVOID TokenInformation, +# __in DWORD TokenInformationLength +# ); + +# XXX TODO + +# BOOL WINAPI CreateProcessWithLogonW( +# __in LPCWSTR lpUsername, +# __in_opt LPCWSTR lpDomain, +# __in LPCWSTR lpPassword, +# __in DWORD dwLogonFlags, +# __in_opt LPCWSTR lpApplicationName, +# __inout_opt LPWSTR lpCommandLine, +# __in DWORD dwCreationFlags, +# __in_opt LPVOID lpEnvironment, +# __in_opt LPCWSTR lpCurrentDirectory, +# __in LPSTARTUPINFOW lpStartupInfo, +# __out LPPROCESS_INFORMATION lpProcessInfo +# ); +def CreateProcessWithLogonW(lpUsername = None, lpDomain = None, lpPassword = None, dwLogonFlags = 0, lpApplicationName = None, lpCommandLine = None, dwCreationFlags = 0, lpEnvironment = None, lpCurrentDirectory = None, lpStartupInfo = None): + _CreateProcessWithLogonW = windll.advapi32.CreateProcessWithLogonW + _CreateProcessWithLogonW.argtypes = [LPWSTR, LPWSTR, LPWSTR, DWORD, LPWSTR, LPWSTR, DWORD, LPVOID, LPWSTR, LPVOID, LPPROCESS_INFORMATION] + _CreateProcessWithLogonW.restype = bool + _CreateProcessWithLogonW.errcheck = RaiseIfZero + + if not lpUsername: + lpUsername = None + if not lpDomain: + lpDomain = None + if not lpPassword: + lpPassword = None + if not lpApplicationName: + lpApplicationName = None + if not lpCommandLine: + lpCommandLine = None + else: + lpCommandLine = ctypes.create_unicode_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine))) + if not lpEnvironment: + lpEnvironment = None + else: + lpEnvironment = ctypes.create_unicode_buffer(lpEnvironment) + if not lpCurrentDirectory: + 
lpCurrentDirectory = None + if not lpStartupInfo: + lpStartupInfo = STARTUPINFOW() + lpStartupInfo.cb = sizeof(STARTUPINFOW) + lpStartupInfo.lpReserved = 0 + lpStartupInfo.lpDesktop = 0 + lpStartupInfo.lpTitle = 0 + lpStartupInfo.dwFlags = 0 + lpStartupInfo.cbReserved2 = 0 + lpStartupInfo.lpReserved2 = 0 + lpProcessInformation = PROCESS_INFORMATION() + lpProcessInformation.hProcess = INVALID_HANDLE_VALUE + lpProcessInformation.hThread = INVALID_HANDLE_VALUE + lpProcessInformation.dwProcessId = 0 + lpProcessInformation.dwThreadId = 0 + _CreateProcessWithLogonW(lpUsername, lpDomain, lpPassword, dwLogonFlags, lpApplicationName, lpCommandLine, dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation)) + return ProcessInformation(lpProcessInformation) + +CreateProcessWithLogonA = MakeANSIVersion(CreateProcessWithLogonW) +CreateProcessWithLogon = DefaultStringType(CreateProcessWithLogonA, CreateProcessWithLogonW) + +# BOOL WINAPI CreateProcessWithTokenW( +# __in HANDLE hToken, +# __in DWORD dwLogonFlags, +# __in_opt LPCWSTR lpApplicationName, +# __inout_opt LPWSTR lpCommandLine, +# __in DWORD dwCreationFlags, +# __in_opt LPVOID lpEnvironment, +# __in_opt LPCWSTR lpCurrentDirectory, +# __in LPSTARTUPINFOW lpStartupInfo, +# __out LPPROCESS_INFORMATION lpProcessInfo +# ); +def CreateProcessWithTokenW(hToken = None, dwLogonFlags = 0, lpApplicationName = None, lpCommandLine = None, dwCreationFlags = 0, lpEnvironment = None, lpCurrentDirectory = None, lpStartupInfo = None): + _CreateProcessWithTokenW = windll.advapi32.CreateProcessWithTokenW + _CreateProcessWithTokenW.argtypes = [HANDLE, DWORD, LPWSTR, LPWSTR, DWORD, LPVOID, LPWSTR, LPVOID, LPPROCESS_INFORMATION] + _CreateProcessWithTokenW.restype = bool + _CreateProcessWithTokenW.errcheck = RaiseIfZero + + if not hToken: + hToken = None + if not lpApplicationName: + lpApplicationName = None + if not lpCommandLine: + lpCommandLine = None + else: + lpCommandLine = ctypes.create_unicode_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine))) + if not lpEnvironment: + lpEnvironment = None + else: + lpEnvironment = ctypes.create_unicode_buffer(lpEnvironment) + if not lpCurrentDirectory: + lpCurrentDirectory = None + if not lpStartupInfo: + lpStartupInfo = STARTUPINFOW() + lpStartupInfo.cb = sizeof(STARTUPINFOW) + lpStartupInfo.lpReserved = 0 + lpStartupInfo.lpDesktop = 0 + lpStartupInfo.lpTitle = 0 + lpStartupInfo.dwFlags = 0 + lpStartupInfo.cbReserved2 = 0 + lpStartupInfo.lpReserved2 = 0 + lpProcessInformation = PROCESS_INFORMATION() + lpProcessInformation.hProcess = INVALID_HANDLE_VALUE + lpProcessInformation.hThread = INVALID_HANDLE_VALUE + lpProcessInformation.dwProcessId = 0 + lpProcessInformation.dwThreadId = 0 + _CreateProcessWithTokenW(hToken, dwLogonFlags, lpApplicationName, lpCommandLine, dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation)) + return ProcessInformation(lpProcessInformation) + +CreateProcessWithTokenA = MakeANSIVersion(CreateProcessWithTokenW) +CreateProcessWithToken = DefaultStringType(CreateProcessWithTokenA, CreateProcessWithTokenW) + +# BOOL WINAPI CreateProcessAsUser( +# __in_opt HANDLE hToken, +# __in_opt LPCTSTR lpApplicationName, +# __inout_opt LPTSTR lpCommandLine, +# __in_opt LPSECURITY_ATTRIBUTES lpProcessAttributes, +# __in_opt LPSECURITY_ATTRIBUTES lpThreadAttributes, +# __in BOOL bInheritHandles, +# __in DWORD dwCreationFlags, +# __in_opt LPVOID lpEnvironment, +# __in_opt LPCTSTR lpCurrentDirectory, +# __in LPSTARTUPINFO 
lpStartupInfo, +# __out LPPROCESS_INFORMATION lpProcessInformation +# ); +def CreateProcessAsUserA(hToken = None, lpApplicationName = None, lpCommandLine=None, lpProcessAttributes=None, lpThreadAttributes=None, bInheritHandles=False, dwCreationFlags=0, lpEnvironment=None, lpCurrentDirectory=None, lpStartupInfo=None): + _CreateProcessAsUserA = windll.advapi32.CreateProcessAsUserA + _CreateProcessAsUserA.argtypes = [HANDLE, LPSTR, LPSTR, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPSTR, LPVOID, LPPROCESS_INFORMATION] + _CreateProcessAsUserA.restype = bool + _CreateProcessAsUserA.errcheck = RaiseIfZero + + if not lpApplicationName: + lpApplicationName = None + if not lpCommandLine: + lpCommandLine = None + else: + lpCommandLine = ctypes.create_string_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine))) + if not lpEnvironment: + lpEnvironment = None + else: + lpEnvironment = ctypes.create_string_buffer(lpEnvironment) + if not lpCurrentDirectory: + lpCurrentDirectory = None + if not lpProcessAttributes: + lpProcessAttributes = None + else: + lpProcessAttributes = byref(lpProcessAttributes) + if not lpThreadAttributes: + lpThreadAttributes = None + else: + lpThreadAttributes = byref(lpThreadAttributes) + if not lpStartupInfo: + lpStartupInfo = STARTUPINFO() + lpStartupInfo.cb = sizeof(STARTUPINFO) + lpStartupInfo.lpReserved = 0 + lpStartupInfo.lpDesktop = 0 + lpStartupInfo.lpTitle = 0 + lpStartupInfo.dwFlags = 0 + lpStartupInfo.cbReserved2 = 0 + lpStartupInfo.lpReserved2 = 0 + lpProcessInformation = PROCESS_INFORMATION() + lpProcessInformation.hProcess = INVALID_HANDLE_VALUE + lpProcessInformation.hThread = INVALID_HANDLE_VALUE + lpProcessInformation.dwProcessId = 0 + lpProcessInformation.dwThreadId = 0 + _CreateProcessAsUserA(hToken, lpApplicationName, lpCommandLine, lpProcessAttributes, lpThreadAttributes, bool(bInheritHandles), dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation)) + return ProcessInformation(lpProcessInformation) + +def CreateProcessAsUserW(hToken = None, lpApplicationName = None, lpCommandLine=None, lpProcessAttributes=None, lpThreadAttributes=None, bInheritHandles=False, dwCreationFlags=0, lpEnvironment=None, lpCurrentDirectory=None, lpStartupInfo=None): + _CreateProcessAsUserW = windll.advapi32.CreateProcessAsUserW + _CreateProcessAsUserW.argtypes = [HANDLE, LPWSTR, LPWSTR, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPWSTR, LPVOID, LPPROCESS_INFORMATION] + _CreateProcessAsUserW.restype = bool + _CreateProcessAsUserW.errcheck = RaiseIfZero + + if not lpApplicationName: + lpApplicationName = None + if not lpCommandLine: + lpCommandLine = None + else: + lpCommandLine = ctypes.create_unicode_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine))) + if not lpEnvironment: + lpEnvironment = None + else: + lpEnvironment = ctypes.create_unicode_buffer(lpEnvironment) + if not lpCurrentDirectory: + lpCurrentDirectory = None + if not lpProcessAttributes: + lpProcessAttributes = None + else: + lpProcessAttributes = byref(lpProcessAttributes) + if not lpThreadAttributes: + lpThreadAttributes = None + else: + lpThreadAttributes = byref(lpThreadAttributes) + if not lpStartupInfo: + lpStartupInfo = STARTUPINFO() + lpStartupInfo.cb = sizeof(STARTUPINFO) + lpStartupInfo.lpReserved = 0 + lpStartupInfo.lpDesktop = 0 + lpStartupInfo.lpTitle = 0 + lpStartupInfo.dwFlags = 0 + lpStartupInfo.cbReserved2 = 0 + lpStartupInfo.lpReserved2 = 0 + lpProcessInformation = PROCESS_INFORMATION() + 
lpProcessInformation.hProcess = INVALID_HANDLE_VALUE + lpProcessInformation.hThread = INVALID_HANDLE_VALUE + lpProcessInformation.dwProcessId = 0 + lpProcessInformation.dwThreadId = 0 + _CreateProcessAsUserW(hToken, lpApplicationName, lpCommandLine, lpProcessAttributes, lpThreadAttributes, bool(bInheritHandles), dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation)) + return ProcessInformation(lpProcessInformation) + +CreateProcessAsUser = GuessStringType(CreateProcessAsUserA, CreateProcessAsUserW) + +# VOID CALLBACK WaitChainCallback( +# HWCT WctHandle, +# DWORD_PTR Context, +# DWORD CallbackStatus, +# LPDWORD NodeCount, +# PWAITCHAIN_NODE_INFO NodeInfoArray, +# LPBOOL IsCycle +# ); +PWAITCHAINCALLBACK = WINFUNCTYPE(HWCT, DWORD_PTR, DWORD, LPDWORD, PWAITCHAIN_NODE_INFO, LPBOOL) + +# HWCT WINAPI OpenThreadWaitChainSession( +# __in DWORD Flags, +# __in_opt PWAITCHAINCALLBACK callback +# ); +def OpenThreadWaitChainSession(Flags = 0, callback = None): + _OpenThreadWaitChainSession = windll.advapi32.OpenThreadWaitChainSession + _OpenThreadWaitChainSession.argtypes = [DWORD, PVOID] + _OpenThreadWaitChainSession.restype = HWCT + _OpenThreadWaitChainSession.errcheck = RaiseIfZero + + if callback is not None: + callback = PWAITCHAINCALLBACK(callback) + aHandle = _OpenThreadWaitChainSession(Flags, callback) + return ThreadWaitChainSessionHandle(aHandle) + +# BOOL WINAPI GetThreadWaitChain( +# _In_ HWCT WctHandle, +# _In_opt_ DWORD_PTR Context, +# _In_ DWORD Flags, +# _In_ DWORD ThreadId, +# _Inout_ LPDWORD NodeCount, +# _Out_ PWAITCHAIN_NODE_INFO NodeInfoArray, +# _Out_ LPBOOL IsCycle +# ); +def GetThreadWaitChain(WctHandle, Context = None, Flags = WCTP_GETINFO_ALL_FLAGS, ThreadId = -1, NodeCount = WCT_MAX_NODE_COUNT): + _GetThreadWaitChain = windll.advapi32.GetThreadWaitChain + _GetThreadWaitChain.argtypes = [HWCT, LPDWORD, DWORD, DWORD, LPDWORD, PWAITCHAIN_NODE_INFO, LPBOOL] + _GetThreadWaitChain.restype = bool + _GetThreadWaitChain.errcheck = RaiseIfZero + + dwNodeCount = DWORD(NodeCount) + NodeInfoArray = (WAITCHAIN_NODE_INFO * NodeCount)() + IsCycle = BOOL(0) + _GetThreadWaitChain(WctHandle, Context, Flags, ThreadId, byref(dwNodeCount), ctypes.cast(ctypes.pointer(NodeInfoArray), PWAITCHAIN_NODE_INFO), byref(IsCycle)) + while dwNodeCount.value > NodeCount: + NodeCount = dwNodeCount.value + NodeInfoArray = (WAITCHAIN_NODE_INFO * NodeCount)() + _GetThreadWaitChain(WctHandle, Context, Flags, ThreadId, byref(dwNodeCount), ctypes.cast(ctypes.pointer(NodeInfoArray), PWAITCHAIN_NODE_INFO), byref(IsCycle)) + return ( + [ WaitChainNodeInfo(NodeInfoArray[index]) for index in compat.xrange(dwNodeCount.value) ], + bool(IsCycle.value) + ) + +# VOID WINAPI CloseThreadWaitChainSession( +# __in HWCT WctHandle +# ); +def CloseThreadWaitChainSession(WctHandle): + _CloseThreadWaitChainSession = windll.advapi32.CloseThreadWaitChainSession + _CloseThreadWaitChainSession.argtypes = [HWCT] + _CloseThreadWaitChainSession(WctHandle) + +# BOOL WINAPI SaferCreateLevel( +# __in DWORD dwScopeId, +# __in DWORD dwLevelId, +# __in DWORD OpenFlags, +# __out SAFER_LEVEL_HANDLE *pLevelHandle, +# __reserved LPVOID lpReserved +# ); +def SaferCreateLevel(dwScopeId=SAFER_SCOPEID_USER, dwLevelId=SAFER_LEVELID_NORMALUSER, OpenFlags=0): + _SaferCreateLevel = windll.advapi32.SaferCreateLevel + _SaferCreateLevel.argtypes = [DWORD, DWORD, DWORD, POINTER(SAFER_LEVEL_HANDLE), LPVOID] + _SaferCreateLevel.restype = BOOL + _SaferCreateLevel.errcheck = RaiseIfZero + + hLevelHandle = 
SAFER_LEVEL_HANDLE(INVALID_HANDLE_VALUE) + _SaferCreateLevel(dwScopeId, dwLevelId, OpenFlags, byref(hLevelHandle), None) + return SaferLevelHandle(hLevelHandle.value) + +# BOOL WINAPI SaferIdentifyLevel( +# __in DWORD dwNumProperties, +# __in_opt PSAFER_CODE_PROPERTIES pCodeProperties, +# __out SAFER_LEVEL_HANDLE *pLevelHandle, +# __reserved LPVOID lpReserved +# ); + +# XXX TODO + +# BOOL WINAPI SaferComputeTokenFromLevel( +# __in SAFER_LEVEL_HANDLE LevelHandle, +# __in_opt HANDLE InAccessToken, +# __out PHANDLE OutAccessToken, +# __in DWORD dwFlags, +# __inout_opt LPVOID lpReserved +# ); +def SaferComputeTokenFromLevel(LevelHandle, InAccessToken=None, dwFlags=0): + _SaferComputeTokenFromLevel = windll.advapi32.SaferComputeTokenFromLevel + _SaferComputeTokenFromLevel.argtypes = [SAFER_LEVEL_HANDLE, HANDLE, PHANDLE, DWORD, LPDWORD] + _SaferComputeTokenFromLevel.restype = BOOL + _SaferComputeTokenFromLevel.errcheck = RaiseIfZero + + OutAccessToken = HANDLE(INVALID_HANDLE_VALUE) + lpReserved = DWORD(0) + _SaferComputeTokenFromLevel(LevelHandle, InAccessToken, byref(OutAccessToken), dwFlags, byref(lpReserved)) + return TokenHandle(OutAccessToken.value), lpReserved.value + +# BOOL WINAPI SaferCloseLevel( +# __in SAFER_LEVEL_HANDLE hLevelHandle +# ); +def SaferCloseLevel(hLevelHandle): + _SaferCloseLevel = windll.advapi32.SaferCloseLevel + _SaferCloseLevel.argtypes = [SAFER_LEVEL_HANDLE] + _SaferCloseLevel.restype = BOOL + _SaferCloseLevel.errcheck = RaiseIfZero + + if hasattr(hLevelHandle, 'value'): + _SaferCloseLevel(hLevelHandle.value) + else: + _SaferCloseLevel(hLevelHandle) + +# BOOL SaferiIsExecutableFileType( +# __in LPCWSTR szFullPath, +# __in BOOLEAN bFromShellExecute +# ); +def SaferiIsExecutableFileType(szFullPath, bFromShellExecute = False): + _SaferiIsExecutableFileType = windll.advapi32.SaferiIsExecutableFileType + _SaferiIsExecutableFileType.argtypes = [LPWSTR, BOOLEAN] + _SaferiIsExecutableFileType.restype = BOOL + _SaferiIsExecutableFileType.errcheck = RaiseIfLastError + + SetLastError(ERROR_SUCCESS) + return bool(_SaferiIsExecutableFileType(compat.unicode(szFullPath), bFromShellExecute)) + +# useful alias since I'm likely to misspell it :P +SaferIsExecutableFileType = SaferiIsExecutableFileType + +#------------------------------------------------------------------------------ + +# LONG WINAPI RegCloseKey( +# __in HKEY hKey +# ); +def RegCloseKey(hKey): + if hasattr(hKey, 'value'): + value = hKey.value + else: + value = hKey + + if value in ( + HKEY_CLASSES_ROOT, + HKEY_CURRENT_USER, + HKEY_LOCAL_MACHINE, + HKEY_USERS, + HKEY_PERFORMANCE_DATA, + HKEY_CURRENT_CONFIG + ): + return + + _RegCloseKey = windll.advapi32.RegCloseKey + _RegCloseKey.argtypes = [HKEY] + _RegCloseKey.restype = LONG + _RegCloseKey.errcheck = RaiseIfNotErrorSuccess + _RegCloseKey(hKey) + +# LONG WINAPI RegConnectRegistry( +# __in_opt LPCTSTR lpMachineName, +# __in HKEY hKey, +# __out PHKEY phkResult +# ); +def RegConnectRegistryA(lpMachineName = None, hKey = HKEY_LOCAL_MACHINE): + _RegConnectRegistryA = windll.advapi32.RegConnectRegistryA + _RegConnectRegistryA.argtypes = [LPSTR, HKEY, PHKEY] + _RegConnectRegistryA.restype = LONG + _RegConnectRegistryA.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegConnectRegistryA(lpMachineName, hKey, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +def RegConnectRegistryW(lpMachineName = None, hKey = HKEY_LOCAL_MACHINE): + _RegConnectRegistryW = windll.advapi32.RegConnectRegistryW + _RegConnectRegistryW.argtypes = [LPWSTR, 
HKEY, PHKEY] + _RegConnectRegistryW.restype = LONG + _RegConnectRegistryW.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegConnectRegistryW(lpMachineName, hKey, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +RegConnectRegistry = GuessStringType(RegConnectRegistryA, RegConnectRegistryW) + +# LONG WINAPI RegCreateKey( +# __in HKEY hKey, +# __in_opt LPCTSTR lpSubKey, +# __out PHKEY phkResult +# ); +def RegCreateKeyA(hKey = HKEY_LOCAL_MACHINE, lpSubKey = None): + _RegCreateKeyA = windll.advapi32.RegCreateKeyA + _RegCreateKeyA.argtypes = [HKEY, LPSTR, PHKEY] + _RegCreateKeyA.restype = LONG + _RegCreateKeyA.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegCreateKeyA(hKey, lpSubKey, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +def RegCreateKeyW(hKey = HKEY_LOCAL_MACHINE, lpSubKey = None): + _RegCreateKeyW = windll.advapi32.RegCreateKeyW + _RegCreateKeyW.argtypes = [HKEY, LPWSTR, PHKEY] + _RegCreateKeyW.restype = LONG + _RegCreateKeyW.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegCreateKeyW(hKey, lpSubKey, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +RegCreateKey = GuessStringType(RegCreateKeyA, RegCreateKeyW) + +# LONG WINAPI RegCreateKeyEx( +# __in HKEY hKey, +# __in LPCTSTR lpSubKey, +# __reserved DWORD Reserved, +# __in_opt LPTSTR lpClass, +# __in DWORD dwOptions, +# __in REGSAM samDesired, +# __in_opt LPSECURITY_ATTRIBUTES lpSecurityAttributes, +# __out PHKEY phkResult, +# __out_opt LPDWORD lpdwDisposition +# ); + +# XXX TODO + +# LONG WINAPI RegOpenKey( +# __in HKEY hKey, +# __in_opt LPCTSTR lpSubKey, +# __out PHKEY phkResult +# ); +def RegOpenKeyA(hKey = HKEY_LOCAL_MACHINE, lpSubKey = None): + _RegOpenKeyA = windll.advapi32.RegOpenKeyA + _RegOpenKeyA.argtypes = [HKEY, LPSTR, PHKEY] + _RegOpenKeyA.restype = LONG + _RegOpenKeyA.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegOpenKeyA(hKey, lpSubKey, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +def RegOpenKeyW(hKey = HKEY_LOCAL_MACHINE, lpSubKey = None): + _RegOpenKeyW = windll.advapi32.RegOpenKeyW + _RegOpenKeyW.argtypes = [HKEY, LPWSTR, PHKEY] + _RegOpenKeyW.restype = LONG + _RegOpenKeyW.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegOpenKeyW(hKey, lpSubKey, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +RegOpenKey = GuessStringType(RegOpenKeyA, RegOpenKeyW) + +# LONG WINAPI RegOpenKeyEx( +# __in HKEY hKey, +# __in_opt LPCTSTR lpSubKey, +# __reserved DWORD ulOptions, +# __in REGSAM samDesired, +# __out PHKEY phkResult +# ); +def RegOpenKeyExA(hKey = HKEY_LOCAL_MACHINE, lpSubKey = None, samDesired = KEY_ALL_ACCESS): + _RegOpenKeyExA = windll.advapi32.RegOpenKeyExA + _RegOpenKeyExA.argtypes = [HKEY, LPSTR, DWORD, REGSAM, PHKEY] + _RegOpenKeyExA.restype = LONG + _RegOpenKeyExA.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegOpenKeyExA(hKey, lpSubKey, 0, samDesired, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +def RegOpenKeyExW(hKey = HKEY_LOCAL_MACHINE, lpSubKey = None, samDesired = KEY_ALL_ACCESS): + _RegOpenKeyExW = windll.advapi32.RegOpenKeyExW + _RegOpenKeyExW.argtypes = [HKEY, LPWSTR, DWORD, REGSAM, PHKEY] + _RegOpenKeyExW.restype = LONG + _RegOpenKeyExW.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegOpenKeyExW(hKey, lpSubKey, 0, samDesired, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + 
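+# Illustrative sketch (kept as a comment so nothing runs at import time) of how
+# these registry wrappers are typically combined; the key and value names below
+# are placeholders only:
+#
+#     hKey = RegOpenKeyEx(HKEY_LOCAL_MACHINE, "SOFTWARE\\Example\\Key")
+#     try:
+#         data, dwType = RegQueryValueEx(hKey, "ExampleValue")
+#     finally:
+#         RegCloseKey(hKey)
+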
+RegOpenKeyEx = GuessStringType(RegOpenKeyExA, RegOpenKeyExW) + +# LONG WINAPI RegOpenCurrentUser( +# __in REGSAM samDesired, +# __out PHKEY phkResult +# ); +def RegOpenCurrentUser(samDesired = KEY_ALL_ACCESS): + _RegOpenCurrentUser = windll.advapi32.RegOpenCurrentUser + _RegOpenCurrentUser.argtypes = [REGSAM, PHKEY] + _RegOpenCurrentUser.restype = LONG + _RegOpenCurrentUser.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegOpenCurrentUser(samDesired, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +# LONG WINAPI RegOpenUserClassesRoot( +# __in HANDLE hToken, +# __reserved DWORD dwOptions, +# __in REGSAM samDesired, +# __out PHKEY phkResult +# ); +def RegOpenUserClassesRoot(hToken, samDesired = KEY_ALL_ACCESS): + _RegOpenUserClassesRoot = windll.advapi32.RegOpenUserClassesRoot + _RegOpenUserClassesRoot.argtypes = [HANDLE, DWORD, REGSAM, PHKEY] + _RegOpenUserClassesRoot.restype = LONG + _RegOpenUserClassesRoot.errcheck = RaiseIfNotErrorSuccess + + hkResult = HKEY(INVALID_HANDLE_VALUE) + _RegOpenUserClassesRoot(hToken, 0, samDesired, byref(hkResult)) + return RegistryKeyHandle(hkResult.value) + +# LONG WINAPI RegQueryValue( +# __in HKEY hKey, +# __in_opt LPCTSTR lpSubKey, +# __out_opt LPTSTR lpValue, +# __inout_opt PLONG lpcbValue +# ); +def RegQueryValueA(hKey, lpSubKey = None): + _RegQueryValueA = windll.advapi32.RegQueryValueA + _RegQueryValueA.argtypes = [HKEY, LPSTR, LPVOID, PLONG] + _RegQueryValueA.restype = LONG + _RegQueryValueA.errcheck = RaiseIfNotErrorSuccess + + cbValue = LONG(0) + _RegQueryValueA(hKey, lpSubKey, None, byref(cbValue)) + lpValue = ctypes.create_string_buffer(cbValue.value) + _RegQueryValueA(hKey, lpSubKey, lpValue, byref(cbValue)) + return lpValue.value + +def RegQueryValueW(hKey, lpSubKey = None): + _RegQueryValueW = windll.advapi32.RegQueryValueW + _RegQueryValueW.argtypes = [HKEY, LPWSTR, LPVOID, PLONG] + _RegQueryValueW.restype = LONG + _RegQueryValueW.errcheck = RaiseIfNotErrorSuccess + + cbValue = LONG(0) + _RegQueryValueW(hKey, lpSubKey, None, byref(cbValue)) + lpValue = ctypes.create_unicode_buffer(cbValue.value * sizeof(WCHAR)) + _RegQueryValueW(hKey, lpSubKey, lpValue, byref(cbValue)) + return lpValue.value + +RegQueryValue = GuessStringType(RegQueryValueA, RegQueryValueW) + +# LONG WINAPI RegQueryValueEx( +# __in HKEY hKey, +# __in_opt LPCTSTR lpValueName, +# __reserved LPDWORD lpReserved, +# __out_opt LPDWORD lpType, +# __out_opt LPBYTE lpData, +# __inout_opt LPDWORD lpcbData +# ); +def _internal_RegQueryValueEx(ansi, hKey, lpValueName = None, bGetData = True): + _RegQueryValueEx = _caller_RegQueryValueEx(ansi) + + cbData = DWORD(0) + dwType = DWORD(-1) + _RegQueryValueEx(hKey, lpValueName, None, byref(dwType), None, byref(cbData)) + Type = dwType.value + + if not bGetData: + return cbData.value, Type + + if Type in (REG_DWORD, REG_DWORD_BIG_ENDIAN): # REG_DWORD_LITTLE_ENDIAN + if cbData.value != 4: + raise ValueError("REG_DWORD value of size %d" % cbData.value) + dwData = DWORD(0) + _RegQueryValueEx(hKey, lpValueName, None, None, byref(dwData), byref(cbData)) + return dwData.value, Type + + if Type == REG_QWORD: # REG_QWORD_LITTLE_ENDIAN + if cbData.value != 8: + raise ValueError("REG_QWORD value of size %d" % cbData.value) + qwData = QWORD(long(0)) + _RegQueryValueEx(hKey, lpValueName, None, None, byref(qwData), byref(cbData)) + return qwData.value, Type + + if Type in (REG_SZ, REG_EXPAND_SZ): + if ansi: + szData = ctypes.create_string_buffer(cbData.value) + else: + szData = 
ctypes.create_unicode_buffer(cbData.value) + _RegQueryValueEx(hKey, lpValueName, None, None, byref(szData), byref(cbData)) + return szData.value, Type + + if Type == REG_MULTI_SZ: + if ansi: + szData = ctypes.create_string_buffer(cbData.value) + else: + szData = ctypes.create_unicode_buffer(cbData.value) + _RegQueryValueEx(hKey, lpValueName, None, None, byref(szData), byref(cbData)) + Data = szData[:] + if ansi: + aData = Data.split('\0') + else: + aData = Data.split(u'\0') + aData = [token for token in aData if token] + return aData, Type + + if Type == REG_LINK: + szData = ctypes.create_unicode_buffer(cbData.value) + _RegQueryValueEx(hKey, lpValueName, None, None, byref(szData), byref(cbData)) + return szData.value, Type + + # REG_BINARY, REG_NONE, and any future types + szData = ctypes.create_string_buffer(cbData.value) + _RegQueryValueEx(hKey, lpValueName, None, None, byref(szData), byref(cbData)) + return szData.raw, Type + +def _caller_RegQueryValueEx(ansi): + if ansi: + _RegQueryValueEx = windll.advapi32.RegQueryValueExA + _RegQueryValueEx.argtypes = [HKEY, LPSTR, LPVOID, PDWORD, LPVOID, PDWORD] + else: + _RegQueryValueEx = windll.advapi32.RegQueryValueExW + _RegQueryValueEx.argtypes = [HKEY, LPWSTR, LPVOID, PDWORD, LPVOID, PDWORD] + _RegQueryValueEx.restype = LONG + _RegQueryValueEx.errcheck = RaiseIfNotErrorSuccess + return _RegQueryValueEx + +# see _internal_RegQueryValueEx +def RegQueryValueExA(hKey, lpValueName = None, bGetData = True): + return _internal_RegQueryValueEx(True, hKey, lpValueName, bGetData) + +# see _internal_RegQueryValueEx +def RegQueryValueExW(hKey, lpValueName = None, bGetData = True): + return _internal_RegQueryValueEx(False, hKey, lpValueName, bGetData) + +RegQueryValueEx = GuessStringType(RegQueryValueExA, RegQueryValueExW) + +# LONG WINAPI RegSetValueEx( +# __in HKEY hKey, +# __in_opt LPCTSTR lpValueName, +# __reserved DWORD Reserved, +# __in DWORD dwType, +# __in_opt const BYTE *lpData, +# __in DWORD cbData +# ); +def RegSetValueEx(hKey, lpValueName = None, lpData = None, dwType = None): + + # Determine which version of the API to use, ANSI or Widechar. + if lpValueName is None: + if isinstance(lpData, GuessStringType.t_ansi): + ansi = True + elif isinstance(lpData, GuessStringType.t_unicode): + ansi = False + else: + ansi = (GuessStringType.t_ansi == GuessStringType.t_default) + elif isinstance(lpValueName, GuessStringType.t_ansi): + ansi = True + elif isinstance(lpValueName, GuessStringType.t_unicode): + ansi = False + else: + raise TypeError("String expected, got %s instead" % type(lpValueName)) + + # Autodetect the type when not given. + # TODO: improve detection of DWORD and QWORD by seeing if the value "fits". + if dwType is None: + if lpValueName is None: + dwType = REG_SZ + elif lpData is None: + dwType = REG_NONE + elif isinstance(lpData, GuessStringType.t_ansi): + dwType = REG_SZ + elif isinstance(lpData, GuessStringType.t_unicode): + dwType = REG_SZ + elif isinstance(lpData, int): + dwType = REG_DWORD + elif isinstance(lpData, long): + dwType = REG_QWORD + else: + dwType = REG_BINARY + + # Load the ctypes caller. + if ansi: + _RegSetValueEx = windll.advapi32.RegSetValueExA + _RegSetValueEx.argtypes = [HKEY, LPSTR, DWORD, DWORD, LPVOID, DWORD] + else: + _RegSetValueEx = windll.advapi32.RegSetValueExW + _RegSetValueEx.argtypes = [HKEY, LPWSTR, DWORD, DWORD, LPVOID, DWORD] + _RegSetValueEx.restype = LONG + _RegSetValueEx.errcheck = RaiseIfNotErrorSuccess + + # Convert the arguments so ctypes can understand them. 
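+    # For example (illustrative calls, not executed here), the detection above
+    # resolves these as follows:
+    #   RegSetValueEx(hKey, u'DisplayName', u'My Service')      -> wide API, REG_SZ
+    #   RegSetValueEx(hKey, 'Count', 5)                          -> ANSI API, REG_DWORD
+    #   RegSetValueEx(hKey, 'Blob', '\x01\x02\x03', REG_BINARY)  -> explicit dwType is used as given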
+ if lpData is None: + DataRef = None + DataSize = 0 + else: + if dwType in (REG_DWORD, REG_DWORD_BIG_ENDIAN): # REG_DWORD_LITTLE_ENDIAN + Data = DWORD(lpData) + elif dwType == REG_QWORD: # REG_QWORD_LITTLE_ENDIAN + Data = QWORD(lpData) + elif dwType in (REG_SZ, REG_EXPAND_SZ): + if ansi: + Data = ctypes.create_string_buffer(lpData) + else: + Data = ctypes.create_unicode_buffer(lpData) + elif dwType == REG_MULTI_SZ: + if ansi: + Data = ctypes.create_string_buffer('\0'.join(lpData) + '\0\0') + else: + Data = ctypes.create_unicode_buffer(u'\0'.join(lpData) + u'\0\0') + elif dwType == REG_LINK: + Data = ctypes.create_unicode_buffer(lpData) + else: + Data = ctypes.create_string_buffer(lpData) + DataRef = byref(Data) + DataSize = sizeof(Data) + + # Call the API with the converted arguments. + _RegSetValueEx(hKey, lpValueName, 0, dwType, DataRef, DataSize) + +# No "GuessStringType" here since detection is done inside. +RegSetValueExA = RegSetValueExW = RegSetValueEx + +# LONG WINAPI RegEnumKey( +# __in HKEY hKey, +# __in DWORD dwIndex, +# __out LPTSTR lpName, +# __in DWORD cchName +# ); +def RegEnumKeyA(hKey, dwIndex): + _RegEnumKeyA = windll.advapi32.RegEnumKeyA + _RegEnumKeyA.argtypes = [HKEY, DWORD, LPSTR, DWORD] + _RegEnumKeyA.restype = LONG + + cchName = 1024 + while True: + lpName = ctypes.create_string_buffer(cchName) + errcode = _RegEnumKeyA(hKey, dwIndex, lpName, cchName) + if errcode != ERROR_MORE_DATA: + break + cchName = cchName + 1024 + if cchName > 65536: + raise ctypes.WinError(errcode) + if errcode == ERROR_NO_MORE_ITEMS: + return None + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return lpName.value + +def RegEnumKeyW(hKey, dwIndex): + _RegEnumKeyW = windll.advapi32.RegEnumKeyW + _RegEnumKeyW.argtypes = [HKEY, DWORD, LPWSTR, DWORD] + _RegEnumKeyW.restype = LONG + + cchName = 512 + while True: + lpName = ctypes.create_unicode_buffer(cchName) + errcode = _RegEnumKeyW(hKey, dwIndex, lpName, cchName * 2) + if errcode != ERROR_MORE_DATA: + break + cchName = cchName + 512 + if cchName > 32768: + raise ctypes.WinError(errcode) + if errcode == ERROR_NO_MORE_ITEMS: + return None + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return lpName.value + +RegEnumKey = DefaultStringType(RegEnumKeyA, RegEnumKeyW) + +# LONG WINAPI RegEnumKeyEx( +# __in HKEY hKey, +# __in DWORD dwIndex, +# __out LPTSTR lpName, +# __inout LPDWORD lpcName, +# __reserved LPDWORD lpReserved, +# __inout LPTSTR lpClass, +# __inout_opt LPDWORD lpcClass, +# __out_opt PFILETIME lpftLastWriteTime +# ); + +# XXX TODO + +# LONG WINAPI RegEnumValue( +# __in HKEY hKey, +# __in DWORD dwIndex, +# __out LPTSTR lpValueName, +# __inout LPDWORD lpcchValueName, +# __reserved LPDWORD lpReserved, +# __out_opt LPDWORD lpType, +# __out_opt LPBYTE lpData, +# __inout_opt LPDWORD lpcbData +# ); +def _internal_RegEnumValue(ansi, hKey, dwIndex, bGetData = True): + if ansi: + _RegEnumValue = windll.advapi32.RegEnumValueA + _RegEnumValue.argtypes = [HKEY, DWORD, LPSTR, LPDWORD, LPVOID, LPDWORD, LPVOID, LPDWORD] + else: + _RegEnumValue = windll.advapi32.RegEnumValueW + _RegEnumValue.argtypes = [HKEY, DWORD, LPWSTR, LPDWORD, LPVOID, LPDWORD, LPVOID, LPDWORD] + _RegEnumValue.restype = LONG + + cchValueName = DWORD(1024) + dwType = DWORD(-1) + lpcchValueName = byref(cchValueName) + lpType = byref(dwType) + if ansi: + lpValueName = ctypes.create_string_buffer(cchValueName.value) + else: + lpValueName = ctypes.create_unicode_buffer(cchValueName.value) + if bGetData: + cbData = DWORD(0) + lpcbData = byref(cbData) + 
else: + lpcbData = None + lpData = None + errcode = _RegEnumValue(hKey, dwIndex, lpValueName, lpcchValueName, None, lpType, lpData, lpcbData) + + if errcode == ERROR_MORE_DATA or (bGetData and errcode == ERROR_SUCCESS): + if ansi: + cchValueName.value = cchValueName.value + sizeof(CHAR) + lpValueName = ctypes.create_string_buffer(cchValueName.value) + else: + cchValueName.value = cchValueName.value + sizeof(WCHAR) + lpValueName = ctypes.create_unicode_buffer(cchValueName.value) + + if bGetData: + Type = dwType.value + + if Type in (REG_DWORD, REG_DWORD_BIG_ENDIAN): # REG_DWORD_LITTLE_ENDIAN + if cbData.value != sizeof(DWORD): + raise ValueError("REG_DWORD value of size %d" % cbData.value) + Data = DWORD(0) + + elif Type == REG_QWORD: # REG_QWORD_LITTLE_ENDIAN + if cbData.value != sizeof(QWORD): + raise ValueError("REG_QWORD value of size %d" % cbData.value) + Data = QWORD(long(0)) + + elif Type in (REG_SZ, REG_EXPAND_SZ, REG_MULTI_SZ): + if ansi: + Data = ctypes.create_string_buffer(cbData.value) + else: + Data = ctypes.create_unicode_buffer(cbData.value) + + elif Type == REG_LINK: + Data = ctypes.create_unicode_buffer(cbData.value) + + else: # REG_BINARY, REG_NONE, and any future types + Data = ctypes.create_string_buffer(cbData.value) + + lpData = byref(Data) + + errcode = _RegEnumValue(hKey, dwIndex, lpValueName, lpcchValueName, None, lpType, lpData, lpcbData) + + if errcode == ERROR_NO_MORE_ITEMS: + return None + #if errcode != ERROR_SUCCESS: + # raise ctypes.WinError(errcode) + + if not bGetData: + return lpValueName.value, dwType.value + + if Type in (REG_DWORD, REG_DWORD_BIG_ENDIAN, REG_QWORD, REG_SZ, REG_EXPAND_SZ, REG_LINK): # REG_DWORD_LITTLE_ENDIAN, REG_QWORD_LITTLE_ENDIAN + return lpValueName.value, dwType.value, Data.value + + if Type == REG_MULTI_SZ: + sData = Data[:] + del Data + if ansi: + aData = sData.split('\0') + else: + aData = sData.split(u'\0') + aData = [token for token in aData if token] + return lpValueName.value, dwType.value, aData + + # REG_BINARY, REG_NONE, and any future types + return lpValueName.value, dwType.value, Data.raw + +def RegEnumValueA(hKey, dwIndex, bGetData = True): + return _internal_RegEnumValue(True, hKey, dwIndex, bGetData) + +def RegEnumValueW(hKey, dwIndex, bGetData = True): + return _internal_RegEnumValue(False, hKey, dwIndex, bGetData) + +RegEnumValue = DefaultStringType(RegEnumValueA, RegEnumValueW) + +# XXX TODO + +# LONG WINAPI RegSetKeyValue( +# __in HKEY hKey, +# __in_opt LPCTSTR lpSubKey, +# __in_opt LPCTSTR lpValueName, +# __in DWORD dwType, +# __in_opt LPCVOID lpData, +# __in DWORD cbData +# ); + +# XXX TODO + +# LONG WINAPI RegQueryMultipleValues( +# __in HKEY hKey, +# __out PVALENT val_list, +# __in DWORD num_vals, +# __out_opt LPTSTR lpValueBuf, +# __inout_opt LPDWORD ldwTotsize +# ); + +# XXX TODO + +# LONG WINAPI RegDeleteValue( +# __in HKEY hKey, +# __in_opt LPCTSTR lpValueName +# ); +def RegDeleteValueA(hKeySrc, lpValueName = None): + _RegDeleteValueA = windll.advapi32.RegDeleteValueA + _RegDeleteValueA.argtypes = [HKEY, LPSTR] + _RegDeleteValueA.restype = LONG + _RegDeleteValueA.errcheck = RaiseIfNotErrorSuccess + _RegDeleteValueA(hKeySrc, lpValueName) +def RegDeleteValueW(hKeySrc, lpValueName = None): + _RegDeleteValueW = windll.advapi32.RegDeleteValueW + _RegDeleteValueW.argtypes = [HKEY, LPWSTR] + _RegDeleteValueW.restype = LONG + _RegDeleteValueW.errcheck = RaiseIfNotErrorSuccess + _RegDeleteValueW(hKeySrc, lpValueName) +RegDeleteValue = GuessStringType(RegDeleteValueA, RegDeleteValueW) + +# LONG WINAPI 
RegDeleteKeyValue( +# __in HKEY hKey, +# __in_opt LPCTSTR lpSubKey, +# __in_opt LPCTSTR lpValueName +# ); +def RegDeleteKeyValueA(hKeySrc, lpSubKey = None, lpValueName = None): + _RegDeleteKeyValueA = windll.advapi32.RegDeleteKeyValueA + _RegDeleteKeyValueA.argtypes = [HKEY, LPSTR, LPSTR] + _RegDeleteKeyValueA.restype = LONG + _RegDeleteKeyValueA.errcheck = RaiseIfNotErrorSuccess + _RegDeleteKeyValueA(hKeySrc, lpSubKey, lpValueName) +def RegDeleteKeyValueW(hKeySrc, lpSubKey = None, lpValueName = None): + _RegDeleteKeyValueW = windll.advapi32.RegDeleteKeyValueW + _RegDeleteKeyValueW.argtypes = [HKEY, LPWSTR, LPWSTR] + _RegDeleteKeyValueW.restype = LONG + _RegDeleteKeyValueW.errcheck = RaiseIfNotErrorSuccess + _RegDeleteKeyValueW(hKeySrc, lpSubKey, lpValueName) +RegDeleteKeyValue = GuessStringType(RegDeleteKeyValueA, RegDeleteKeyValueW) + +# LONG WINAPI RegDeleteKey( +# __in HKEY hKey, +# __in LPCTSTR lpSubKey +# ); +def RegDeleteKeyA(hKeySrc, lpSubKey = None): + _RegDeleteKeyA = windll.advapi32.RegDeleteKeyA + _RegDeleteKeyA.argtypes = [HKEY, LPSTR] + _RegDeleteKeyA.restype = LONG + _RegDeleteKeyA.errcheck = RaiseIfNotErrorSuccess + _RegDeleteKeyA(hKeySrc, lpSubKey) +def RegDeleteKeyW(hKeySrc, lpSubKey = None): + _RegDeleteKeyW = windll.advapi32.RegDeleteKeyW + _RegDeleteKeyW.argtypes = [HKEY, LPWSTR] + _RegDeleteKeyW.restype = LONG + _RegDeleteKeyW.errcheck = RaiseIfNotErrorSuccess + _RegDeleteKeyW(hKeySrc, lpSubKey) +RegDeleteKey = GuessStringType(RegDeleteKeyA, RegDeleteKeyW) + +# LONG WINAPI RegDeleteKeyEx( +# __in HKEY hKey, +# __in LPCTSTR lpSubKey, +# __in REGSAM samDesired, +# __reserved DWORD Reserved +# ); + +def RegDeleteKeyExA(hKeySrc, lpSubKey = None, samDesired = KEY_WOW64_32KEY): + _RegDeleteKeyExA = windll.advapi32.RegDeleteKeyExA + _RegDeleteKeyExA.argtypes = [HKEY, LPSTR, REGSAM, DWORD] + _RegDeleteKeyExA.restype = LONG + _RegDeleteKeyExA.errcheck = RaiseIfNotErrorSuccess + _RegDeleteKeyExA(hKeySrc, lpSubKey, samDesired, 0) +def RegDeleteKeyExW(hKeySrc, lpSubKey = None, samDesired = KEY_WOW64_32KEY): + _RegDeleteKeyExW = windll.advapi32.RegDeleteKeyExW + _RegDeleteKeyExW.argtypes = [HKEY, LPWSTR, REGSAM, DWORD] + _RegDeleteKeyExW.restype = LONG + _RegDeleteKeyExW.errcheck = RaiseIfNotErrorSuccess + _RegDeleteKeyExW(hKeySrc, lpSubKey, samDesired, 0) +RegDeleteKeyEx = GuessStringType(RegDeleteKeyExA, RegDeleteKeyExW) + +# LONG WINAPI RegCopyTree( +# __in HKEY hKeySrc, +# __in_opt LPCTSTR lpSubKey, +# __in HKEY hKeyDest +# ); +def RegCopyTreeA(hKeySrc, lpSubKey, hKeyDest): + _RegCopyTreeA = windll.advapi32.RegCopyTreeA + _RegCopyTreeA.argtypes = [HKEY, LPSTR, HKEY] + _RegCopyTreeA.restype = LONG + _RegCopyTreeA.errcheck = RaiseIfNotErrorSuccess + _RegCopyTreeA(hKeySrc, lpSubKey, hKeyDest) +def RegCopyTreeW(hKeySrc, lpSubKey, hKeyDest): + _RegCopyTreeW = windll.advapi32.RegCopyTreeW + _RegCopyTreeW.argtypes = [HKEY, LPWSTR, HKEY] + _RegCopyTreeW.restype = LONG + _RegCopyTreeW.errcheck = RaiseIfNotErrorSuccess + _RegCopyTreeW(hKeySrc, lpSubKey, hKeyDest) +RegCopyTree = GuessStringType(RegCopyTreeA, RegCopyTreeW) + +# LONG WINAPI RegDeleteTree( +# __in HKEY hKey, +# __in_opt LPCTSTR lpSubKey +# ); +def RegDeleteTreeA(hKey, lpSubKey = None): + _RegDeleteTreeA = windll.advapi32.RegDeleteTreeA + _RegDeleteTreeA.argtypes = [HKEY, LPWSTR] + _RegDeleteTreeA.restype = LONG + _RegDeleteTreeA.errcheck = RaiseIfNotErrorSuccess + _RegDeleteTreeA(hKey, lpSubKey) +def RegDeleteTreeW(hKey, lpSubKey = None): + _RegDeleteTreeW = windll.advapi32.RegDeleteTreeW + _RegDeleteTreeW.argtypes = [HKEY, 
LPWSTR] + _RegDeleteTreeW.restype = LONG + _RegDeleteTreeW.errcheck = RaiseIfNotErrorSuccess + _RegDeleteTreeW(hKey, lpSubKey) +RegDeleteTree = GuessStringType(RegDeleteTreeA, RegDeleteTreeW) + +# LONG WINAPI RegFlushKey( +# __in HKEY hKey +# ); +def RegFlushKey(hKey): + _RegFlushKey = windll.advapi32.RegFlushKey + _RegFlushKey.argtypes = [HKEY] + _RegFlushKey.restype = LONG + _RegFlushKey.errcheck = RaiseIfNotErrorSuccess + _RegFlushKey(hKey) + +# LONG WINAPI RegLoadMUIString( +# _In_ HKEY hKey, +# _In_opt_ LPCTSTR pszValue, +# _Out_opt_ LPTSTR pszOutBuf, +# _In_ DWORD cbOutBuf, +# _Out_opt_ LPDWORD pcbData, +# _In_ DWORD Flags, +# _In_opt_ LPCTSTR pszDirectory +# ); + +# TO DO + +#------------------------------------------------------------------------------ + +# BOOL WINAPI CloseServiceHandle( +# _In_ SC_HANDLE hSCObject +# ); +def CloseServiceHandle(hSCObject): + _CloseServiceHandle = windll.advapi32.CloseServiceHandle + _CloseServiceHandle.argtypes = [SC_HANDLE] + _CloseServiceHandle.restype = bool + _CloseServiceHandle.errcheck = RaiseIfZero + + if isinstance(hSCObject, Handle): + # Prevents the handle from being closed without notifying the Handle object. + hSCObject.close() + else: + _CloseServiceHandle(hSCObject) + +# SC_HANDLE WINAPI OpenSCManager( +# _In_opt_ LPCTSTR lpMachineName, +# _In_opt_ LPCTSTR lpDatabaseName, +# _In_ DWORD dwDesiredAccess +# ); +def OpenSCManagerA(lpMachineName = None, lpDatabaseName = None, dwDesiredAccess = SC_MANAGER_ALL_ACCESS): + _OpenSCManagerA = windll.advapi32.OpenSCManagerA + _OpenSCManagerA.argtypes = [LPSTR, LPSTR, DWORD] + _OpenSCManagerA.restype = SC_HANDLE + _OpenSCManagerA.errcheck = RaiseIfZero + + hSCObject = _OpenSCManagerA(lpMachineName, lpDatabaseName, dwDesiredAccess) + return ServiceControlManagerHandle(hSCObject) + +def OpenSCManagerW(lpMachineName = None, lpDatabaseName = None, dwDesiredAccess = SC_MANAGER_ALL_ACCESS): + _OpenSCManagerW = windll.advapi32.OpenSCManagerW + _OpenSCManagerW.argtypes = [LPWSTR, LPWSTR, DWORD] + _OpenSCManagerW.restype = SC_HANDLE + _OpenSCManagerW.errcheck = RaiseIfZero + + hSCObject = _OpenSCManagerA(lpMachineName, lpDatabaseName, dwDesiredAccess) + return ServiceControlManagerHandle(hSCObject) + +OpenSCManager = GuessStringType(OpenSCManagerA, OpenSCManagerW) + +# SC_HANDLE WINAPI OpenService( +# _In_ SC_HANDLE hSCManager, +# _In_ LPCTSTR lpServiceName, +# _In_ DWORD dwDesiredAccess +# ); +def OpenServiceA(hSCManager, lpServiceName, dwDesiredAccess = SERVICE_ALL_ACCESS): + _OpenServiceA = windll.advapi32.OpenServiceA + _OpenServiceA.argtypes = [SC_HANDLE, LPSTR, DWORD] + _OpenServiceA.restype = SC_HANDLE + _OpenServiceA.errcheck = RaiseIfZero + return ServiceHandle( _OpenServiceA(hSCManager, lpServiceName, dwDesiredAccess) ) + +def OpenServiceW(hSCManager, lpServiceName, dwDesiredAccess = SERVICE_ALL_ACCESS): + _OpenServiceW = windll.advapi32.OpenServiceW + _OpenServiceW.argtypes = [SC_HANDLE, LPWSTR, DWORD] + _OpenServiceW.restype = SC_HANDLE + _OpenServiceW.errcheck = RaiseIfZero + return ServiceHandle( _OpenServiceW(hSCManager, lpServiceName, dwDesiredAccess) ) + +OpenService = GuessStringType(OpenServiceA, OpenServiceW) + +# SC_HANDLE WINAPI CreateService( +# _In_ SC_HANDLE hSCManager, +# _In_ LPCTSTR lpServiceName, +# _In_opt_ LPCTSTR lpDisplayName, +# _In_ DWORD dwDesiredAccess, +# _In_ DWORD dwServiceType, +# _In_ DWORD dwStartType, +# _In_ DWORD dwErrorControl, +# _In_opt_ LPCTSTR lpBinaryPathName, +# _In_opt_ LPCTSTR lpLoadOrderGroup, +# _Out_opt_ LPDWORD lpdwTagId, +# _In_opt_ LPCTSTR 
lpDependencies, +# _In_opt_ LPCTSTR lpServiceStartName, +# _In_opt_ LPCTSTR lpPassword +# ); +def CreateServiceA(hSCManager, lpServiceName, + lpDisplayName = None, + dwDesiredAccess = SERVICE_ALL_ACCESS, + dwServiceType = SERVICE_WIN32_OWN_PROCESS, + dwStartType = SERVICE_DEMAND_START, + dwErrorControl = SERVICE_ERROR_NORMAL, + lpBinaryPathName = None, + lpLoadOrderGroup = None, + lpDependencies = None, + lpServiceStartName = None, + lpPassword = None): + + _CreateServiceA = windll.advapi32.CreateServiceA + _CreateServiceA.argtypes = [SC_HANDLE, LPSTR, LPSTR, DWORD, DWORD, DWORD, DWORD, LPSTR, LPSTR, LPDWORD, LPSTR, LPSTR, LPSTR] + _CreateServiceA.restype = SC_HANDLE + _CreateServiceA.errcheck = RaiseIfZero + + dwTagId = DWORD(0) + hService = _CreateServiceA(hSCManager, lpServiceName, dwDesiredAccess, dwServiceType, dwStartType, dwErrorControl, lpBinaryPathName, lpLoadOrderGroup, byref(dwTagId), lpDependencies, lpServiceStartName, lpPassword) + return ServiceHandle(hService), dwTagId.value + +def CreateServiceW(hSCManager, lpServiceName, + lpDisplayName = None, + dwDesiredAccess = SERVICE_ALL_ACCESS, + dwServiceType = SERVICE_WIN32_OWN_PROCESS, + dwStartType = SERVICE_DEMAND_START, + dwErrorControl = SERVICE_ERROR_NORMAL, + lpBinaryPathName = None, + lpLoadOrderGroup = None, + lpDependencies = None, + lpServiceStartName = None, + lpPassword = None): + + _CreateServiceW = windll.advapi32.CreateServiceW + _CreateServiceW.argtypes = [SC_HANDLE, LPWSTR, LPWSTR, DWORD, DWORD, DWORD, DWORD, LPWSTR, LPWSTR, LPDWORD, LPWSTR, LPWSTR, LPWSTR] + _CreateServiceW.restype = SC_HANDLE + _CreateServiceW.errcheck = RaiseIfZero + + dwTagId = DWORD(0) + hService = _CreateServiceW(hSCManager, lpServiceName, dwDesiredAccess, dwServiceType, dwStartType, dwErrorControl, lpBinaryPathName, lpLoadOrderGroup, byref(dwTagId), lpDependencies, lpServiceStartName, lpPassword) + return ServiceHandle(hService), dwTagId.value + +CreateService = GuessStringType(CreateServiceA, CreateServiceW) + +# BOOL WINAPI DeleteService( +# _In_ SC_HANDLE hService +# ); +def DeleteService(hService): + _DeleteService = windll.advapi32.DeleteService + _DeleteService.argtypes = [SC_HANDLE] + _DeleteService.restype = bool + _DeleteService.errcheck = RaiseIfZero + _DeleteService(hService) + +# BOOL WINAPI GetServiceKeyName( +# _In_ SC_HANDLE hSCManager, +# _In_ LPCTSTR lpDisplayName, +# _Out_opt_ LPTSTR lpServiceName, +# _Inout_ LPDWORD lpcchBuffer +# ); +def GetServiceKeyNameA(hSCManager, lpDisplayName): + _GetServiceKeyNameA = windll.advapi32.GetServiceKeyNameA + _GetServiceKeyNameA.argtypes = [SC_HANDLE, LPSTR, LPSTR, LPDWORD] + _GetServiceKeyNameA.restype = bool + + cchBuffer = DWORD(0) + _GetServiceKeyNameA(hSCManager, lpDisplayName, None, byref(cchBuffer)) + if cchBuffer.value == 0: + raise ctypes.WinError() + lpServiceName = ctypes.create_string_buffer(cchBuffer.value + 1) + cchBuffer.value = sizeof(lpServiceName) + success = _GetServiceKeyNameA(hSCManager, lpDisplayName, lpServiceName, byref(cchBuffer)) + if not success: + raise ctypes.WinError() + return lpServiceName.value + +def GetServiceKeyNameW(hSCManager, lpDisplayName): + _GetServiceKeyNameW = windll.advapi32.GetServiceKeyNameW + _GetServiceKeyNameW.argtypes = [SC_HANDLE, LPWSTR, LPWSTR, LPDWORD] + _GetServiceKeyNameW.restype = bool + + cchBuffer = DWORD(0) + _GetServiceKeyNameW(hSCManager, lpDisplayName, None, byref(cchBuffer)) + if cchBuffer.value == 0: + raise ctypes.WinError() + lpServiceName = ctypes.create_unicode_buffer(cchBuffer.value + 2) + cchBuffer.value = 
sizeof(lpServiceName) + success = _GetServiceKeyNameW(hSCManager, lpDisplayName, lpServiceName, byref(cchBuffer)) + if not success: + raise ctypes.WinError() + return lpServiceName.value + +GetServiceKeyName = GuessStringType(GetServiceKeyNameA, GetServiceKeyNameW) + +# BOOL WINAPI GetServiceDisplayName( +# _In_ SC_HANDLE hSCManager, +# _In_ LPCTSTR lpServiceName, +# _Out_opt_ LPTSTR lpDisplayName, +# _Inout_ LPDWORD lpcchBuffer +# ); +def GetServiceDisplayNameA(hSCManager, lpServiceName): + _GetServiceDisplayNameA = windll.advapi32.GetServiceDisplayNameA + _GetServiceDisplayNameA.argtypes = [SC_HANDLE, LPSTR, LPSTR, LPDWORD] + _GetServiceDisplayNameA.restype = bool + + cchBuffer = DWORD(0) + _GetServiceDisplayNameA(hSCManager, lpServiceName, None, byref(cchBuffer)) + if cchBuffer.value == 0: + raise ctypes.WinError() + lpDisplayName = ctypes.create_string_buffer(cchBuffer.value + 1) + cchBuffer.value = sizeof(lpDisplayName) + success = _GetServiceDisplayNameA(hSCManager, lpServiceName, lpDisplayName, byref(cchBuffer)) + if not success: + raise ctypes.WinError() + return lpDisplayName.value + +def GetServiceDisplayNameW(hSCManager, lpServiceName): + _GetServiceDisplayNameW = windll.advapi32.GetServiceDisplayNameW + _GetServiceDisplayNameW.argtypes = [SC_HANDLE, LPWSTR, LPWSTR, LPDWORD] + _GetServiceDisplayNameW.restype = bool + + cchBuffer = DWORD(0) + _GetServiceDisplayNameW(hSCManager, lpServiceName, None, byref(cchBuffer)) + if cchBuffer.value == 0: + raise ctypes.WinError() + lpDisplayName = ctypes.create_unicode_buffer(cchBuffer.value + 2) + cchBuffer.value = sizeof(lpDisplayName) + success = _GetServiceDisplayNameW(hSCManager, lpServiceName, lpDisplayName, byref(cchBuffer)) + if not success: + raise ctypes.WinError() + return lpDisplayName.value + +GetServiceDisplayName = GuessStringType(GetServiceDisplayNameA, GetServiceDisplayNameW) + +# BOOL WINAPI QueryServiceConfig( +# _In_ SC_HANDLE hService, +# _Out_opt_ LPQUERY_SERVICE_CONFIG lpServiceConfig, +# _In_ DWORD cbBufSize, +# _Out_ LPDWORD pcbBytesNeeded +# ); + +# TO DO + +# BOOL WINAPI QueryServiceConfig2( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwInfoLevel, +# _Out_opt_ LPBYTE lpBuffer, +# _In_ DWORD cbBufSize, +# _Out_ LPDWORD pcbBytesNeeded +# ); + +# TO DO + +# BOOL WINAPI ChangeServiceConfig( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwServiceType, +# _In_ DWORD dwStartType, +# _In_ DWORD dwErrorControl, +# _In_opt_ LPCTSTR lpBinaryPathName, +# _In_opt_ LPCTSTR lpLoadOrderGroup, +# _Out_opt_ LPDWORD lpdwTagId, +# _In_opt_ LPCTSTR lpDependencies, +# _In_opt_ LPCTSTR lpServiceStartName, +# _In_opt_ LPCTSTR lpPassword, +# _In_opt_ LPCTSTR lpDisplayName +# ); + +# TO DO + +# BOOL WINAPI ChangeServiceConfig2( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwInfoLevel, +# _In_opt_ LPVOID lpInfo +# ); + +# TO DO + +# BOOL WINAPI StartService( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwNumServiceArgs, +# _In_opt_ LPCTSTR *lpServiceArgVectors +# ); +def StartServiceA(hService, ServiceArgVectors = None): + _StartServiceA = windll.advapi32.StartServiceA + _StartServiceA.argtypes = [SC_HANDLE, DWORD, LPVOID] + _StartServiceA.restype = bool + _StartServiceA.errcheck = RaiseIfZero + + if ServiceArgVectors: + dwNumServiceArgs = len(ServiceArgVectors) + CServiceArgVectors = (LPSTR * dwNumServiceArgs)(*ServiceArgVectors) + lpServiceArgVectors = ctypes.pointer(CServiceArgVectors) + else: + dwNumServiceArgs = 0 + lpServiceArgVectors = None + _StartServiceA(hService, dwNumServiceArgs, lpServiceArgVectors) + +def StartServiceW(hService, 
ServiceArgVectors = None): + _StartServiceW = windll.advapi32.StartServiceW + _StartServiceW.argtypes = [SC_HANDLE, DWORD, LPVOID] + _StartServiceW.restype = bool + _StartServiceW.errcheck = RaiseIfZero + + if ServiceArgVectors: + dwNumServiceArgs = len(ServiceArgVectors) + CServiceArgVectors = (LPWSTR * dwNumServiceArgs)(*ServiceArgVectors) + lpServiceArgVectors = ctypes.pointer(CServiceArgVectors) + else: + dwNumServiceArgs = 0 + lpServiceArgVectors = None + _StartServiceW(hService, dwNumServiceArgs, lpServiceArgVectors) + +StartService = GuessStringType(StartServiceA, StartServiceW) + +# BOOL WINAPI ControlService( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwControl, +# _Out_ LPSERVICE_STATUS lpServiceStatus +# ); +def ControlService(hService, dwControl): + _ControlService = windll.advapi32.ControlService + _ControlService.argtypes = [SC_HANDLE, DWORD, LPSERVICE_STATUS] + _ControlService.restype = bool + _ControlService.errcheck = RaiseIfZero + + rawServiceStatus = SERVICE_STATUS() + _ControlService(hService, dwControl, byref(rawServiceStatus)) + return ServiceStatus(rawServiceStatus) + +# BOOL WINAPI ControlServiceEx( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwControl, +# _In_ DWORD dwInfoLevel, +# _Inout_ PVOID pControlParams +# ); + +# TO DO + +# DWORD WINAPI NotifyServiceStatusChange( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwNotifyMask, +# _In_ PSERVICE_NOTIFY pNotifyBuffer +# ); + +# TO DO + +# BOOL WINAPI QueryServiceStatus( +# _In_ SC_HANDLE hService, +# _Out_ LPSERVICE_STATUS lpServiceStatus +# ); +def QueryServiceStatus(hService): + _QueryServiceStatus = windll.advapi32.QueryServiceStatus + _QueryServiceStatus.argtypes = [SC_HANDLE, LPSERVICE_STATUS] + _QueryServiceStatus.restype = bool + _QueryServiceStatus.errcheck = RaiseIfZero + + rawServiceStatus = SERVICE_STATUS() + _QueryServiceStatus(hService, byref(rawServiceStatus)) + return ServiceStatus(rawServiceStatus) + +# BOOL WINAPI QueryServiceStatusEx( +# _In_ SC_HANDLE hService, +# _In_ SC_STATUS_TYPE InfoLevel, +# _Out_opt_ LPBYTE lpBuffer, +# _In_ DWORD cbBufSize, +# _Out_ LPDWORD pcbBytesNeeded +# ); +def QueryServiceStatusEx(hService, InfoLevel = SC_STATUS_PROCESS_INFO): + + if InfoLevel != SC_STATUS_PROCESS_INFO: + raise NotImplementedError() + + _QueryServiceStatusEx = windll.advapi32.QueryServiceStatusEx + _QueryServiceStatusEx.argtypes = [SC_HANDLE, SC_STATUS_TYPE, LPVOID, DWORD, LPDWORD] + _QueryServiceStatusEx.restype = bool + _QueryServiceStatusEx.errcheck = RaiseIfZero + + lpBuffer = SERVICE_STATUS_PROCESS() + cbBytesNeeded = DWORD(sizeof(lpBuffer)) + _QueryServiceStatusEx(hService, InfoLevel, byref(lpBuffer), sizeof(lpBuffer), byref(cbBytesNeeded)) + return ServiceStatusProcess(lpBuffer) + +# BOOL WINAPI EnumServicesStatus( +# _In_ SC_HANDLE hSCManager, +# _In_ DWORD dwServiceType, +# _In_ DWORD dwServiceState, +# _Out_opt_ LPENUM_SERVICE_STATUS lpServices, +# _In_ DWORD cbBufSize, +# _Out_ LPDWORD pcbBytesNeeded, +# _Out_ LPDWORD lpServicesReturned, +# _Inout_opt_ LPDWORD lpResumeHandle +# ); +def EnumServicesStatusA(hSCManager, dwServiceType = SERVICE_DRIVER | SERVICE_WIN32, dwServiceState = SERVICE_STATE_ALL): + _EnumServicesStatusA = windll.advapi32.EnumServicesStatusA + _EnumServicesStatusA.argtypes = [SC_HANDLE, DWORD, DWORD, LPVOID, DWORD, LPDWORD, LPDWORD, LPDWORD] + _EnumServicesStatusA.restype = bool + + cbBytesNeeded = DWORD(0) + ServicesReturned = DWORD(0) + ResumeHandle = DWORD(0) + + _EnumServicesStatusA(hSCManager, dwServiceType, dwServiceState, None, 0, byref(cbBytesNeeded), 
byref(ServicesReturned), byref(ResumeHandle)) + + Services = [] + success = False + while GetLastError() == ERROR_MORE_DATA: + if cbBytesNeeded.value < sizeof(ENUM_SERVICE_STATUSA): + break + ServicesBuffer = ctypes.create_string_buffer("", cbBytesNeeded.value) + success = _EnumServicesStatusA(hSCManager, dwServiceType, dwServiceState, byref(ServicesBuffer), sizeof(ServicesBuffer), byref(cbBytesNeeded), byref(ServicesReturned), byref(ResumeHandle)) + if sizeof(ServicesBuffer) < (sizeof(ENUM_SERVICE_STATUSA) * ServicesReturned.value): + raise ctypes.WinError() + lpServicesArray = ctypes.cast(ctypes.cast(ctypes.pointer(ServicesBuffer), ctypes.c_void_p), LPENUM_SERVICE_STATUSA) + for index in compat.xrange(0, ServicesReturned.value): + Services.append( ServiceStatusEntry(lpServicesArray[index]) ) + if success: break + if not success: + raise ctypes.WinError() + + return Services + +def EnumServicesStatusW(hSCManager, dwServiceType = SERVICE_DRIVER | SERVICE_WIN32, dwServiceState = SERVICE_STATE_ALL): + _EnumServicesStatusW = windll.advapi32.EnumServicesStatusW + _EnumServicesStatusW.argtypes = [SC_HANDLE, DWORD, DWORD, LPVOID, DWORD, LPDWORD, LPDWORD, LPDWORD] + _EnumServicesStatusW.restype = bool + + cbBytesNeeded = DWORD(0) + ServicesReturned = DWORD(0) + ResumeHandle = DWORD(0) + + _EnumServicesStatusW(hSCManager, dwServiceType, dwServiceState, None, 0, byref(cbBytesNeeded), byref(ServicesReturned), byref(ResumeHandle)) + + Services = [] + success = False + while GetLastError() == ERROR_MORE_DATA: + if cbBytesNeeded.value < sizeof(ENUM_SERVICE_STATUSW): + break + ServicesBuffer = ctypes.create_string_buffer("", cbBytesNeeded.value) + success = _EnumServicesStatusW(hSCManager, dwServiceType, dwServiceState, byref(ServicesBuffer), sizeof(ServicesBuffer), byref(cbBytesNeeded), byref(ServicesReturned), byref(ResumeHandle)) + if sizeof(ServicesBuffer) < (sizeof(ENUM_SERVICE_STATUSW) * ServicesReturned.value): + raise ctypes.WinError() + lpServicesArray = ctypes.cast(ctypes.cast(ctypes.pointer(ServicesBuffer), ctypes.c_void_p), LPENUM_SERVICE_STATUSW) + for index in compat.xrange(0, ServicesReturned.value): + Services.append( ServiceStatusEntry(lpServicesArray[index]) ) + if success: break + if not success: + raise ctypes.WinError() + + return Services + +EnumServicesStatus = DefaultStringType(EnumServicesStatusA, EnumServicesStatusW) + +# BOOL WINAPI EnumServicesStatusEx( +# _In_ SC_HANDLE hSCManager, +# _In_ SC_ENUM_TYPE InfoLevel, +# _In_ DWORD dwServiceType, +# _In_ DWORD dwServiceState, +# _Out_opt_ LPBYTE lpServices, +# _In_ DWORD cbBufSize, +# _Out_ LPDWORD pcbBytesNeeded, +# _Out_ LPDWORD lpServicesReturned, +# _Inout_opt_ LPDWORD lpResumeHandle, +# _In_opt_ LPCTSTR pszGroupName +# ); +def EnumServicesStatusExA(hSCManager, InfoLevel = SC_ENUM_PROCESS_INFO, dwServiceType = SERVICE_DRIVER | SERVICE_WIN32, dwServiceState = SERVICE_STATE_ALL, pszGroupName = None): + + if InfoLevel != SC_ENUM_PROCESS_INFO: + raise NotImplementedError() + + _EnumServicesStatusExA = windll.advapi32.EnumServicesStatusExA + _EnumServicesStatusExA.argtypes = [SC_HANDLE, SC_ENUM_TYPE, DWORD, DWORD, LPVOID, DWORD, LPDWORD, LPDWORD, LPDWORD, LPSTR] + _EnumServicesStatusExA.restype = bool + + cbBytesNeeded = DWORD(0) + ServicesReturned = DWORD(0) + ResumeHandle = DWORD(0) + + _EnumServicesStatusExA(hSCManager, InfoLevel, dwServiceType, dwServiceState, None, 0, byref(cbBytesNeeded), byref(ServicesReturned), byref(ResumeHandle), pszGroupName) + + Services = [] + success = False + while GetLastError() == ERROR_MORE_DATA: 
+ if cbBytesNeeded.value < sizeof(ENUM_SERVICE_STATUS_PROCESSA): + break + ServicesBuffer = ctypes.create_string_buffer("", cbBytesNeeded.value) + success = _EnumServicesStatusExA(hSCManager, InfoLevel, dwServiceType, dwServiceState, byref(ServicesBuffer), sizeof(ServicesBuffer), byref(cbBytesNeeded), byref(ServicesReturned), byref(ResumeHandle), pszGroupName) + if sizeof(ServicesBuffer) < (sizeof(ENUM_SERVICE_STATUS_PROCESSA) * ServicesReturned.value): + raise ctypes.WinError() + lpServicesArray = ctypes.cast(ctypes.cast(ctypes.pointer(ServicesBuffer), ctypes.c_void_p), LPENUM_SERVICE_STATUS_PROCESSA) + for index in compat.xrange(0, ServicesReturned.value): + Services.append( ServiceStatusProcessEntry(lpServicesArray[index]) ) + if success: break + if not success: + raise ctypes.WinError() + + return Services + +def EnumServicesStatusExW(hSCManager, InfoLevel = SC_ENUM_PROCESS_INFO, dwServiceType = SERVICE_DRIVER | SERVICE_WIN32, dwServiceState = SERVICE_STATE_ALL, pszGroupName = None): + _EnumServicesStatusExW = windll.advapi32.EnumServicesStatusExW + _EnumServicesStatusExW.argtypes = [SC_HANDLE, SC_ENUM_TYPE, DWORD, DWORD, LPVOID, DWORD, LPDWORD, LPDWORD, LPDWORD, LPWSTR] + _EnumServicesStatusExW.restype = bool + + if InfoLevel != SC_ENUM_PROCESS_INFO: + raise NotImplementedError() + + cbBytesNeeded = DWORD(0) + ServicesReturned = DWORD(0) + ResumeHandle = DWORD(0) + + _EnumServicesStatusExW(hSCManager, InfoLevel, dwServiceType, dwServiceState, None, 0, byref(cbBytesNeeded), byref(ServicesReturned), byref(ResumeHandle), pszGroupName) + + Services = [] + success = False + while GetLastError() == ERROR_MORE_DATA: + if cbBytesNeeded.value < sizeof(ENUM_SERVICE_STATUS_PROCESSW): + break + ServicesBuffer = ctypes.create_string_buffer("", cbBytesNeeded.value) + success = _EnumServicesStatusExW(hSCManager, InfoLevel, dwServiceType, dwServiceState, byref(ServicesBuffer), sizeof(ServicesBuffer), byref(cbBytesNeeded), byref(ServicesReturned), byref(ResumeHandle), pszGroupName) + if sizeof(ServicesBuffer) < (sizeof(ENUM_SERVICE_STATUS_PROCESSW) * ServicesReturned.value): + raise ctypes.WinError() + lpServicesArray = ctypes.cast(ctypes.cast(ctypes.pointer(ServicesBuffer), ctypes.c_void_p), LPENUM_SERVICE_STATUS_PROCESSW) + for index in compat.xrange(0, ServicesReturned.value): + Services.append( ServiceStatusProcessEntry(lpServicesArray[index]) ) + if success: break + if not success: + raise ctypes.WinError() + + return Services + +EnumServicesStatusEx = DefaultStringType(EnumServicesStatusExA, EnumServicesStatusExW) + +# BOOL WINAPI EnumDependentServices( +# _In_ SC_HANDLE hService, +# _In_ DWORD dwServiceState, +# _Out_opt_ LPENUM_SERVICE_STATUS lpServices, +# _In_ DWORD cbBufSize, +# _Out_ LPDWORD pcbBytesNeeded, +# _Out_ LPDWORD lpServicesReturned +# ); + +# TO DO + +#============================================================================== +# This calculates the list of exported symbols. 
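+# (That is: every name defined since the `_all = set(vars().keys())` snapshot
+#  taken near the top of the module, except names starting with '_', is
+#  exported through __all__.)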
+_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py new file mode 100644 index 00000000..eb786b65 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/context_amd64.py @@ -0,0 +1,762 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +CONTEXT structure for amd64. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.version import ARCH_AMD64 +from winappdbg.win32 import context_i386 + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- CONTEXT structures and constants ----------------------------------------- + +# The following values specify the type of access in the first parameter +# of the exception record when the exception code specifies an access +# violation. 
+EXCEPTION_READ_FAULT = 0 # exception caused by a read +EXCEPTION_WRITE_FAULT = 1 # exception caused by a write +EXCEPTION_EXECUTE_FAULT = 8 # exception caused by an instruction fetch + +CONTEXT_AMD64 = 0x00100000 + +CONTEXT_CONTROL = (CONTEXT_AMD64 | long(0x1)) +CONTEXT_INTEGER = (CONTEXT_AMD64 | long(0x2)) +CONTEXT_SEGMENTS = (CONTEXT_AMD64 | long(0x4)) +CONTEXT_FLOATING_POINT = (CONTEXT_AMD64 | long(0x8)) +CONTEXT_DEBUG_REGISTERS = (CONTEXT_AMD64 | long(0x10)) + +CONTEXT_MMX_REGISTERS = CONTEXT_FLOATING_POINT + +CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_FLOATING_POINT) + +CONTEXT_ALL = (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_SEGMENTS | \ + CONTEXT_FLOATING_POINT | CONTEXT_DEBUG_REGISTERS) + +CONTEXT_EXCEPTION_ACTIVE = 0x8000000 +CONTEXT_SERVICE_ACTIVE = 0x10000000 +CONTEXT_EXCEPTION_REQUEST = 0x40000000 +CONTEXT_EXCEPTION_REPORTING = 0x80000000 + +INITIAL_MXCSR = 0x1f80 # initial MXCSR value +INITIAL_FPCSR = 0x027f # initial FPCSR value + +# typedef struct _XMM_SAVE_AREA32 { +# WORD ControlWord; +# WORD StatusWord; +# BYTE TagWord; +# BYTE Reserved1; +# WORD ErrorOpcode; +# DWORD ErrorOffset; +# WORD ErrorSelector; +# WORD Reserved2; +# DWORD DataOffset; +# WORD DataSelector; +# WORD Reserved3; +# DWORD MxCsr; +# DWORD MxCsr_Mask; +# M128A FloatRegisters[8]; +# M128A XmmRegisters[16]; +# BYTE Reserved4[96]; +# } XMM_SAVE_AREA32, *PXMM_SAVE_AREA32; +class XMM_SAVE_AREA32(Structure): + _pack_ = 1 + _fields_ = [ + ('ControlWord', WORD), + ('StatusWord', WORD), + ('TagWord', BYTE), + ('Reserved1', BYTE), + ('ErrorOpcode', WORD), + ('ErrorOffset', DWORD), + ('ErrorSelector', WORD), + ('Reserved2', WORD), + ('DataOffset', DWORD), + ('DataSelector', WORD), + ('Reserved3', WORD), + ('MxCsr', DWORD), + ('MxCsr_Mask', DWORD), + ('FloatRegisters', M128A * 8), + ('XmmRegisters', M128A * 16), + ('Reserved4', BYTE * 96), + ] + + def from_dict(self): + raise NotImplementedError() + + def to_dict(self): + d = dict() + for name, type in self._fields_: + if name in ('FloatRegisters', 'XmmRegisters'): + d[name] = tuple([ (x.LowPart + (x.HighPart << 64)) for x in getattr(self, name) ]) + elif name == 'Reserved4': + d[name] = tuple([ chr(x) for x in getattr(self, name) ]) + else: + d[name] = getattr(self, name) + return d + +LEGACY_SAVE_AREA_LENGTH = sizeof(XMM_SAVE_AREA32) + +PXMM_SAVE_AREA32 = ctypes.POINTER(XMM_SAVE_AREA32) +LPXMM_SAVE_AREA32 = PXMM_SAVE_AREA32 + +# // +# // Context Frame +# // +# // This frame has a several purposes: 1) it is used as an argument to +# // NtContinue, 2) is is used to constuct a call frame for APC delivery, +# // and 3) it is used in the user level thread creation routines. +# // +# // +# // The flags field within this record controls the contents of a CONTEXT +# // record. +# // +# // If the context record is used as an input parameter, then for each +# // portion of the context record controlled by a flag whose value is +# // set, it is assumed that that portion of the context record contains +# // valid context. If the context record is being used to modify a threads +# // context, then only that portion of the threads context is modified. +# // +# // If the context record is used as an output parameter to capture the +# // context of a thread, then only those portions of the thread's context +# // corresponding to set flags will be returned. +# // +# // CONTEXT_CONTROL specifies SegSs, Rsp, SegCs, Rip, and EFlags. +# // +# // CONTEXT_INTEGER specifies Rax, Rcx, Rdx, Rbx, Rbp, Rsi, Rdi, and R8-R15. 
+# // +# // CONTEXT_SEGMENTS specifies SegDs, SegEs, SegFs, and SegGs. +# // +# // CONTEXT_DEBUG_REGISTERS specifies Dr0-Dr3 and Dr6-Dr7. +# // +# // CONTEXT_MMX_REGISTERS specifies the floating point and extended registers +# // Mm0/St0-Mm7/St7 and Xmm0-Xmm15). +# // +# +# typedef struct DECLSPEC_ALIGN(16) _CONTEXT { +# +# // +# // Register parameter home addresses. +# // +# // N.B. These fields are for convience - they could be used to extend the +# // context record in the future. +# // +# +# DWORD64 P1Home; +# DWORD64 P2Home; +# DWORD64 P3Home; +# DWORD64 P4Home; +# DWORD64 P5Home; +# DWORD64 P6Home; +# +# // +# // Control flags. +# // +# +# DWORD ContextFlags; +# DWORD MxCsr; +# +# // +# // Segment Registers and processor flags. +# // +# +# WORD SegCs; +# WORD SegDs; +# WORD SegEs; +# WORD SegFs; +# WORD SegGs; +# WORD SegSs; +# DWORD EFlags; +# +# // +# // Debug registers +# // +# +# DWORD64 Dr0; +# DWORD64 Dr1; +# DWORD64 Dr2; +# DWORD64 Dr3; +# DWORD64 Dr6; +# DWORD64 Dr7; +# +# // +# // Integer registers. +# // +# +# DWORD64 Rax; +# DWORD64 Rcx; +# DWORD64 Rdx; +# DWORD64 Rbx; +# DWORD64 Rsp; +# DWORD64 Rbp; +# DWORD64 Rsi; +# DWORD64 Rdi; +# DWORD64 R8; +# DWORD64 R9; +# DWORD64 R10; +# DWORD64 R11; +# DWORD64 R12; +# DWORD64 R13; +# DWORD64 R14; +# DWORD64 R15; +# +# // +# // Program counter. +# // +# +# DWORD64 Rip; +# +# // +# // Floating point state. +# // +# +# union { +# XMM_SAVE_AREA32 FltSave; +# struct { +# M128A Header[2]; +# M128A Legacy[8]; +# M128A Xmm0; +# M128A Xmm1; +# M128A Xmm2; +# M128A Xmm3; +# M128A Xmm4; +# M128A Xmm5; +# M128A Xmm6; +# M128A Xmm7; +# M128A Xmm8; +# M128A Xmm9; +# M128A Xmm10; +# M128A Xmm11; +# M128A Xmm12; +# M128A Xmm13; +# M128A Xmm14; +# M128A Xmm15; +# }; +# }; +# +# // +# // Vector registers. +# // +# +# M128A VectorRegister[26]; +# DWORD64 VectorControl; +# +# // +# // Special debug control registers. +# // +# +# DWORD64 DebugControl; +# DWORD64 LastBranchToRip; +# DWORD64 LastBranchFromRip; +# DWORD64 LastExceptionToRip; +# DWORD64 LastExceptionFromRip; +# } CONTEXT, *PCONTEXT; + +class _CONTEXT_FLTSAVE_STRUCT(Structure): + _fields_ = [ + ('Header', M128A * 2), + ('Legacy', M128A * 8), + ('Xmm0', M128A), + ('Xmm1', M128A), + ('Xmm2', M128A), + ('Xmm3', M128A), + ('Xmm4', M128A), + ('Xmm5', M128A), + ('Xmm6', M128A), + ('Xmm7', M128A), + ('Xmm8', M128A), + ('Xmm9', M128A), + ('Xmm10', M128A), + ('Xmm11', M128A), + ('Xmm12', M128A), + ('Xmm13', M128A), + ('Xmm14', M128A), + ('Xmm15', M128A), + ] + + def from_dict(self): + raise NotImplementedError() + + def to_dict(self): + d = dict() + for name, type in self._fields_: + if name in ('Header', 'Legacy'): + d[name] = tuple([ (x.Low + (x.High << 64)) for x in getattr(self, name) ]) + else: + x = getattr(self, name) + d[name] = x.Low + (x.High << 64) + return d + +class _CONTEXT_FLTSAVE_UNION(Union): + _fields_ = [ + ('flt', XMM_SAVE_AREA32), + ('xmm', _CONTEXT_FLTSAVE_STRUCT), + ] + + def from_dict(self): + raise NotImplementedError() + + def to_dict(self): + d = dict() + d['flt'] = self.flt.to_dict() + d['xmm'] = self.xmm.to_dict() + return d + +class CONTEXT(Structure): + arch = ARCH_AMD64 + + _pack_ = 16 + _fields_ = [ + + # Register parameter home addresses. + ('P1Home', DWORD64), + ('P2Home', DWORD64), + ('P3Home', DWORD64), + ('P4Home', DWORD64), + ('P5Home', DWORD64), + ('P6Home', DWORD64), + + # Control flags. + ('ContextFlags', DWORD), + ('MxCsr', DWORD), + + # Segment Registers and processor flags. 
+ ('SegCs', WORD), + ('SegDs', WORD), + ('SegEs', WORD), + ('SegFs', WORD), + ('SegGs', WORD), + ('SegSs', WORD), + ('EFlags', DWORD), + + # Debug registers. + ('Dr0', DWORD64), + ('Dr1', DWORD64), + ('Dr2', DWORD64), + ('Dr3', DWORD64), + ('Dr6', DWORD64), + ('Dr7', DWORD64), + + # Integer registers. + ('Rax', DWORD64), + ('Rcx', DWORD64), + ('Rdx', DWORD64), + ('Rbx', DWORD64), + ('Rsp', DWORD64), + ('Rbp', DWORD64), + ('Rsi', DWORD64), + ('Rdi', DWORD64), + ('R8', DWORD64), + ('R9', DWORD64), + ('R10', DWORD64), + ('R11', DWORD64), + ('R12', DWORD64), + ('R13', DWORD64), + ('R14', DWORD64), + ('R15', DWORD64), + + # Program counter. + ('Rip', DWORD64), + + # Floating point state. + ('FltSave', _CONTEXT_FLTSAVE_UNION), + + # Vector registers. + ('VectorRegister', M128A * 26), + ('VectorControl', DWORD64), + + # Special debug control registers. + ('DebugControl', DWORD64), + ('LastBranchToRip', DWORD64), + ('LastBranchFromRip', DWORD64), + ('LastExceptionToRip', DWORD64), + ('LastExceptionFromRip', DWORD64), + ] + + _others = ('P1Home', 'P2Home', 'P3Home', 'P4Home', 'P5Home', 'P6Home', \ + 'MxCsr', 'VectorRegister', 'VectorControl') + _control = ('SegSs', 'Rsp', 'SegCs', 'Rip', 'EFlags') + _integer = ('Rax', 'Rcx', 'Rdx', 'Rbx', 'Rsp', 'Rbp', 'Rsi', 'Rdi', \ + 'R8', 'R9', 'R10', 'R11', 'R12', 'R13', 'R14', 'R15') + _segments = ('SegDs', 'SegEs', 'SegFs', 'SegGs') + _debug = ('Dr0', 'Dr1', 'Dr2', 'Dr3', 'Dr6', 'Dr7', \ + 'DebugControl', 'LastBranchToRip', 'LastBranchFromRip', \ + 'LastExceptionToRip', 'LastExceptionFromRip') + _mmx = ('Xmm0', 'Xmm1', 'Xmm2', 'Xmm3', 'Xmm4', 'Xmm5', 'Xmm6', 'Xmm7', \ + 'Xmm8', 'Xmm9', 'Xmm10', 'Xmm11', 'Xmm12', 'Xmm13', 'Xmm14', 'Xmm15') + + # XXX TODO + # Convert VectorRegister and Xmm0-Xmm15 to pure Python types! + + @classmethod + def from_dict(cls, ctx): + 'Instance a new structure from a Python native type.' + ctx = Context(ctx) + s = cls() + ContextFlags = ctx['ContextFlags'] + s.ContextFlags = ContextFlags + for key in cls._others: + if key != 'VectorRegister': + setattr(s, key, ctx[key]) + else: + w = ctx[key] + v = (M128A * len(w))() + i = 0 + for x in w: + y = M128A() + y.High = x >> 64 + y.Low = x - (x >> 64) + v[i] = y + i += 1 + setattr(s, key, v) + if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL: + for key in cls._control: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER: + for key in cls._integer: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS: + for key in cls._segments: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS: + for key in cls._debug: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_MMX_REGISTERS) == CONTEXT_MMX_REGISTERS: + xmm = s.FltSave.xmm + for key in cls._mmx: + y = M128A() + y.High = x >> 64 + y.Low = x - (x >> 64) + setattr(xmm, key, y) + return s + + def to_dict(self): + 'Convert a structure into a Python dictionary.' 
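+        # The resulting dictionary is what GetThreadContext() below returns and
+        # what SetThreadContext() accepts, so a typical round trip looks like
+        # this (illustrative; hThread and new_address are placeholders):
+        #   ctx = GetThreadContext(hThread)
+        #   ctx['Rip'] = new_address   # or ctx.pc = new_address
+        #   SetThreadContext(hThread, ctx)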
+ ctx = Context() + ContextFlags = self.ContextFlags + ctx['ContextFlags'] = ContextFlags + for key in self._others: + if key != 'VectorRegister': + ctx[key] = getattr(self, key) + else: + ctx[key] = tuple([ (x.Low + (x.High << 64)) for x in getattr(self, key) ]) + if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL: + for key in self._control: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER: + for key in self._integer: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS: + for key in self._segments: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS: + for key in self._debug: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_MMX_REGISTERS) == CONTEXT_MMX_REGISTERS: + xmm = self.FltSave.xmm.to_dict() + for key in self._mmx: + ctx[key] = xmm.get(key) + return ctx + +PCONTEXT = ctypes.POINTER(CONTEXT) +LPCONTEXT = PCONTEXT + +class Context(dict): + """ + Register context dictionary for the amd64 architecture. + """ + + arch = CONTEXT.arch + + def __get_pc(self): + return self['Rip'] + def __set_pc(self, value): + self['Rip'] = value + pc = property(__get_pc, __set_pc) + + def __get_sp(self): + return self['Rsp'] + def __set_sp(self, value): + self['Rsp'] = value + sp = property(__get_sp, __set_sp) + + def __get_fp(self): + return self['Rbp'] + def __set_fp(self, value): + self['Rbp'] = value + fp = property(__get_fp, __set_fp) + +#--- LDT_ENTRY structure ------------------------------------------------------ + +# typedef struct _LDT_ENTRY { +# WORD LimitLow; +# WORD BaseLow; +# union { +# struct { +# BYTE BaseMid; +# BYTE Flags1; +# BYTE Flags2; +# BYTE BaseHi; +# } Bytes; +# struct { +# DWORD BaseMid :8; +# DWORD Type :5; +# DWORD Dpl :2; +# DWORD Pres :1; +# DWORD LimitHi :4; +# DWORD Sys :1; +# DWORD Reserved_0 :1; +# DWORD Default_Big :1; +# DWORD Granularity :1; +# DWORD BaseHi :8; +# } Bits; +# } HighWord; +# } LDT_ENTRY, +# *PLDT_ENTRY; + +class _LDT_ENTRY_BYTES_(Structure): + _pack_ = 1 + _fields_ = [ + ('BaseMid', BYTE), + ('Flags1', BYTE), + ('Flags2', BYTE), + ('BaseHi', BYTE), + ] + +class _LDT_ENTRY_BITS_(Structure): + _pack_ = 1 + _fields_ = [ + ('BaseMid', DWORD, 8), + ('Type', DWORD, 5), + ('Dpl', DWORD, 2), + ('Pres', DWORD, 1), + ('LimitHi', DWORD, 4), + ('Sys', DWORD, 1), + ('Reserved_0', DWORD, 1), + ('Default_Big', DWORD, 1), + ('Granularity', DWORD, 1), + ('BaseHi', DWORD, 8), + ] + +class _LDT_ENTRY_HIGHWORD_(Union): + _pack_ = 1 + _fields_ = [ + ('Bytes', _LDT_ENTRY_BYTES_), + ('Bits', _LDT_ENTRY_BITS_), + ] + +class LDT_ENTRY(Structure): + _pack_ = 1 + _fields_ = [ + ('LimitLow', WORD), + ('BaseLow', WORD), + ('HighWord', _LDT_ENTRY_HIGHWORD_), + ] + +PLDT_ENTRY = POINTER(LDT_ENTRY) +LPLDT_ENTRY = PLDT_ENTRY + +#--- WOW64 CONTEXT structure and constants ------------------------------------ + +# Value of SegCs in a Wow64 thread when running in 32 bits mode +WOW64_CS32 = 0x23 + +WOW64_CONTEXT_i386 = long(0x00010000) +WOW64_CONTEXT_i486 = long(0x00010000) + +WOW64_CONTEXT_CONTROL = (WOW64_CONTEXT_i386 | long(0x00000001)) +WOW64_CONTEXT_INTEGER = (WOW64_CONTEXT_i386 | long(0x00000002)) +WOW64_CONTEXT_SEGMENTS = (WOW64_CONTEXT_i386 | long(0x00000004)) +WOW64_CONTEXT_FLOATING_POINT = (WOW64_CONTEXT_i386 | long(0x00000008)) +WOW64_CONTEXT_DEBUG_REGISTERS = (WOW64_CONTEXT_i386 | long(0x00000010)) +WOW64_CONTEXT_EXTENDED_REGISTERS = (WOW64_CONTEXT_i386 | long(0x00000020)) + +WOW64_CONTEXT_FULL = (WOW64_CONTEXT_CONTROL | 
WOW64_CONTEXT_INTEGER | WOW64_CONTEXT_SEGMENTS) +WOW64_CONTEXT_ALL = (WOW64_CONTEXT_CONTROL | WOW64_CONTEXT_INTEGER | WOW64_CONTEXT_SEGMENTS | WOW64_CONTEXT_FLOATING_POINT | WOW64_CONTEXT_DEBUG_REGISTERS | WOW64_CONTEXT_EXTENDED_REGISTERS) + +WOW64_SIZE_OF_80387_REGISTERS = 80 +WOW64_MAXIMUM_SUPPORTED_EXTENSION = 512 + +class WOW64_FLOATING_SAVE_AREA (context_i386.FLOATING_SAVE_AREA): + pass + +class WOW64_CONTEXT (context_i386.CONTEXT): + pass + +class WOW64_LDT_ENTRY (context_i386.LDT_ENTRY): + pass + +PWOW64_FLOATING_SAVE_AREA = POINTER(WOW64_FLOATING_SAVE_AREA) +PWOW64_CONTEXT = POINTER(WOW64_CONTEXT) +PWOW64_LDT_ENTRY = POINTER(WOW64_LDT_ENTRY) + +############################################################################### + +# BOOL WINAPI GetThreadSelectorEntry( +# __in HANDLE hThread, +# __in DWORD dwSelector, +# __out LPLDT_ENTRY lpSelectorEntry +# ); +def GetThreadSelectorEntry(hThread, dwSelector): + _GetThreadSelectorEntry = windll.kernel32.GetThreadSelectorEntry + _GetThreadSelectorEntry.argtypes = [HANDLE, DWORD, LPLDT_ENTRY] + _GetThreadSelectorEntry.restype = bool + _GetThreadSelectorEntry.errcheck = RaiseIfZero + + ldt = LDT_ENTRY() + _GetThreadSelectorEntry(hThread, dwSelector, byref(ldt)) + return ldt + +# BOOL WINAPI GetThreadContext( +# __in HANDLE hThread, +# __inout LPCONTEXT lpContext +# ); +def GetThreadContext(hThread, ContextFlags = None, raw = False): + _GetThreadContext = windll.kernel32.GetThreadContext + _GetThreadContext.argtypes = [HANDLE, LPCONTEXT] + _GetThreadContext.restype = bool + _GetThreadContext.errcheck = RaiseIfZero + + if ContextFlags is None: + ContextFlags = CONTEXT_ALL | CONTEXT_AMD64 + Context = CONTEXT() + Context.ContextFlags = ContextFlags + _GetThreadContext(hThread, byref(Context)) + if raw: + return Context + return Context.to_dict() + +# BOOL WINAPI SetThreadContext( +# __in HANDLE hThread, +# __in const CONTEXT* lpContext +# ); +def SetThreadContext(hThread, lpContext): + _SetThreadContext = windll.kernel32.SetThreadContext + _SetThreadContext.argtypes = [HANDLE, LPCONTEXT] + _SetThreadContext.restype = bool + _SetThreadContext.errcheck = RaiseIfZero + + if isinstance(lpContext, dict): + lpContext = CONTEXT.from_dict(lpContext) + _SetThreadContext(hThread, byref(lpContext)) + +# BOOL Wow64GetThreadSelectorEntry( +# __in HANDLE hThread, +# __in DWORD dwSelector, +# __out PWOW64_LDT_ENTRY lpSelectorEntry +# ); +def Wow64GetThreadSelectorEntry(hThread, dwSelector): + _Wow64GetThreadSelectorEntry = windll.kernel32.Wow64GetThreadSelectorEntry + _Wow64GetThreadSelectorEntry.argtypes = [HANDLE, DWORD, PWOW64_LDT_ENTRY] + _Wow64GetThreadSelectorEntry.restype = bool + _Wow64GetThreadSelectorEntry.errcheck = RaiseIfZero + + lpSelectorEntry = WOW64_LDT_ENTRY() + _Wow64GetThreadSelectorEntry(hThread, dwSelector, byref(lpSelectorEntry)) + return lpSelectorEntry + +# DWORD WINAPI Wow64ResumeThread( +# __in HANDLE hThread +# ); +def Wow64ResumeThread(hThread): + _Wow64ResumeThread = windll.kernel32.Wow64ResumeThread + _Wow64ResumeThread.argtypes = [HANDLE] + _Wow64ResumeThread.restype = DWORD + + previousCount = _Wow64ResumeThread(hThread) + if previousCount == DWORD(-1).value: + raise ctypes.WinError() + return previousCount + +# DWORD WINAPI Wow64SuspendThread( +# __in HANDLE hThread +# ); +def Wow64SuspendThread(hThread): + _Wow64SuspendThread = windll.kernel32.Wow64SuspendThread + _Wow64SuspendThread.argtypes = [HANDLE] + _Wow64SuspendThread.restype = DWORD + + previousCount = _Wow64SuspendThread(hThread) + if previousCount == 
DWORD(-1).value: + raise ctypes.WinError() + return previousCount + +# XXX TODO Use this http://www.nynaeve.net/Code/GetThreadWow64Context.cpp +# Also see http://www.woodmann.com/forum/archive/index.php/t-11162.html + +# BOOL WINAPI Wow64GetThreadContext( +# __in HANDLE hThread, +# __inout PWOW64_CONTEXT lpContext +# ); +def Wow64GetThreadContext(hThread, ContextFlags = None): + _Wow64GetThreadContext = windll.kernel32.Wow64GetThreadContext + _Wow64GetThreadContext.argtypes = [HANDLE, PWOW64_CONTEXT] + _Wow64GetThreadContext.restype = bool + _Wow64GetThreadContext.errcheck = RaiseIfZero + + # XXX doesn't exist in XP 64 bits + + Context = WOW64_CONTEXT() + if ContextFlags is None: + Context.ContextFlags = WOW64_CONTEXT_ALL | WOW64_CONTEXT_i386 + else: + Context.ContextFlags = ContextFlags + _Wow64GetThreadContext(hThread, byref(Context)) + return Context.to_dict() + +# BOOL WINAPI Wow64SetThreadContext( +# __in HANDLE hThread, +# __in const WOW64_CONTEXT *lpContext +# ); +def Wow64SetThreadContext(hThread, lpContext): + _Wow64SetThreadContext = windll.kernel32.Wow64SetThreadContext + _Wow64SetThreadContext.argtypes = [HANDLE, PWOW64_CONTEXT] + _Wow64SetThreadContext.restype = bool + _Wow64SetThreadContext.errcheck = RaiseIfZero + + # XXX doesn't exist in XP 64 bits + + if isinstance(lpContext, dict): + lpContext = WOW64_CONTEXT.from_dict(lpContext) + _Wow64SetThreadContext(hThread, byref(lpContext)) + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py new file mode 100644 index 00000000..91ff2d93 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/context_i386.py @@ -0,0 +1,449 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +CONTEXT structure for i386. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.version import ARCH_I386 + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- CONTEXT structures and constants ----------------------------------------- + +# The following values specify the type of access in the first parameter +# of the exception record when the exception code specifies an access +# violation. +EXCEPTION_READ_FAULT = 0 # exception caused by a read +EXCEPTION_WRITE_FAULT = 1 # exception caused by a write +EXCEPTION_EXECUTE_FAULT = 8 # exception caused by an instruction fetch + +CONTEXT_i386 = 0x00010000 # this assumes that i386 and +CONTEXT_i486 = 0x00010000 # i486 have identical context records + +CONTEXT_CONTROL = (CONTEXT_i386 | long(0x00000001)) # SS:SP, CS:IP, FLAGS, BP +CONTEXT_INTEGER = (CONTEXT_i386 | long(0x00000002)) # AX, BX, CX, DX, SI, DI +CONTEXT_SEGMENTS = (CONTEXT_i386 | long(0x00000004)) # DS, ES, FS, GS +CONTEXT_FLOATING_POINT = (CONTEXT_i386 | long(0x00000008)) # 387 state +CONTEXT_DEBUG_REGISTERS = (CONTEXT_i386 | long(0x00000010)) # DB 0-3,6,7 +CONTEXT_EXTENDED_REGISTERS = (CONTEXT_i386 | long(0x00000020)) # cpu specific extensions + +CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_SEGMENTS) + +CONTEXT_ALL = (CONTEXT_CONTROL | CONTEXT_INTEGER | CONTEXT_SEGMENTS | \ + CONTEXT_FLOATING_POINT | CONTEXT_DEBUG_REGISTERS | \ + CONTEXT_EXTENDED_REGISTERS) + +SIZE_OF_80387_REGISTERS = 80 +MAXIMUM_SUPPORTED_EXTENSION = 512 + +# typedef struct _FLOATING_SAVE_AREA { +# DWORD ControlWord; +# DWORD StatusWord; +# DWORD TagWord; +# DWORD ErrorOffset; +# DWORD ErrorSelector; +# DWORD DataOffset; +# DWORD DataSelector; +# BYTE RegisterArea[SIZE_OF_80387_REGISTERS]; +# DWORD Cr0NpxState; +# } FLOATING_SAVE_AREA; +class FLOATING_SAVE_AREA(Structure): + _pack_ = 1 + _fields_ = [ + ('ControlWord', DWORD), + ('StatusWord', DWORD), + ('TagWord', DWORD), + ('ErrorOffset', DWORD), + ('ErrorSelector', DWORD), + ('DataOffset', DWORD), + ('DataSelector', DWORD), + ('RegisterArea', BYTE * SIZE_OF_80387_REGISTERS), + ('Cr0NpxState', DWORD), + ] + + _integer_members = ('ControlWord', 'StatusWord', 'TagWord', 'ErrorOffset', 'ErrorSelector', 'DataOffset', 'DataSelector', 'Cr0NpxState') + + @classmethod + def from_dict(cls, fsa): + 'Instance a new structure from a Python dictionary.' + fsa = dict(fsa) + s = cls() + for key in cls._integer_members: + setattr(s, key, fsa.get(key)) + ra = fsa.get('RegisterArea', None) + if ra is not None: + for index in compat.xrange(0, SIZE_OF_80387_REGISTERS): + s.RegisterArea[index] = ra[index] + return s + + def to_dict(self): + 'Convert a structure into a Python dictionary.' 
+ fsa = dict() + for key in self._integer_members: + fsa[key] = getattr(self, key) + ra = [ self.RegisterArea[index] for index in compat.xrange(0, SIZE_OF_80387_REGISTERS) ] + ra = tuple(ra) + fsa['RegisterArea'] = ra + return fsa + +PFLOATING_SAVE_AREA = POINTER(FLOATING_SAVE_AREA) +LPFLOATING_SAVE_AREA = PFLOATING_SAVE_AREA + +# typedef struct _CONTEXT { +# DWORD ContextFlags; +# DWORD Dr0; +# DWORD Dr1; +# DWORD Dr2; +# DWORD Dr3; +# DWORD Dr6; +# DWORD Dr7; +# FLOATING_SAVE_AREA FloatSave; +# DWORD SegGs; +# DWORD SegFs; +# DWORD SegEs; +# DWORD SegDs; +# DWORD Edi; +# DWORD Esi; +# DWORD Ebx; +# DWORD Edx; +# DWORD Ecx; +# DWORD Eax; +# DWORD Ebp; +# DWORD Eip; +# DWORD SegCs; +# DWORD EFlags; +# DWORD Esp; +# DWORD SegSs; +# BYTE ExtendedRegisters[MAXIMUM_SUPPORTED_EXTENSION]; +# } CONTEXT; +class CONTEXT(Structure): + arch = ARCH_I386 + + _pack_ = 1 + + # Context Frame + # + # This frame has a several purposes: 1) it is used as an argument to + # NtContinue, 2) is is used to constuct a call frame for APC delivery, + # and 3) it is used in the user level thread creation routines. + # + # The layout of the record conforms to a standard call frame. + + _fields_ = [ + + # The flags values within this flag control the contents of + # a CONTEXT record. + # + # If the context record is used as an input parameter, then + # for each portion of the context record controlled by a flag + # whose value is set, it is assumed that that portion of the + # context record contains valid context. If the context record + # is being used to modify a threads context, then only that + # portion of the threads context will be modified. + # + # If the context record is used as an IN OUT parameter to capture + # the context of a thread, then only those portions of the thread's + # context corresponding to set flags will be returned. + # + # The context record is never used as an OUT only parameter. + + ('ContextFlags', DWORD), + + # This section is specified/returned if CONTEXT_DEBUG_REGISTERS is + # set in ContextFlags. Note that CONTEXT_DEBUG_REGISTERS is NOT + # included in CONTEXT_FULL. + + ('Dr0', DWORD), + ('Dr1', DWORD), + ('Dr2', DWORD), + ('Dr3', DWORD), + ('Dr6', DWORD), + ('Dr7', DWORD), + + # This section is specified/returned if the + # ContextFlags word contains the flag CONTEXT_FLOATING_POINT. + + ('FloatSave', FLOATING_SAVE_AREA), + + # This section is specified/returned if the + # ContextFlags word contains the flag CONTEXT_SEGMENTS. + + ('SegGs', DWORD), + ('SegFs', DWORD), + ('SegEs', DWORD), + ('SegDs', DWORD), + + # This section is specified/returned if the + # ContextFlags word contains the flag CONTEXT_INTEGER. + + ('Edi', DWORD), + ('Esi', DWORD), + ('Ebx', DWORD), + ('Edx', DWORD), + ('Ecx', DWORD), + ('Eax', DWORD), + + # This section is specified/returned if the + # ContextFlags word contains the flag CONTEXT_CONTROL. + + ('Ebp', DWORD), + ('Eip', DWORD), + ('SegCs', DWORD), # MUST BE SANITIZED + ('EFlags', DWORD), # MUST BE SANITIZED + ('Esp', DWORD), + ('SegSs', DWORD), + + # This section is specified/returned if the ContextFlags word + # contains the flag CONTEXT_EXTENDED_REGISTERS. + # The format and contexts are processor specific. 
+ + ('ExtendedRegisters', BYTE * MAXIMUM_SUPPORTED_EXTENSION), + ] + + _ctx_debug = ('Dr0', 'Dr1', 'Dr2', 'Dr3', 'Dr6', 'Dr7') + _ctx_segs = ('SegGs', 'SegFs', 'SegEs', 'SegDs', ) + _ctx_int = ('Edi', 'Esi', 'Ebx', 'Edx', 'Ecx', 'Eax') + _ctx_ctrl = ('Ebp', 'Eip', 'SegCs', 'EFlags', 'Esp', 'SegSs') + + @classmethod + def from_dict(cls, ctx): + 'Instance a new structure from a Python dictionary.' + ctx = Context(ctx) + s = cls() + ContextFlags = ctx['ContextFlags'] + setattr(s, 'ContextFlags', ContextFlags) + if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS: + for key in s._ctx_debug: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_FLOATING_POINT) == CONTEXT_FLOATING_POINT: + fsa = ctx['FloatSave'] + s.FloatSave = FLOATING_SAVE_AREA.from_dict(fsa) + if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS: + for key in s._ctx_segs: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER: + for key in s._ctx_int: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL: + for key in s._ctx_ctrl: + setattr(s, key, ctx[key]) + if (ContextFlags & CONTEXT_EXTENDED_REGISTERS) == CONTEXT_EXTENDED_REGISTERS: + er = ctx['ExtendedRegisters'] + for index in compat.xrange(0, MAXIMUM_SUPPORTED_EXTENSION): + s.ExtendedRegisters[index] = er[index] + return s + + def to_dict(self): + 'Convert a structure into a Python native type.' + ctx = Context() + ContextFlags = self.ContextFlags + ctx['ContextFlags'] = ContextFlags + if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS: + for key in self._ctx_debug: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_FLOATING_POINT) == CONTEXT_FLOATING_POINT: + ctx['FloatSave'] = self.FloatSave.to_dict() + if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS: + for key in self._ctx_segs: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER: + for key in self._ctx_int: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL: + for key in self._ctx_ctrl: + ctx[key] = getattr(self, key) + if (ContextFlags & CONTEXT_EXTENDED_REGISTERS) == CONTEXT_EXTENDED_REGISTERS: + er = [ self.ExtendedRegisters[index] for index in compat.xrange(0, MAXIMUM_SUPPORTED_EXTENSION) ] + er = tuple(er) + ctx['ExtendedRegisters'] = er + return ctx + +PCONTEXT = POINTER(CONTEXT) +LPCONTEXT = PCONTEXT + +class Context(dict): + """ + Register context dictionary for the i386 architecture. 
+ """ + + arch = CONTEXT.arch + + def __get_pc(self): + return self['Eip'] + def __set_pc(self, value): + self['Eip'] = value + pc = property(__get_pc, __set_pc) + + def __get_sp(self): + return self['Esp'] + def __set_sp(self, value): + self['Esp'] = value + sp = property(__get_sp, __set_sp) + + def __get_fp(self): + return self['Ebp'] + def __set_fp(self, value): + self['Ebp'] = value + fp = property(__get_fp, __set_fp) + +#--- LDT_ENTRY structure ------------------------------------------------------ + +# typedef struct _LDT_ENTRY { +# WORD LimitLow; +# WORD BaseLow; +# union { +# struct { +# BYTE BaseMid; +# BYTE Flags1; +# BYTE Flags2; +# BYTE BaseHi; +# } Bytes; +# struct { +# DWORD BaseMid :8; +# DWORD Type :5; +# DWORD Dpl :2; +# DWORD Pres :1; +# DWORD LimitHi :4; +# DWORD Sys :1; +# DWORD Reserved_0 :1; +# DWORD Default_Big :1; +# DWORD Granularity :1; +# DWORD BaseHi :8; +# } Bits; +# } HighWord; +# } LDT_ENTRY, +# *PLDT_ENTRY; + +class _LDT_ENTRY_BYTES_(Structure): + _pack_ = 1 + _fields_ = [ + ('BaseMid', BYTE), + ('Flags1', BYTE), + ('Flags2', BYTE), + ('BaseHi', BYTE), + ] + +class _LDT_ENTRY_BITS_(Structure): + _pack_ = 1 + _fields_ = [ + ('BaseMid', DWORD, 8), + ('Type', DWORD, 5), + ('Dpl', DWORD, 2), + ('Pres', DWORD, 1), + ('LimitHi', DWORD, 4), + ('Sys', DWORD, 1), + ('Reserved_0', DWORD, 1), + ('Default_Big', DWORD, 1), + ('Granularity', DWORD, 1), + ('BaseHi', DWORD, 8), + ] + +class _LDT_ENTRY_HIGHWORD_(Union): + _pack_ = 1 + _fields_ = [ + ('Bytes', _LDT_ENTRY_BYTES_), + ('Bits', _LDT_ENTRY_BITS_), + ] + +class LDT_ENTRY(Structure): + _pack_ = 1 + _fields_ = [ + ('LimitLow', WORD), + ('BaseLow', WORD), + ('HighWord', _LDT_ENTRY_HIGHWORD_), + ] + +PLDT_ENTRY = POINTER(LDT_ENTRY) +LPLDT_ENTRY = PLDT_ENTRY + +############################################################################### + +# BOOL WINAPI GetThreadSelectorEntry( +# __in HANDLE hThread, +# __in DWORD dwSelector, +# __out LPLDT_ENTRY lpSelectorEntry +# ); +def GetThreadSelectorEntry(hThread, dwSelector): + _GetThreadSelectorEntry = windll.kernel32.GetThreadSelectorEntry + _GetThreadSelectorEntry.argtypes = [HANDLE, DWORD, LPLDT_ENTRY] + _GetThreadSelectorEntry.restype = bool + _GetThreadSelectorEntry.errcheck = RaiseIfZero + + ldt = LDT_ENTRY() + _GetThreadSelectorEntry(hThread, dwSelector, byref(ldt)) + return ldt + +# BOOL WINAPI GetThreadContext( +# __in HANDLE hThread, +# __inout LPCONTEXT lpContext +# ); +def GetThreadContext(hThread, ContextFlags = None, raw = False): + _GetThreadContext = windll.kernel32.GetThreadContext + _GetThreadContext.argtypes = [HANDLE, LPCONTEXT] + _GetThreadContext.restype = bool + _GetThreadContext.errcheck = RaiseIfZero + + if ContextFlags is None: + ContextFlags = CONTEXT_ALL | CONTEXT_i386 + Context = CONTEXT() + Context.ContextFlags = ContextFlags + _GetThreadContext(hThread, byref(Context)) + if raw: + return Context + return Context.to_dict() + +# BOOL WINAPI SetThreadContext( +# __in HANDLE hThread, +# __in const CONTEXT* lpContext +# ); +def SetThreadContext(hThread, lpContext): + _SetThreadContext = windll.kernel32.SetThreadContext + _SetThreadContext.argtypes = [HANDLE, LPCONTEXT] + _SetThreadContext.restype = bool + _SetThreadContext.errcheck = RaiseIfZero + + if isinstance(lpContext, dict): + lpContext = CONTEXT.from_dict(lpContext) + _SetThreadContext(hThread, byref(lpContext)) + +#============================================================================== +# This calculates the list of exported symbols. 
+_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py new file mode 100644 index 00000000..0add047d --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/dbghelp.py @@ -0,0 +1,1272 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for dbghelp.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.version import * +from winappdbg.win32.kernel32 import * + +# DbgHelp versions and features list: +# http://msdn.microsoft.com/en-us/library/windows/desktop/ms679294(v=vs.85).aspx + +#------------------------------------------------------------------------------ +# Tries to load the newest version of dbghelp.dll if available. + +def _load_latest_dbghelp_dll(): + + from os import getenv + from os.path import join + + if arch == ARCH_AMD64: + if wow64: + pathname = join( + getenv("ProgramFiles(x86)", + getenv("ProgramFiles")), + "Debugging Tools for Windows (x86)", + "dbghelp.dll") + else: + pathname = join( + getenv("ProgramFiles"), + "Debugging Tools for Windows (x64)", + "dbghelp.dll") + elif arch == ARCH_I386: + pathname = join( + getenv("ProgramFiles"), + "Debugging Tools for Windows (x86)", + "dbghelp.dll") + else: + pathname = None + + if pathname: + try: + _dbghelp = ctypes.windll.LoadLibrary(pathname) + ctypes.windll.dbghelp = _dbghelp + except Exception: + pass + +_load_latest_dbghelp_dll() + +# Recover the old binding of the "os" symbol. +# XXX FIXME not sure if I really need to do this! 
+##from version import os + +#------------------------------------------------------------------------------ + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +# SymGetHomeDirectory "type" values +hdBase = 0 +hdSym = 1 +hdSrc = 2 + +UNDNAME_32_BIT_DECODE = 0x0800 +UNDNAME_COMPLETE = 0x0000 +UNDNAME_NAME_ONLY = 0x1000 +UNDNAME_NO_ACCESS_SPECIFIERS = 0x0080 +UNDNAME_NO_ALLOCATION_LANGUAGE = 0x0010 +UNDNAME_NO_ALLOCATION_MODEL = 0x0008 +UNDNAME_NO_ARGUMENTS = 0x2000 +UNDNAME_NO_CV_THISTYPE = 0x0040 +UNDNAME_NO_FUNCTION_RETURNS = 0x0004 +UNDNAME_NO_LEADING_UNDERSCORES = 0x0001 +UNDNAME_NO_MEMBER_TYPE = 0x0200 +UNDNAME_NO_MS_KEYWORDS = 0x0002 +UNDNAME_NO_MS_THISTYPE = 0x0020 +UNDNAME_NO_RETURN_UDT_MODEL = 0x0400 +UNDNAME_NO_SPECIAL_SYMS = 0x4000 +UNDNAME_NO_THISTYPE = 0x0060 +UNDNAME_NO_THROW_SIGNATURES = 0x0100 + +#--- IMAGEHLP_MODULE structure and related ------------------------------------ + +SYMOPT_ALLOW_ABSOLUTE_SYMBOLS = 0x00000800 +SYMOPT_ALLOW_ZERO_ADDRESS = 0x01000000 +SYMOPT_AUTO_PUBLICS = 0x00010000 +SYMOPT_CASE_INSENSITIVE = 0x00000001 +SYMOPT_DEBUG = 0x80000000 +SYMOPT_DEFERRED_LOADS = 0x00000004 +SYMOPT_DISABLE_SYMSRV_AUTODETECT = 0x02000000 +SYMOPT_EXACT_SYMBOLS = 0x00000400 +SYMOPT_FAIL_CRITICAL_ERRORS = 0x00000200 +SYMOPT_FAVOR_COMPRESSED = 0x00800000 +SYMOPT_FLAT_DIRECTORY = 0x00400000 +SYMOPT_IGNORE_CVREC = 0x00000080 +SYMOPT_IGNORE_IMAGEDIR = 0x00200000 +SYMOPT_IGNORE_NT_SYMPATH = 0x00001000 +SYMOPT_INCLUDE_32BIT_MODULES = 0x00002000 +SYMOPT_LOAD_ANYTHING = 0x00000040 +SYMOPT_LOAD_LINES = 0x00000010 +SYMOPT_NO_CPP = 0x00000008 +SYMOPT_NO_IMAGE_SEARCH = 0x00020000 +SYMOPT_NO_PROMPTS = 0x00080000 +SYMOPT_NO_PUBLICS = 0x00008000 +SYMOPT_NO_UNQUALIFIED_LOADS = 0x00000100 +SYMOPT_OVERWRITE = 0x00100000 +SYMOPT_PUBLICS_ONLY = 0x00004000 +SYMOPT_SECURE = 0x00040000 +SYMOPT_UNDNAME = 0x00000002 + +##SSRVOPT_DWORD +##SSRVOPT_DWORDPTR +##SSRVOPT_GUIDPTR +## +##SSRVOPT_CALLBACK +##SSRVOPT_DOWNSTREAM_STORE +##SSRVOPT_FLAT_DEFAULT_STORE +##SSRVOPT_FAVOR_COMPRESSED +##SSRVOPT_NOCOPY +##SSRVOPT_OVERWRITE +##SSRVOPT_PARAMTYPE +##SSRVOPT_PARENTWIN +##SSRVOPT_PROXY +##SSRVOPT_RESET +##SSRVOPT_SECURE +##SSRVOPT_SETCONTEXT +##SSRVOPT_TRACE +##SSRVOPT_UNATTENDED + +# typedef enum +# { +# SymNone = 0, +# SymCoff, +# SymCv, +# SymPdb, +# SymExport, +# SymDeferred, +# SymSym, +# SymDia, +# SymVirtual, +# NumSymTypes +# } SYM_TYPE; +SymNone = 0 +SymCoff = 1 +SymCv = 2 +SymPdb = 3 +SymExport = 4 +SymDeferred = 5 +SymSym = 6 +SymDia = 7 +SymVirtual = 8 +NumSymTypes = 9 + +# typedef struct _IMAGEHLP_MODULE64 { +# DWORD SizeOfStruct; +# DWORD64 BaseOfImage; +# DWORD ImageSize; +# DWORD TimeDateStamp; +# DWORD CheckSum; +# DWORD NumSyms; +# SYM_TYPE SymType; +# TCHAR ModuleName[32]; +# TCHAR ImageName[256]; +# TCHAR LoadedImageName[256]; +# TCHAR LoadedPdbName[256]; +# DWORD CVSig; +# TCHAR CVData[MAX_PATH*3]; +# DWORD PdbSig; +# GUID PdbSig70; +# DWORD PdbAge; +# BOOL PdbUnmatched; +# BOOL DbgUnmatched; +# BOOL LineNumbers; +# BOOL GlobalSymbols; +# BOOL TypeInfo; +# BOOL SourceIndexed; +# BOOL Publics; +# } IMAGEHLP_MODULE64, *PIMAGEHLP_MODULE64; + +class IMAGEHLP_MODULE (Structure): + _fields_ = [ + ("SizeOfStruct", DWORD), + ("BaseOfImage", DWORD), + ("ImageSize", DWORD), + ("TimeDateStamp", DWORD), + ("CheckSum", DWORD), + ("NumSyms", DWORD), + ("SymType", DWORD), # SYM_TYPE + 
("ModuleName", CHAR * 32), + ("ImageName", CHAR * 256), + ("LoadedImageName", CHAR * 256), + ] +PIMAGEHLP_MODULE = POINTER(IMAGEHLP_MODULE) + +class IMAGEHLP_MODULE64 (Structure): + _fields_ = [ + ("SizeOfStruct", DWORD), + ("BaseOfImage", DWORD64), + ("ImageSize", DWORD), + ("TimeDateStamp", DWORD), + ("CheckSum", DWORD), + ("NumSyms", DWORD), + ("SymType", DWORD), # SYM_TYPE + ("ModuleName", CHAR * 32), + ("ImageName", CHAR * 256), + ("LoadedImageName", CHAR * 256), + ("LoadedPdbName", CHAR * 256), + ("CVSig", DWORD), + ("CVData", CHAR * (MAX_PATH * 3)), + ("PdbSig", DWORD), + ("PdbSig70", GUID), + ("PdbAge", DWORD), + ("PdbUnmatched", BOOL), + ("DbgUnmatched", BOOL), + ("LineNumbers", BOOL), + ("GlobalSymbols", BOOL), + ("TypeInfo", BOOL), + ("SourceIndexed", BOOL), + ("Publics", BOOL), + ] +PIMAGEHLP_MODULE64 = POINTER(IMAGEHLP_MODULE64) + +class IMAGEHLP_MODULEW (Structure): + _fields_ = [ + ("SizeOfStruct", DWORD), + ("BaseOfImage", DWORD), + ("ImageSize", DWORD), + ("TimeDateStamp", DWORD), + ("CheckSum", DWORD), + ("NumSyms", DWORD), + ("SymType", DWORD), # SYM_TYPE + ("ModuleName", WCHAR * 32), + ("ImageName", WCHAR * 256), + ("LoadedImageName", WCHAR * 256), + ] +PIMAGEHLP_MODULEW = POINTER(IMAGEHLP_MODULEW) + +class IMAGEHLP_MODULEW64 (Structure): + _fields_ = [ + ("SizeOfStruct", DWORD), + ("BaseOfImage", DWORD64), + ("ImageSize", DWORD), + ("TimeDateStamp", DWORD), + ("CheckSum", DWORD), + ("NumSyms", DWORD), + ("SymType", DWORD), # SYM_TYPE + ("ModuleName", WCHAR * 32), + ("ImageName", WCHAR * 256), + ("LoadedImageName", WCHAR * 256), + ("LoadedPdbName", WCHAR * 256), + ("CVSig", DWORD), + ("CVData", WCHAR * (MAX_PATH * 3)), + ("PdbSig", DWORD), + ("PdbSig70", GUID), + ("PdbAge", DWORD), + ("PdbUnmatched", BOOL), + ("DbgUnmatched", BOOL), + ("LineNumbers", BOOL), + ("GlobalSymbols", BOOL), + ("TypeInfo", BOOL), + ("SourceIndexed", BOOL), + ("Publics", BOOL), + ] +PIMAGEHLP_MODULEW64 = POINTER(IMAGEHLP_MODULEW64) + +#--- dbghelp.dll -------------------------------------------------------------- + +# XXX the ANSI versions of these functions don't end in "A" as expected! 
+ +# BOOL WINAPI MakeSureDirectoryPathExists( +# _In_ PCSTR DirPath +# ); +def MakeSureDirectoryPathExistsA(DirPath): + _MakeSureDirectoryPathExists = windll.dbghelp.MakeSureDirectoryPathExists + _MakeSureDirectoryPathExists.argtypes = [LPSTR] + _MakeSureDirectoryPathExists.restype = bool + _MakeSureDirectoryPathExists.errcheck = RaiseIfZero + return _MakeSureDirectoryPathExists(DirPath) + +MakeSureDirectoryPathExistsW = MakeWideVersion(MakeSureDirectoryPathExistsA) +MakeSureDirectoryPathExists = GuessStringType(MakeSureDirectoryPathExistsA, MakeSureDirectoryPathExistsW) + +# BOOL WINAPI SymInitialize( +# __in HANDLE hProcess, +# __in_opt PCTSTR UserSearchPath, +# __in BOOL fInvadeProcess +# ); +def SymInitializeA(hProcess, UserSearchPath = None, fInvadeProcess = False): + _SymInitialize = windll.dbghelp.SymInitialize + _SymInitialize.argtypes = [HANDLE, LPSTR, BOOL] + _SymInitialize.restype = bool + _SymInitialize.errcheck = RaiseIfZero + if not UserSearchPath: + UserSearchPath = None + _SymInitialize(hProcess, UserSearchPath, fInvadeProcess) + +SymInitializeW = MakeWideVersion(SymInitializeA) +SymInitialize = GuessStringType(SymInitializeA, SymInitializeW) + +# BOOL WINAPI SymCleanup( +# __in HANDLE hProcess +# ); +def SymCleanup(hProcess): + _SymCleanup = windll.dbghelp.SymCleanup + _SymCleanup.argtypes = [HANDLE] + _SymCleanup.restype = bool + _SymCleanup.errcheck = RaiseIfZero + _SymCleanup(hProcess) + +# BOOL WINAPI SymRefreshModuleList( +# __in HANDLE hProcess +# ); +def SymRefreshModuleList(hProcess): + _SymRefreshModuleList = windll.dbghelp.SymRefreshModuleList + _SymRefreshModuleList.argtypes = [HANDLE] + _SymRefreshModuleList.restype = bool + _SymRefreshModuleList.errcheck = RaiseIfZero + _SymRefreshModuleList(hProcess) + +# BOOL WINAPI SymSetParentWindow( +# __in HWND hwnd +# ); +def SymSetParentWindow(hwnd): + _SymSetParentWindow = windll.dbghelp.SymSetParentWindow + _SymSetParentWindow.argtypes = [HWND] + _SymSetParentWindow.restype = bool + _SymSetParentWindow.errcheck = RaiseIfZero + _SymSetParentWindow(hwnd) + +# DWORD WINAPI SymSetOptions( +# __in DWORD SymOptions +# ); +def SymSetOptions(SymOptions): + _SymSetOptions = windll.dbghelp.SymSetOptions + _SymSetOptions.argtypes = [DWORD] + _SymSetOptions.restype = DWORD + _SymSetOptions.errcheck = RaiseIfZero + _SymSetOptions(SymOptions) + +# DWORD WINAPI SymGetOptions(void); +def SymGetOptions(): + _SymGetOptions = windll.dbghelp.SymGetOptions + _SymGetOptions.argtypes = [] + _SymGetOptions.restype = DWORD + return _SymGetOptions() + +# DWORD WINAPI SymLoadModule( +# __in HANDLE hProcess, +# __in_opt HANDLE hFile, +# __in_opt PCSTR ImageName, +# __in_opt PCSTR ModuleName, +# __in DWORD BaseOfDll, +# __in DWORD SizeOfDll +# ); +def SymLoadModuleA(hProcess, hFile = None, ImageName = None, ModuleName = None, BaseOfDll = None, SizeOfDll = None): + _SymLoadModule = windll.dbghelp.SymLoadModule + _SymLoadModule.argtypes = [HANDLE, HANDLE, LPSTR, LPSTR, DWORD, DWORD] + _SymLoadModule.restype = DWORD + + if not ImageName: + ImageName = None + if not ModuleName: + ModuleName = None + if not BaseOfDll: + BaseOfDll = 0 + if not SizeOfDll: + SizeOfDll = 0 + SetLastError(ERROR_SUCCESS) + lpBaseAddress = _SymLoadModule(hProcess, hFile, ImageName, ModuleName, BaseOfDll, SizeOfDll) + if lpBaseAddress == NULL: + dwErrorCode = GetLastError() + if dwErrorCode != ERROR_SUCCESS: + raise ctypes.WinError(dwErrorCode) + return lpBaseAddress + +SymLoadModuleW = MakeWideVersion(SymLoadModuleA) +SymLoadModule = GuessStringType(SymLoadModuleA, 
SymLoadModuleW) + +# DWORD64 WINAPI SymLoadModule64( +# __in HANDLE hProcess, +# __in_opt HANDLE hFile, +# __in_opt PCSTR ImageName, +# __in_opt PCSTR ModuleName, +# __in DWORD64 BaseOfDll, +# __in DWORD SizeOfDll +# ); +def SymLoadModule64A(hProcess, hFile = None, ImageName = None, ModuleName = None, BaseOfDll = None, SizeOfDll = None): + _SymLoadModule64 = windll.dbghelp.SymLoadModule64 + _SymLoadModule64.argtypes = [HANDLE, HANDLE, LPSTR, LPSTR, DWORD64, DWORD] + _SymLoadModule64.restype = DWORD64 + + if not ImageName: + ImageName = None + if not ModuleName: + ModuleName = None + if not BaseOfDll: + BaseOfDll = 0 + if not SizeOfDll: + SizeOfDll = 0 + SetLastError(ERROR_SUCCESS) + lpBaseAddress = _SymLoadModule64(hProcess, hFile, ImageName, ModuleName, BaseOfDll, SizeOfDll) + if lpBaseAddress == NULL: + dwErrorCode = GetLastError() + if dwErrorCode != ERROR_SUCCESS: + raise ctypes.WinError(dwErrorCode) + return lpBaseAddress + +SymLoadModule64W = MakeWideVersion(SymLoadModule64A) +SymLoadModule64 = GuessStringType(SymLoadModule64A, SymLoadModule64W) + +# BOOL WINAPI SymUnloadModule( +# __in HANDLE hProcess, +# __in DWORD BaseOfDll +# ); +def SymUnloadModule(hProcess, BaseOfDll): + _SymUnloadModule = windll.dbghelp.SymUnloadModule + _SymUnloadModule.argtypes = [HANDLE, DWORD] + _SymUnloadModule.restype = bool + _SymUnloadModule.errcheck = RaiseIfZero + _SymUnloadModule(hProcess, BaseOfDll) + +# BOOL WINAPI SymUnloadModule64( +# __in HANDLE hProcess, +# __in DWORD64 BaseOfDll +# ); +def SymUnloadModule64(hProcess, BaseOfDll): + _SymUnloadModule64 = windll.dbghelp.SymUnloadModule64 + _SymUnloadModule64.argtypes = [HANDLE, DWORD64] + _SymUnloadModule64.restype = bool + _SymUnloadModule64.errcheck = RaiseIfZero + _SymUnloadModule64(hProcess, BaseOfDll) + +# BOOL WINAPI SymGetModuleInfo( +# __in HANDLE hProcess, +# __in DWORD dwAddr, +# __out PIMAGEHLP_MODULE ModuleInfo +# ); +def SymGetModuleInfoA(hProcess, dwAddr): + _SymGetModuleInfo = windll.dbghelp.SymGetModuleInfo + _SymGetModuleInfo.argtypes = [HANDLE, DWORD, PIMAGEHLP_MODULE] + _SymGetModuleInfo.restype = bool + _SymGetModuleInfo.errcheck = RaiseIfZero + + ModuleInfo = IMAGEHLP_MODULE() + ModuleInfo.SizeOfStruct = sizeof(ModuleInfo) + _SymGetModuleInfo(hProcess, dwAddr, byref(ModuleInfo)) + return ModuleInfo + +def SymGetModuleInfoW(hProcess, dwAddr): + _SymGetModuleInfoW = windll.dbghelp.SymGetModuleInfoW + _SymGetModuleInfoW.argtypes = [HANDLE, DWORD, PIMAGEHLP_MODULEW] + _SymGetModuleInfoW.restype = bool + _SymGetModuleInfoW.errcheck = RaiseIfZero + + ModuleInfo = IMAGEHLP_MODULEW() + ModuleInfo.SizeOfStruct = sizeof(ModuleInfo) + _SymGetModuleInfoW(hProcess, dwAddr, byref(ModuleInfo)) + return ModuleInfo + +SymGetModuleInfo = GuessStringType(SymGetModuleInfoA, SymGetModuleInfoW) + +# BOOL WINAPI SymGetModuleInfo64( +# __in HANDLE hProcess, +# __in DWORD64 dwAddr, +# __out PIMAGEHLP_MODULE64 ModuleInfo +# ); +def SymGetModuleInfo64A(hProcess, dwAddr): + _SymGetModuleInfo64 = windll.dbghelp.SymGetModuleInfo64 + _SymGetModuleInfo64.argtypes = [HANDLE, DWORD64, PIMAGEHLP_MODULE64] + _SymGetModuleInfo64.restype = bool + _SymGetModuleInfo64.errcheck = RaiseIfZero + + ModuleInfo = IMAGEHLP_MODULE64() + ModuleInfo.SizeOfStruct = sizeof(ModuleInfo) + _SymGetModuleInfo64(hProcess, dwAddr, byref(ModuleInfo)) + return ModuleInfo + +def SymGetModuleInfo64W(hProcess, dwAddr): + _SymGetModuleInfo64W = windll.dbghelp.SymGetModuleInfo64W + _SymGetModuleInfo64W.argtypes = [HANDLE, DWORD64, PIMAGEHLP_MODULE64W] + _SymGetModuleInfo64W.restype = bool + 
_SymGetModuleInfo64W.errcheck = RaiseIfZero + + ModuleInfo = IMAGEHLP_MODULE64W() + ModuleInfo.SizeOfStruct = sizeof(ModuleInfo) + _SymGetModuleInfo64W(hProcess, dwAddr, byref(ModuleInfo)) + return ModuleInfo + +SymGetModuleInfo64 = GuessStringType(SymGetModuleInfo64A, SymGetModuleInfo64W) + +# BOOL CALLBACK SymEnumerateModulesProc( +# __in PCTSTR ModuleName, +# __in DWORD BaseOfDll, +# __in_opt PVOID UserContext +# ); +PSYM_ENUMMODULES_CALLBACK = WINFUNCTYPE(BOOL, LPSTR, DWORD, PVOID) +PSYM_ENUMMODULES_CALLBACKW = WINFUNCTYPE(BOOL, LPWSTR, DWORD, PVOID) + +# BOOL CALLBACK SymEnumerateModulesProc64( +# __in PCTSTR ModuleName, +# __in DWORD64 BaseOfDll, +# __in_opt PVOID UserContext +# ); +PSYM_ENUMMODULES_CALLBACK64 = WINFUNCTYPE(BOOL, LPSTR, DWORD64, PVOID) +PSYM_ENUMMODULES_CALLBACKW64 = WINFUNCTYPE(BOOL, LPWSTR, DWORD64, PVOID) + +# BOOL WINAPI SymEnumerateModules( +# __in HANDLE hProcess, +# __in PSYM_ENUMMODULES_CALLBACK EnumModulesCallback, +# __in_opt PVOID UserContext +# ); +def SymEnumerateModulesA(hProcess, EnumModulesCallback, UserContext = None): + _SymEnumerateModules = windll.dbghelp.SymEnumerateModules + _SymEnumerateModules.argtypes = [HANDLE, PSYM_ENUMMODULES_CALLBACK, PVOID] + _SymEnumerateModules.restype = bool + _SymEnumerateModules.errcheck = RaiseIfZero + + EnumModulesCallback = PSYM_ENUMMODULES_CALLBACK(EnumModulesCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateModules(hProcess, EnumModulesCallback, UserContext) + +def SymEnumerateModulesW(hProcess, EnumModulesCallback, UserContext = None): + _SymEnumerateModulesW = windll.dbghelp.SymEnumerateModulesW + _SymEnumerateModulesW.argtypes = [HANDLE, PSYM_ENUMMODULES_CALLBACKW, PVOID] + _SymEnumerateModulesW.restype = bool + _SymEnumerateModulesW.errcheck = RaiseIfZero + + EnumModulesCallback = PSYM_ENUMMODULES_CALLBACKW(EnumModulesCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateModulesW(hProcess, EnumModulesCallback, UserContext) + +SymEnumerateModules = GuessStringType(SymEnumerateModulesA, SymEnumerateModulesW) + +# BOOL WINAPI SymEnumerateModules64( +# __in HANDLE hProcess, +# __in PSYM_ENUMMODULES_CALLBACK64 EnumModulesCallback, +# __in_opt PVOID UserContext +# ); +def SymEnumerateModules64A(hProcess, EnumModulesCallback, UserContext = None): + _SymEnumerateModules64 = windll.dbghelp.SymEnumerateModules64 + _SymEnumerateModules64.argtypes = [HANDLE, PSYM_ENUMMODULES_CALLBACK64, PVOID] + _SymEnumerateModules64.restype = bool + _SymEnumerateModules64.errcheck = RaiseIfZero + + EnumModulesCallback = PSYM_ENUMMODULES_CALLBACK64(EnumModulesCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateModules64(hProcess, EnumModulesCallback, UserContext) + +def SymEnumerateModules64W(hProcess, EnumModulesCallback, UserContext = None): + _SymEnumerateModules64W = windll.dbghelp.SymEnumerateModules64W + _SymEnumerateModules64W.argtypes = [HANDLE, PSYM_ENUMMODULES_CALLBACK64W, PVOID] + _SymEnumerateModules64W.restype = bool + _SymEnumerateModules64W.errcheck = RaiseIfZero + + EnumModulesCallback = PSYM_ENUMMODULES_CALLBACK64W(EnumModulesCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateModules64W(hProcess, EnumModulesCallback, UserContext) + +SymEnumerateModules64 = GuessStringType(SymEnumerateModules64A, SymEnumerateModules64W) + +# BOOL 
CALLBACK SymEnumerateSymbolsProc( +# __in PCTSTR SymbolName, +# __in DWORD SymbolAddress, +# __in ULONG SymbolSize, +# __in_opt PVOID UserContext +# ); +PSYM_ENUMSYMBOLS_CALLBACK = WINFUNCTYPE(BOOL, LPSTR, DWORD, ULONG, PVOID) +PSYM_ENUMSYMBOLS_CALLBACKW = WINFUNCTYPE(BOOL, LPWSTR, DWORD, ULONG, PVOID) + +# BOOL CALLBACK SymEnumerateSymbolsProc64( +# __in PCTSTR SymbolName, +# __in DWORD64 SymbolAddress, +# __in ULONG SymbolSize, +# __in_opt PVOID UserContext +# ); +PSYM_ENUMSYMBOLS_CALLBACK64 = WINFUNCTYPE(BOOL, LPSTR, DWORD64, ULONG, PVOID) +PSYM_ENUMSYMBOLS_CALLBACKW64 = WINFUNCTYPE(BOOL, LPWSTR, DWORD64, ULONG, PVOID) + +# BOOL WINAPI SymEnumerateSymbols( +# __in HANDLE hProcess, +# __in ULONG BaseOfDll, +# __in PSYM_ENUMSYMBOLS_CALLBACK EnumSymbolsCallback, +# __in_opt PVOID UserContext +# ); +def SymEnumerateSymbolsA(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext = None): + _SymEnumerateSymbols = windll.dbghelp.SymEnumerateSymbols + _SymEnumerateSymbols.argtypes = [HANDLE, ULONG, PSYM_ENUMSYMBOLS_CALLBACK, PVOID] + _SymEnumerateSymbols.restype = bool + _SymEnumerateSymbols.errcheck = RaiseIfZero + + EnumSymbolsCallback = PSYM_ENUMSYMBOLS_CALLBACK(EnumSymbolsCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateSymbols(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext) + +def SymEnumerateSymbolsW(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext = None): + _SymEnumerateSymbolsW = windll.dbghelp.SymEnumerateSymbolsW + _SymEnumerateSymbolsW.argtypes = [HANDLE, ULONG, PSYM_ENUMSYMBOLS_CALLBACKW, PVOID] + _SymEnumerateSymbolsW.restype = bool + _SymEnumerateSymbolsW.errcheck = RaiseIfZero + + EnumSymbolsCallback = PSYM_ENUMSYMBOLS_CALLBACKW(EnumSymbolsCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateSymbolsW(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext) + +SymEnumerateSymbols = GuessStringType(SymEnumerateSymbolsA, SymEnumerateSymbolsW) + +# BOOL WINAPI SymEnumerateSymbols64( +# __in HANDLE hProcess, +# __in ULONG64 BaseOfDll, +# __in PSYM_ENUMSYMBOLS_CALLBACK64 EnumSymbolsCallback, +# __in_opt PVOID UserContext +# ); +def SymEnumerateSymbols64A(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext = None): + _SymEnumerateSymbols64 = windll.dbghelp.SymEnumerateSymbols64 + _SymEnumerateSymbols64.argtypes = [HANDLE, ULONG64, PSYM_ENUMSYMBOLS_CALLBACK64, PVOID] + _SymEnumerateSymbols64.restype = bool + _SymEnumerateSymbols64.errcheck = RaiseIfZero + + EnumSymbolsCallback = PSYM_ENUMSYMBOLS_CALLBACK64(EnumSymbolsCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateSymbols64(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext) + +def SymEnumerateSymbols64W(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext = None): + _SymEnumerateSymbols64W = windll.dbghelp.SymEnumerateSymbols64W + _SymEnumerateSymbols64W.argtypes = [HANDLE, ULONG64, PSYM_ENUMSYMBOLS_CALLBACK64W, PVOID] + _SymEnumerateSymbols64W.restype = bool + _SymEnumerateSymbols64W.errcheck = RaiseIfZero + + EnumSymbolsCallback = PSYM_ENUMSYMBOLS_CALLBACK64W(EnumSymbolsCallback) + if UserContext: + UserContext = ctypes.pointer(UserContext) + else: + UserContext = LPVOID(NULL) + _SymEnumerateSymbols64W(hProcess, BaseOfDll, EnumSymbolsCallback, UserContext) + +SymEnumerateSymbols64 = GuessStringType(SymEnumerateSymbols64A, SymEnumerateSymbols64W) + +# DWORD WINAPI UnDecorateSymbolName( +# __in PCTSTR 
DecoratedName, +# __out PTSTR UnDecoratedName, +# __in DWORD UndecoratedLength, +# __in DWORD Flags +# ); +def UnDecorateSymbolNameA(DecoratedName, Flags = UNDNAME_COMPLETE): + _UnDecorateSymbolNameA = windll.dbghelp.UnDecorateSymbolName + _UnDecorateSymbolNameA.argtypes = [LPSTR, LPSTR, DWORD, DWORD] + _UnDecorateSymbolNameA.restype = DWORD + _UnDecorateSymbolNameA.errcheck = RaiseIfZero + + UndecoratedLength = _UnDecorateSymbolNameA(DecoratedName, None, 0, Flags) + UnDecoratedName = ctypes.create_string_buffer('', UndecoratedLength + 1) + _UnDecorateSymbolNameA(DecoratedName, UnDecoratedName, UndecoratedLength, Flags) + return UnDecoratedName.value + +def UnDecorateSymbolNameW(DecoratedName, Flags = UNDNAME_COMPLETE): + _UnDecorateSymbolNameW = windll.dbghelp.UnDecorateSymbolNameW + _UnDecorateSymbolNameW.argtypes = [LPWSTR, LPWSTR, DWORD, DWORD] + _UnDecorateSymbolNameW.restype = DWORD + _UnDecorateSymbolNameW.errcheck = RaiseIfZero + + UndecoratedLength = _UnDecorateSymbolNameW(DecoratedName, None, 0, Flags) + UnDecoratedName = ctypes.create_unicode_buffer(u'', UndecoratedLength + 1) + _UnDecorateSymbolNameW(DecoratedName, UnDecoratedName, UndecoratedLength, Flags) + return UnDecoratedName.value + +UnDecorateSymbolName = GuessStringType(UnDecorateSymbolNameA, UnDecorateSymbolNameW) + +# BOOL WINAPI SymGetSearchPath( +# __in HANDLE hProcess, +# __out PTSTR SearchPath, +# __in DWORD SearchPathLength +# ); +def SymGetSearchPathA(hProcess): + _SymGetSearchPath = windll.dbghelp.SymGetSearchPath + _SymGetSearchPath.argtypes = [HANDLE, LPSTR, DWORD] + _SymGetSearchPath.restype = bool + _SymGetSearchPath.errcheck = RaiseIfZero + + SearchPathLength = MAX_PATH + SearchPath = ctypes.create_string_buffer("", SearchPathLength) + _SymGetSearchPath(hProcess, SearchPath, SearchPathLength) + return SearchPath.value + +def SymGetSearchPathW(hProcess): + _SymGetSearchPathW = windll.dbghelp.SymGetSearchPathW + _SymGetSearchPathW.argtypes = [HANDLE, LPWSTR, DWORD] + _SymGetSearchPathW.restype = bool + _SymGetSearchPathW.errcheck = RaiseIfZero + + SearchPathLength = MAX_PATH + SearchPath = ctypes.create_unicode_buffer(u"", SearchPathLength) + _SymGetSearchPathW(hProcess, SearchPath, SearchPathLength) + return SearchPath.value + +SymGetSearchPath = GuessStringType(SymGetSearchPathA, SymGetSearchPathW) + +# BOOL WINAPI SymSetSearchPath( +# __in HANDLE hProcess, +# __in_opt PCTSTR SearchPath +# ); +def SymSetSearchPathA(hProcess, SearchPath = None): + _SymSetSearchPath = windll.dbghelp.SymSetSearchPath + _SymSetSearchPath.argtypes = [HANDLE, LPSTR] + _SymSetSearchPath.restype = bool + _SymSetSearchPath.errcheck = RaiseIfZero + if not SearchPath: + SearchPath = None + _SymSetSearchPath(hProcess, SearchPath) + +def SymSetSearchPathW(hProcess, SearchPath = None): + _SymSetSearchPathW = windll.dbghelp.SymSetSearchPathW + _SymSetSearchPathW.argtypes = [HANDLE, LPWSTR] + _SymSetSearchPathW.restype = bool + _SymSetSearchPathW.errcheck = RaiseIfZero + if not SearchPath: + SearchPath = None + _SymSetSearchPathW(hProcess, SearchPath) + +SymSetSearchPath = GuessStringType(SymSetSearchPathA, SymSetSearchPathW) + +# PTCHAR WINAPI SymGetHomeDirectory( +# __in DWORD type, +# __out PTSTR dir, +# __in size_t size +# ); +def SymGetHomeDirectoryA(type): + _SymGetHomeDirectoryA = windll.dbghelp.SymGetHomeDirectoryA + _SymGetHomeDirectoryA.argtypes = [DWORD, LPSTR, SIZE_T] + _SymGetHomeDirectoryA.restype = LPSTR + _SymGetHomeDirectoryA.errcheck = RaiseIfZero + + size = MAX_PATH + dir = ctypes.create_string_buffer("", size) + 
_SymGetHomeDirectoryA(type, dir, size) + return dir.value + +def SymGetHomeDirectoryW(type): + _SymGetHomeDirectoryW = windll.dbghelp.SymGetHomeDirectoryW + _SymGetHomeDirectoryW.argtypes = [DWORD, LPWSTR, SIZE_T] + _SymGetHomeDirectoryW.restype = LPWSTR + _SymGetHomeDirectoryW.errcheck = RaiseIfZero + + size = MAX_PATH + dir = ctypes.create_unicode_buffer(u"", size) + _SymGetHomeDirectoryW(type, dir, size) + return dir.value + +SymGetHomeDirectory = GuessStringType(SymGetHomeDirectoryA, SymGetHomeDirectoryW) + +# PTCHAR WINAPI SymSetHomeDirectory( +# __in HANDLE hProcess, +# __in_opt PCTSTR dir +# ); +def SymSetHomeDirectoryA(hProcess, dir = None): + _SymSetHomeDirectoryA = windll.dbghelp.SymSetHomeDirectoryA + _SymSetHomeDirectoryA.argtypes = [HANDLE, LPSTR] + _SymSetHomeDirectoryA.restype = LPSTR + _SymSetHomeDirectoryA.errcheck = RaiseIfZero + if not dir: + dir = None + _SymSetHomeDirectoryA(hProcess, dir) + return dir + +def SymSetHomeDirectoryW(hProcess, dir = None): + _SymSetHomeDirectoryW = windll.dbghelp.SymSetHomeDirectoryW + _SymSetHomeDirectoryW.argtypes = [HANDLE, LPWSTR] + _SymSetHomeDirectoryW.restype = LPWSTR + _SymSetHomeDirectoryW.errcheck = RaiseIfZero + if not dir: + dir = None + _SymSetHomeDirectoryW(hProcess, dir) + return dir + +SymSetHomeDirectory = GuessStringType(SymSetHomeDirectoryA, SymSetHomeDirectoryW) + +#--- DbgHelp 5+ support, patch by Neitsa -------------------------------------- + +# XXX TODO +# + use the GuessStringType decorator for ANSI/Wide versions +# + replace hardcoded struct sizes with sizeof() calls +# + StackWalk64 should raise on error, but something has to be done about it +# not setting the last error code (maybe we should call SetLastError +# ourselves with a default error code?) +# /Mario + +#maximum length of a symbol name +MAX_SYM_NAME = 2000 + +class SYM_INFO(Structure): + _fields_ = [ + ("SizeOfStruct", ULONG), + ("TypeIndex", ULONG), + ("Reserved", ULONG64 * 2), + ("Index", ULONG), + ("Size", ULONG), + ("ModBase", ULONG64), + ("Flags", ULONG), + ("Value", ULONG64), + ("Address", ULONG64), + ("Register", ULONG), + ("Scope", ULONG), + ("Tag", ULONG), + ("NameLen", ULONG), + ("MaxNameLen", ULONG), + ("Name", CHAR * (MAX_SYM_NAME + 1)), + ] +PSYM_INFO = POINTER(SYM_INFO) + +class SYM_INFOW(Structure): + _fields_ = [ + ("SizeOfStruct", ULONG), + ("TypeIndex", ULONG), + ("Reserved", ULONG64 * 2), + ("Index", ULONG), + ("Size", ULONG), + ("ModBase", ULONG64), + ("Flags", ULONG), + ("Value", ULONG64), + ("Address", ULONG64), + ("Register", ULONG), + ("Scope", ULONG), + ("Tag", ULONG), + ("NameLen", ULONG), + ("MaxNameLen", ULONG), + ("Name", WCHAR * (MAX_SYM_NAME + 1)), + ] +PSYM_INFOW = POINTER(SYM_INFOW) + +#=============================================================================== +# BOOL WINAPI SymFromName( +# __in HANDLE hProcess, +# __in PCTSTR Name, +# __inout PSYMBOL_INFO Symbol +# ); +#=============================================================================== +def SymFromName(hProcess, Name): + _SymFromNameA = windll.dbghelp.SymFromName + _SymFromNameA.argtypes = [HANDLE, LPSTR, PSYM_INFO] + _SymFromNameA.restype = bool + _SymFromNameA.errcheck = RaiseIfZero + + SymInfo = SYM_INFO() + SymInfo.SizeOfStruct = 88 # *don't modify*: sizeof(SYMBOL_INFO) in C. 
+ SymInfo.MaxNameLen = MAX_SYM_NAME + + _SymFromNameA(hProcess, Name, byref(SymInfo)) + + return SymInfo + +def SymFromNameW(hProcess, Name): + _SymFromNameW = windll.dbghelp.SymFromNameW + _SymFromNameW.argtypes = [HANDLE, LPWSTR, PSYM_INFOW] + _SymFromNameW.restype = bool + _SymFromNameW.errcheck = RaiseIfZero + + SymInfo = SYM_INFOW() + SymInfo.SizeOfStruct = 88 # *don't modify*: sizeof(SYMBOL_INFOW) in C. + SymInfo.MaxNameLen = MAX_SYM_NAME + + _SymFromNameW(hProcess, Name, byref(SymInfo)) + + return SymInfo + +#=============================================================================== +# BOOL WINAPI SymFromAddr( +# __in HANDLE hProcess, +# __in DWORD64 Address, +# __out_opt PDWORD64 Displacement, +# __inout PSYMBOL_INFO Symbol +# ); +#=============================================================================== +def SymFromAddr(hProcess, Address): + _SymFromAddr = windll.dbghelp.SymFromAddr + _SymFromAddr.argtypes = [HANDLE, DWORD64, PDWORD64, PSYM_INFO] + _SymFromAddr.restype = bool + _SymFromAddr.errcheck = RaiseIfZero + + SymInfo = SYM_INFO() + SymInfo.SizeOfStruct = 88 # *don't modify*: sizeof(SYMBOL_INFO) in C. + SymInfo.MaxNameLen = MAX_SYM_NAME + + Displacement = DWORD64(0) + _SymFromAddr(hProcess, Address, byref(Displacement), byref(SymInfo)) + + return (Displacement.value, SymInfo) + +def SymFromAddrW(hProcess, Address): + _SymFromAddr = windll.dbghelp.SymFromAddrW + _SymFromAddr.argtypes = [HANDLE, DWORD64, PDWORD64, PSYM_INFOW] + _SymFromAddr.restype = bool + _SymFromAddr.errcheck = RaiseIfZero + + SymInfo = SYM_INFOW() + SymInfo.SizeOfStruct = 88 # *don't modify*: sizeof(SYMBOL_INFOW) in C. + SymInfo.MaxNameLen = MAX_SYM_NAME + + Displacement = DWORD64(0) + _SymFromAddr(hProcess, Address, byref(Displacement), byref(SymInfo)) + + return (Displacement.value, SymInfo) + +#=============================================================================== +# typedef struct _IMAGEHLP_SYMBOL64 { +# DWORD SizeOfStruct; +# DWORD64 Address; +# DWORD Size; +# DWORD Flags; +# DWORD MaxNameLength; +# CHAR Name[1]; +# } IMAGEHLP_SYMBOL64, *PIMAGEHLP_SYMBOL64; +#=============================================================================== +class IMAGEHLP_SYMBOL64 (Structure): + _fields_ = [ + ("SizeOfStruct", DWORD), + ("Address", DWORD64), + ("Size", DWORD), + ("Flags", DWORD), + ("MaxNameLength", DWORD), + ("Name", CHAR * (MAX_SYM_NAME + 1)), + ] +PIMAGEHLP_SYMBOL64 = POINTER(IMAGEHLP_SYMBOL64) + +#=============================================================================== +# typedef struct _IMAGEHLP_SYMBOLW64 { +# DWORD SizeOfStruct; +# DWORD64 Address; +# DWORD Size; +# DWORD Flags; +# DWORD MaxNameLength; +# WCHAR Name[1]; +# } IMAGEHLP_SYMBOLW64, *PIMAGEHLP_SYMBOLW64; +#=============================================================================== +class IMAGEHLP_SYMBOLW64 (Structure): + _fields_ = [ + ("SizeOfStruct", DWORD), + ("Address", DWORD64), + ("Size", DWORD), + ("Flags", DWORD), + ("MaxNameLength", DWORD), + ("Name", WCHAR * (MAX_SYM_NAME + 1)), + ] +PIMAGEHLP_SYMBOLW64 = POINTER(IMAGEHLP_SYMBOLW64) + +#=============================================================================== +# BOOL WINAPI SymGetSymFromAddr64( +# __in HANDLE hProcess, +# __in DWORD64 Address, +# __out_opt PDWORD64 Displacement, +# __inout PIMAGEHLP_SYMBOL64 Symbol +# ); +#=============================================================================== +def SymGetSymFromAddr64(hProcess, Address): + _SymGetSymFromAddr64 = windll.dbghelp.SymGetSymFromAddr64 + _SymGetSymFromAddr64.argtypes = 
[HANDLE, DWORD64, PDWORD64, PIMAGEHLP_SYMBOL64] + _SymGetSymFromAddr64.restype = bool + _SymGetSymFromAddr64.errcheck = RaiseIfZero + + imagehlp_symbol64 = IMAGEHLP_SYMBOL64() + imagehlp_symbol64.SizeOfStruct = 32 # *don't modify*: sizeof(IMAGEHLP_SYMBOL64) in C. + imagehlp_symbol64.MaxNameLen = MAX_SYM_NAME + + Displacement = DWORD64(0) + _SymGetSymFromAddr64(hProcess, Address, byref(Displacement), byref(imagehlp_symbol64)) + + return (Displacement.value, imagehlp_symbol64) + +#TODO: check for the 'W' version of SymGetSymFromAddr64() + + +#=============================================================================== +# typedef struct API_VERSION { +# USHORT MajorVersion; +# USHORT MinorVersion; +# USHORT Revision; +# USHORT Reserved; +# } API_VERSION, *LPAPI_VERSION; +#=============================================================================== +class API_VERSION (Structure): + _fields_ = [ + ("MajorVersion", USHORT), + ("MinorVersion", USHORT), + ("Revision", USHORT), + ("Reserved", USHORT), + ] +PAPI_VERSION = POINTER(API_VERSION) +LPAPI_VERSION = PAPI_VERSION + +#=============================================================================== +# LPAPI_VERSION WINAPI ImagehlpApiVersion(void); +#=============================================================================== +def ImagehlpApiVersion(): + _ImagehlpApiVersion = windll.dbghelp.ImagehlpApiVersion + _ImagehlpApiVersion.restype = LPAPI_VERSION + + api_version = _ImagehlpApiVersion() + return api_version.contents + + +#=============================================================================== +# LPAPI_VERSION WINAPI ImagehlpApiVersionEx( +# __in LPAPI_VERSION AppVersion +# ); +#=============================================================================== +def ImagehlpApiVersionEx(MajorVersion, MinorVersion, Revision): + _ImagehlpApiVersionEx = windll.dbghelp.ImagehlpApiVersionEx + _ImagehlpApiVersionEx.argtypes = [LPAPI_VERSION] + _ImagehlpApiVersionEx.restype = LPAPI_VERSION + + api_version = API_VERSION(MajorVersion, MinorVersion, Revision, 0) + + ret_api_version = _ImagehlpApiVersionEx(byref(api_version)) + + return ret_api_version.contents + +#=============================================================================== +# typedef enum { +# AddrMode1616, +# AddrMode1632, +# AddrModeReal, +# AddrModeFlat +# } ADDRESS_MODE; +#=============================================================================== +AddrMode1616 = 0 +AddrMode1632 = 1 +AddrModeReal = 2 +AddrModeFlat = 3 + +ADDRESS_MODE = DWORD #needed for the size of an ADDRESS_MODE (see ADDRESS64) + +#=============================================================================== +# typedef struct _tagADDRESS64 { +# DWORD64 Offset; +# WORD Segment; +# ADDRESS_MODE Mode; +# } ADDRESS64, *LPADDRESS64; +#=============================================================================== +class ADDRESS64 (Structure): + _fields_ = [ + ("Offset", DWORD64), + ("Segment", WORD), + ("Mode", ADDRESS_MODE), #it's a member of the ADDRESS_MODE enum. 
+ ] +LPADDRESS64 = POINTER(ADDRESS64) + +#=============================================================================== +# typedef struct _KDHELP64 { +# DWORD64 Thread; +# DWORD ThCallbackStack; +# DWORD ThCallbackBStore; +# DWORD NextCallback; +# DWORD FramePointer; +# DWORD64 KiCallUserMode; +# DWORD64 KeUserCallbackDispatcher; +# DWORD64 SystemRangeStart; +# DWORD64 KiUserExceptionDispatcher; +# DWORD64 StackBase; +# DWORD64 StackLimit; +# DWORD64 Reserved[5]; +# } KDHELP64, *PKDHELP64; +#=============================================================================== +class KDHELP64 (Structure): + _fields_ = [ + ("Thread", DWORD64), + ("ThCallbackStack", DWORD), + ("ThCallbackBStore", DWORD), + ("NextCallback", DWORD), + ("FramePointer", DWORD), + ("KiCallUserMode", DWORD64), + ("KeUserCallbackDispatcher", DWORD64), + ("SystemRangeStart", DWORD64), + ("KiUserExceptionDispatcher", DWORD64), + ("StackBase", DWORD64), + ("StackLimit", DWORD64), + ("Reserved", DWORD64 * 5), + ] +PKDHELP64 = POINTER(KDHELP64) + +#=============================================================================== +# typedef struct _tagSTACKFRAME64 { +# ADDRESS64 AddrPC; +# ADDRESS64 AddrReturn; +# ADDRESS64 AddrFrame; +# ADDRESS64 AddrStack; +# ADDRESS64 AddrBStore; +# PVOID FuncTableEntry; +# DWORD64 Params[4]; +# BOOL Far; +# BOOL Virtual; +# DWORD64 Reserved[3]; +# KDHELP64 KdHelp; +# } STACKFRAME64, *LPSTACKFRAME64; +#=============================================================================== +class STACKFRAME64(Structure): + _fields_ = [ + ("AddrPC", ADDRESS64), + ("AddrReturn", ADDRESS64), + ("AddrFrame", ADDRESS64), + ("AddrStack", ADDRESS64), + ("AddrBStore", ADDRESS64), + ("FuncTableEntry", PVOID), + ("Params", DWORD64 * 4), + ("Far", BOOL), + ("Virtual", BOOL), + ("Reserved", DWORD64 * 3), + ("KdHelp", KDHELP64), + ] +LPSTACKFRAME64 = POINTER(STACKFRAME64) + +#=============================================================================== +# BOOL CALLBACK ReadProcessMemoryProc64( +# __in HANDLE hProcess, +# __in DWORD64 lpBaseAddress, +# __out PVOID lpBuffer, +# __in DWORD nSize, +# __out LPDWORD lpNumberOfBytesRead +# ); +#=============================================================================== +PREAD_PROCESS_MEMORY_ROUTINE64 = WINFUNCTYPE(BOOL, HANDLE, DWORD64, PVOID, DWORD, LPDWORD) + +#=============================================================================== +# PVOID CALLBACK FunctionTableAccessProc64( +# __in HANDLE hProcess, +# __in DWORD64 AddrBase +# ); +#=============================================================================== +PFUNCTION_TABLE_ACCESS_ROUTINE64 = WINFUNCTYPE(PVOID, HANDLE, DWORD64) + +#=============================================================================== +# DWORD64 CALLBACK GetModuleBaseProc64( +# __in HANDLE hProcess, +# __in DWORD64 Address +# ); +#=============================================================================== +PGET_MODULE_BASE_ROUTINE64 = WINFUNCTYPE(DWORD64, HANDLE, DWORD64) + +#=============================================================================== +# DWORD64 CALLBACK GetModuleBaseProc64( +# __in HANDLE hProcess, +# __in DWORD64 Address +# ); +#=============================================================================== +PTRANSLATE_ADDRESS_ROUTINE64 = WINFUNCTYPE(DWORD64, HANDLE, DWORD64) + +# Valid machine types for StackWalk64 function +IMAGE_FILE_MACHINE_I386 = 0x014c #Intel x86 +IMAGE_FILE_MACHINE_IA64 = 0x0200 #Intel Itanium Processor Family (IPF) +IMAGE_FILE_MACHINE_AMD64 = 0x8664 #x64 (AMD64 or EM64T) + 
+#=============================================================================== +# BOOL WINAPI StackWalk64( +# __in DWORD MachineType, +# __in HANDLE hProcess, +# __in HANDLE hThread, +# __inout LPSTACKFRAME64 StackFrame, +# __inout PVOID ContextRecord, +# __in_opt PREAD_PROCESS_MEMORY_ROUTINE64 ReadMemoryRoutine, +# __in_opt PFUNCTION_TABLE_ACCESS_ROUTINE64 FunctionTableAccessRoutine, +# __in_opt PGET_MODULE_BASE_ROUTINE64 GetModuleBaseRoutine, +# __in_opt PTRANSLATE_ADDRESS_ROUTINE64 TranslateAddress +# ); +#=============================================================================== +def StackWalk64(MachineType, hProcess, hThread, StackFrame, + ContextRecord = None, ReadMemoryRoutine = None, + FunctionTableAccessRoutine = None, GetModuleBaseRoutine = None, + TranslateAddress = None): + + _StackWalk64 = windll.dbghelp.StackWalk64 + _StackWalk64.argtypes = [DWORD, HANDLE, HANDLE, LPSTACKFRAME64, PVOID, + PREAD_PROCESS_MEMORY_ROUTINE64, + PFUNCTION_TABLE_ACCESS_ROUTINE64, + PGET_MODULE_BASE_ROUTINE64, + PTRANSLATE_ADDRESS_ROUTINE64] + _StackWalk64.restype = bool + + pReadMemoryRoutine = None + if ReadMemoryRoutine: + pReadMemoryRoutine = PREAD_PROCESS_MEMORY_ROUTINE64(ReadMemoryRoutine) + else: + pReadMemoryRoutine = ctypes.cast(None, PREAD_PROCESS_MEMORY_ROUTINE64) + + pFunctionTableAccessRoutine = None + if FunctionTableAccessRoutine: + pFunctionTableAccessRoutine = PFUNCTION_TABLE_ACCESS_ROUTINE64(FunctionTableAccessRoutine) + else: + pFunctionTableAccessRoutine = ctypes.cast(None, PFUNCTION_TABLE_ACCESS_ROUTINE64) + + pGetModuleBaseRoutine = None + if GetModuleBaseRoutine: + pGetModuleBaseRoutine = PGET_MODULE_BASE_ROUTINE64(GetModuleBaseRoutine) + else: + pGetModuleBaseRoutine = ctypes.cast(None, PGET_MODULE_BASE_ROUTINE64) + + pTranslateAddress = None + if TranslateAddress: + pTranslateAddress = PTRANSLATE_ADDRESS_ROUTINE64(TranslateAddress) + else: + pTranslateAddress = ctypes.cast(None, PTRANSLATE_ADDRESS_ROUTINE64) + + pContextRecord = None + if ContextRecord is None: + ContextRecord = GetThreadContext(hThread, raw=True) + pContextRecord = PCONTEXT(ContextRecord) + + #this function *DOESN'T* set last error [GetLastError()] properly most of the time. + ret = _StackWalk64(MachineType, hProcess, hThread, byref(StackFrame), + pContextRecord, pReadMemoryRoutine, + pFunctionTableAccessRoutine, pGetModuleBaseRoutine, + pTranslateAddress) + + return ret + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py new file mode 100644 index 00000000..187e4294 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/defines.py @@ -0,0 +1,718 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Common definitions. +""" + +# TODO +# + add TCHAR and related types? + +__revision__ = "$Id$" + +import ctypes +import functools +from winappdbg import compat + +#------------------------------------------------------------------------------ + +# Some stuff from ctypes we'll be using very frequently. +addressof = ctypes.addressof +sizeof = ctypes.sizeof +SIZEOF = ctypes.sizeof +POINTER = ctypes.POINTER +Structure = ctypes.Structure +Union = ctypes.Union +WINFUNCTYPE = ctypes.WINFUNCTYPE +windll = ctypes.windll + +# The IronPython implementation of byref() was giving me problems, +# so I'm replacing it with the slower pointer() function. +try: + ctypes.c_void_p(ctypes.byref(ctypes.c_char())) # this fails in IronPython + byref = ctypes.byref +except TypeError: + byref = ctypes.pointer + +# XXX DEBUG +# The following code can be enabled to make the Win32 API wrappers log to +# standard output the dll and function names, the parameter values and the +# return value for each call. + +##WIN32_VERBOSE_MODE = True +WIN32_VERBOSE_MODE = False + +if WIN32_VERBOSE_MODE: + + class WinDllHook(object): + def __getattr__(self, name): + if name.startswith('_'): + return object.__getattr__(self, name) + return WinFuncHook(name) + + class WinFuncHook(object): + def __init__(self, name): + self.__name = name + + def __getattr__(self, name): + if name.startswith('_'): + return object.__getattr__(self, name) + return WinCallHook(self.__name, name) + + class WinCallHook(object): + def __init__(self, dllname, funcname): + self.__dllname = dllname + self.__funcname = funcname + self.__func = getattr(getattr(ctypes.windll, dllname), funcname) + + def __copy_attribute(self, attribute): + try: + value = getattr(self, attribute) + setattr(self.__func, attribute, value) + except AttributeError: + try: + delattr(self.__func, attribute) + except AttributeError: + pass + + def __call__(self, *argv): + self.__copy_attribute('argtypes') + self.__copy_attribute('restype') + self.__copy_attribute('errcheck') + print("-"*10) + print("%s ! %s %r" % (self.__dllname, self.__funcname, argv)) + retval = self.__func(*argv) + print("== %r" % (retval,)) + return retval + + windll = WinDllHook() + +#============================================================================== +# This is used later on to calculate the list of exported symbols. 
+_all = None +_all = set(vars().keys()) +#============================================================================== + +def RaiseIfZero(result, func = None, arguments = ()): + """ + Error checking for most Win32 API calls. + + The function is assumed to return an integer, which is C{0} on error. + In that case the C{WindowsError} exception is raised. + """ + if not result: + raise ctypes.WinError() + return result + +def RaiseIfNotZero(result, func = None, arguments = ()): + """ + Error checking for some odd Win32 API calls. + + The function is assumed to return an integer, which is zero on success. + If the return value is nonzero the C{WindowsError} exception is raised. + + This is mostly useful for free() like functions, where the return value is + the pointer to the memory block on failure or a C{NULL} pointer on success. + """ + if result: + raise ctypes.WinError() + return result + +def RaiseIfNotErrorSuccess(result, func = None, arguments = ()): + """ + Error checking for Win32 Registry API calls. + + The function is assumed to return a Win32 error code. If the code is not + C{ERROR_SUCCESS} then a C{WindowsError} exception is raised. + """ + if result != ERROR_SUCCESS: + raise ctypes.WinError(result) + return result + +class GuessStringType(object): + """ + Decorator that guesses the correct version (A or W) to call + based on the types of the strings passed as parameters. + + Calls the B{ANSI} version if the only string types are ANSI. + + Calls the B{Unicode} version if Unicode or mixed string types are passed. + + The default if no string arguments are passed depends on the value of the + L{t_default} class variable. + + @type fn_ansi: function + @ivar fn_ansi: ANSI version of the API function to call. + @type fn_unicode: function + @ivar fn_unicode: Unicode (wide) version of the API function to call. + + @type t_default: type + @cvar t_default: Default string type to use. + Possible values are: + - type('') for ANSI + - type(u'') for Unicode + """ + + # ANSI and Unicode types + t_ansi = type('') + t_unicode = type(u'') + + # Default is ANSI for Python 2.x + t_default = t_ansi + + def __init__(self, fn_ansi, fn_unicode): + """ + @type fn_ansi: function + @param fn_ansi: ANSI version of the API function to call. + @type fn_unicode: function + @param fn_unicode: Unicode (wide) version of the API function to call. + """ + self.fn_ansi = fn_ansi + self.fn_unicode = fn_unicode + + # Copy the wrapped function attributes. + try: + self.__name__ = self.fn_ansi.__name__[:-1] # remove the A or W + except AttributeError: + pass + try: + self.__module__ = self.fn_ansi.__module__ + except AttributeError: + pass + try: + self.__doc__ = self.fn_ansi.__doc__ + except AttributeError: + pass + + def __call__(self, *argv, **argd): + + # Shortcut to self.t_ansi + t_ansi = self.t_ansi + + # Get the types of all arguments for the function + v_types = [ type(item) for item in argv ] + v_types.extend( [ type(value) for (key, value) in compat.iteritems(argd) ] ) + + # Get the appropriate function for the default type + if self.t_default == t_ansi: + fn = self.fn_ansi + else: + fn = self.fn_unicode + + # If at least one argument is a Unicode string... 
+ if self.t_unicode in v_types: + + # If al least one argument is an ANSI string, + # convert all ANSI strings to Unicode + if t_ansi in v_types: + argv = list(argv) + for index in compat.xrange(len(argv)): + if v_types[index] == t_ansi: + argv[index] = compat.unicode(argv[index]) + for (key, value) in argd.items(): + if type(value) == t_ansi: + argd[key] = compat.unicode(value) + + # Use the W version + fn = self.fn_unicode + + # If at least one argument is an ANSI string, + # but there are no Unicode strings... + elif t_ansi in v_types: + + # Use the A version + fn = self.fn_ansi + + # Call the function and return the result + return fn(*argv, **argd) + +class DefaultStringType(object): + """ + Decorator that uses the default version (A or W) to call + based on the configuration of the L{GuessStringType} decorator. + + @see: L{GuessStringType.t_default} + + @type fn_ansi: function + @ivar fn_ansi: ANSI version of the API function to call. + @type fn_unicode: function + @ivar fn_unicode: Unicode (wide) version of the API function to call. + """ + + def __init__(self, fn_ansi, fn_unicode): + """ + @type fn_ansi: function + @param fn_ansi: ANSI version of the API function to call. + @type fn_unicode: function + @param fn_unicode: Unicode (wide) version of the API function to call. + """ + self.fn_ansi = fn_ansi + self.fn_unicode = fn_unicode + + # Copy the wrapped function attributes. + try: + self.__name__ = self.fn_ansi.__name__[:-1] # remove the A or W + except AttributeError: + pass + try: + self.__module__ = self.fn_ansi.__module__ + except AttributeError: + pass + try: + self.__doc__ = self.fn_ansi.__doc__ + except AttributeError: + pass + + def __call__(self, *argv, **argd): + + # Get the appropriate function based on the default. + if GuessStringType.t_default == GuessStringType.t_ansi: + fn = self.fn_ansi + else: + fn = self.fn_unicode + + # Call the function and return the result + return fn(*argv, **argd) + +def MakeANSIVersion(fn): + """ + Decorator that generates an ANSI version of a Unicode (wide) only API call. + + @type fn: callable + @param fn: Unicode (wide) version of the API function to call. + """ + @functools.wraps(fn) + def wrapper(*argv, **argd): + t_ansi = GuessStringType.t_ansi + t_unicode = GuessStringType.t_unicode + v_types = [ type(item) for item in argv ] + v_types.extend( [ type(value) for (key, value) in compat.iteritems(argd) ] ) + if t_ansi in v_types: + argv = list(argv) + for index in compat.xrange(len(argv)): + if v_types[index] == t_ansi: + argv[index] = t_unicode(argv[index]) + for key, value in argd.items(): + if type(value) == t_ansi: + argd[key] = t_unicode(value) + return fn(*argv, **argd) + return wrapper + +def MakeWideVersion(fn): + """ + Decorator that generates a Unicode (wide) version of an ANSI only API call. + + @type fn: callable + @param fn: ANSI version of the API function to call. 
+ """ + @functools.wraps(fn) + def wrapper(*argv, **argd): + t_ansi = GuessStringType.t_ansi + t_unicode = GuessStringType.t_unicode + v_types = [ type(item) for item in argv ] + v_types.extend( [ type(value) for (key, value) in compat.iteritems(argd) ] ) + if t_unicode in v_types: + argv = list(argv) + for index in compat.xrange(len(argv)): + if v_types[index] == t_unicode: + argv[index] = t_ansi(argv[index]) + for key, value in argd.items(): + if type(value) == t_unicode: + argd[key] = t_ansi(value) + return fn(*argv, **argd) + return wrapper + +#--- Types -------------------------------------------------------------------- +# http://msdn.microsoft.com/en-us/library/aa383751(v=vs.85).aspx + +# Map of basic C types to Win32 types +LPVOID = ctypes.c_void_p +CHAR = ctypes.c_char +WCHAR = ctypes.c_wchar +BYTE = ctypes.c_ubyte +SBYTE = ctypes.c_byte +WORD = ctypes.c_uint16 +SWORD = ctypes.c_int16 +DWORD = ctypes.c_uint32 +SDWORD = ctypes.c_int32 +QWORD = ctypes.c_uint64 +SQWORD = ctypes.c_int64 +SHORT = ctypes.c_short +USHORT = ctypes.c_ushort +INT = ctypes.c_int +UINT = ctypes.c_uint +LONG = ctypes.c_long +ULONG = ctypes.c_ulong +LONGLONG = ctypes.c_int64 # c_longlong +ULONGLONG = ctypes.c_uint64 # c_ulonglong +LPSTR = ctypes.c_char_p +LPWSTR = ctypes.c_wchar_p +INT8 = ctypes.c_int8 +INT16 = ctypes.c_int16 +INT32 = ctypes.c_int32 +INT64 = ctypes.c_int64 +UINT8 = ctypes.c_uint8 +UINT16 = ctypes.c_uint16 +UINT32 = ctypes.c_uint32 +UINT64 = ctypes.c_uint64 +LONG32 = ctypes.c_int32 +LONG64 = ctypes.c_int64 +ULONG32 = ctypes.c_uint32 +ULONG64 = ctypes.c_uint64 +DWORD32 = ctypes.c_uint32 +DWORD64 = ctypes.c_uint64 +BOOL = ctypes.c_int +FLOAT = ctypes.c_float + +# Map size_t to SIZE_T +try: + SIZE_T = ctypes.c_size_t + SSIZE_T = ctypes.c_ssize_t +except AttributeError: + # Size of a pointer + SIZE_T = {1:BYTE, 2:WORD, 4:DWORD, 8:QWORD}[sizeof(LPVOID)] + SSIZE_T = {1:SBYTE, 2:SWORD, 4:SDWORD, 8:SQWORD}[sizeof(LPVOID)] +PSIZE_T = POINTER(SIZE_T) + +# Not really pointers but pointer-sized integers +DWORD_PTR = SIZE_T +ULONG_PTR = SIZE_T +LONG_PTR = SIZE_T + +# Other Win32 types, more may be added as needed +PVOID = LPVOID +PPVOID = POINTER(PVOID) +PSTR = LPSTR +PWSTR = LPWSTR +PCHAR = LPSTR +PWCHAR = LPWSTR +LPBYTE = POINTER(BYTE) +LPSBYTE = POINTER(SBYTE) +LPWORD = POINTER(WORD) +LPSWORD = POINTER(SWORD) +LPDWORD = POINTER(DWORD) +LPSDWORD = POINTER(SDWORD) +LPULONG = POINTER(ULONG) +LPLONG = POINTER(LONG) +PDWORD = LPDWORD +PDWORD_PTR = POINTER(DWORD_PTR) +PULONG = LPULONG +PLONG = LPLONG +CCHAR = CHAR +BOOLEAN = BYTE +PBOOL = POINTER(BOOL) +LPBOOL = PBOOL +TCHAR = CHAR # XXX ANSI by default? 
+UCHAR = BYTE +DWORDLONG = ULONGLONG +LPDWORD32 = POINTER(DWORD32) +LPULONG32 = POINTER(ULONG32) +LPDWORD64 = POINTER(DWORD64) +LPULONG64 = POINTER(ULONG64) +PDWORD32 = LPDWORD32 +PULONG32 = LPULONG32 +PDWORD64 = LPDWORD64 +PULONG64 = LPULONG64 +ATOM = WORD +HANDLE = LPVOID +PHANDLE = POINTER(HANDLE) +LPHANDLE = PHANDLE +HMODULE = HANDLE +HINSTANCE = HANDLE +HTASK = HANDLE +HKEY = HANDLE +PHKEY = POINTER(HKEY) +HDESK = HANDLE +HRSRC = HANDLE +HSTR = HANDLE +HWINSTA = HANDLE +HKL = HANDLE +HDWP = HANDLE +HFILE = HANDLE +HRESULT = LONG +HGLOBAL = HANDLE +HLOCAL = HANDLE +HGDIOBJ = HANDLE +HDC = HGDIOBJ +HRGN = HGDIOBJ +HBITMAP = HGDIOBJ +HPALETTE = HGDIOBJ +HPEN = HGDIOBJ +HBRUSH = HGDIOBJ +HMF = HGDIOBJ +HEMF = HGDIOBJ +HENHMETAFILE = HGDIOBJ +HMETAFILE = HGDIOBJ +HMETAFILEPICT = HGDIOBJ +HWND = HANDLE +NTSTATUS = LONG +PNTSTATUS = POINTER(NTSTATUS) +KAFFINITY = ULONG_PTR +RVA = DWORD +RVA64 = QWORD +WPARAM = DWORD +LPARAM = LPVOID +LRESULT = LPVOID +ACCESS_MASK = DWORD +REGSAM = ACCESS_MASK +PACCESS_MASK = POINTER(ACCESS_MASK) +PREGSAM = POINTER(REGSAM) + +# Since the SID is an opaque structure, let's treat its pointers as void* +PSID = PVOID + +# typedef union _LARGE_INTEGER { +# struct { +# DWORD LowPart; +# LONG HighPart; +# } ; +# struct { +# DWORD LowPart; +# LONG HighPart; +# } u; +# LONGLONG QuadPart; +# } LARGE_INTEGER, +# *PLARGE_INTEGER; + +# XXX TODO + +# typedef struct _FLOAT128 { +# __int64 LowPart; +# __int64 HighPart; +# } FLOAT128; +class FLOAT128 (Structure): + _fields_ = [ + ("LowPart", QWORD), + ("HighPart", QWORD), + ] +PFLOAT128 = POINTER(FLOAT128) + +# typedef struct DECLSPEC_ALIGN(16) _M128A { +# ULONGLONG Low; +# LONGLONG High; +# } M128A, *PM128A; +class M128A(Structure): + _fields_ = [ + ("Low", ULONGLONG), + ("High", LONGLONG), + ] +PM128A = POINTER(M128A) + +#--- Constants ---------------------------------------------------------------- + +NULL = None +INFINITE = -1 +TRUE = 1 +FALSE = 0 + +# http://blogs.msdn.com/oldnewthing/archive/2004/08/26/220873.aspx +ANYSIZE_ARRAY = 1 + +# Invalid handle value is -1 casted to void pointer. +try: + INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value #-1 #0xFFFFFFFF +except TypeError: + if sizeof(ctypes.c_void_p) == 4: + INVALID_HANDLE_VALUE = 0xFFFFFFFF + elif sizeof(ctypes.c_void_p) == 8: + INVALID_HANDLE_VALUE = 0xFFFFFFFFFFFFFFFF + else: + raise + +MAX_MODULE_NAME32 = 255 +MAX_PATH = 260 + +# Error codes +# TODO maybe add more error codes? 
+# if they're too many they could be pickled instead, +# or at the very least put in a new file +ERROR_SUCCESS = 0 +ERROR_INVALID_FUNCTION = 1 +ERROR_FILE_NOT_FOUND = 2 +ERROR_PATH_NOT_FOUND = 3 +ERROR_ACCESS_DENIED = 5 +ERROR_INVALID_HANDLE = 6 +ERROR_NOT_ENOUGH_MEMORY = 8 +ERROR_INVALID_DRIVE = 15 +ERROR_NO_MORE_FILES = 18 +ERROR_BAD_LENGTH = 24 +ERROR_HANDLE_EOF = 38 +ERROR_HANDLE_DISK_FULL = 39 +ERROR_NOT_SUPPORTED = 50 +ERROR_FILE_EXISTS = 80 +ERROR_INVALID_PARAMETER = 87 +ERROR_BUFFER_OVERFLOW = 111 +ERROR_DISK_FULL = 112 +ERROR_CALL_NOT_IMPLEMENTED = 120 +ERROR_SEM_TIMEOUT = 121 +ERROR_INSUFFICIENT_BUFFER = 122 +ERROR_INVALID_NAME = 123 +ERROR_MOD_NOT_FOUND = 126 +ERROR_PROC_NOT_FOUND = 127 +ERROR_DIR_NOT_EMPTY = 145 +ERROR_BAD_THREADID_ADDR = 159 +ERROR_BAD_ARGUMENTS = 160 +ERROR_BAD_PATHNAME = 161 +ERROR_ALREADY_EXISTS = 183 +ERROR_INVALID_FLAG_NUMBER = 186 +ERROR_ENVVAR_NOT_FOUND = 203 +ERROR_FILENAME_EXCED_RANGE = 206 +ERROR_MORE_DATA = 234 + +WAIT_TIMEOUT = 258 + +ERROR_NO_MORE_ITEMS = 259 +ERROR_PARTIAL_COPY = 299 +ERROR_INVALID_ADDRESS = 487 +ERROR_THREAD_NOT_IN_PROCESS = 566 +ERROR_CONTROL_C_EXIT = 572 +ERROR_UNHANDLED_EXCEPTION = 574 +ERROR_ASSERTION_FAILURE = 668 +ERROR_WOW_ASSERTION = 670 + +ERROR_DBG_EXCEPTION_NOT_HANDLED = 688 +ERROR_DBG_REPLY_LATER = 689 +ERROR_DBG_UNABLE_TO_PROVIDE_HANDLE = 690 +ERROR_DBG_TERMINATE_THREAD = 691 +ERROR_DBG_TERMINATE_PROCESS = 692 +ERROR_DBG_CONTROL_C = 693 +ERROR_DBG_PRINTEXCEPTION_C = 694 +ERROR_DBG_RIPEXCEPTION = 695 +ERROR_DBG_CONTROL_BREAK = 696 +ERROR_DBG_COMMAND_EXCEPTION = 697 +ERROR_DBG_EXCEPTION_HANDLED = 766 +ERROR_DBG_CONTINUE = 767 + +ERROR_ELEVATION_REQUIRED = 740 +ERROR_NOACCESS = 998 + +ERROR_CIRCULAR_DEPENDENCY = 1059 +ERROR_SERVICE_DOES_NOT_EXIST = 1060 +ERROR_SERVICE_CANNOT_ACCEPT_CTRL = 1061 +ERROR_SERVICE_NOT_ACTIVE = 1062 +ERROR_FAILED_SERVICE_CONTROLLER_CONNECT = 1063 +ERROR_EXCEPTION_IN_SERVICE = 1064 +ERROR_DATABASE_DOES_NOT_EXIST = 1065 +ERROR_SERVICE_SPECIFIC_ERROR = 1066 +ERROR_PROCESS_ABORTED = 1067 +ERROR_SERVICE_DEPENDENCY_FAIL = 1068 +ERROR_SERVICE_LOGON_FAILED = 1069 +ERROR_SERVICE_START_HANG = 1070 +ERROR_INVALID_SERVICE_LOCK = 1071 +ERROR_SERVICE_MARKED_FOR_DELETE = 1072 +ERROR_SERVICE_EXISTS = 1073 +ERROR_ALREADY_RUNNING_LKG = 1074 +ERROR_SERVICE_DEPENDENCY_DELETED = 1075 +ERROR_BOOT_ALREADY_ACCEPTED = 1076 +ERROR_SERVICE_NEVER_STARTED = 1077 +ERROR_DUPLICATE_SERVICE_NAME = 1078 +ERROR_DIFFERENT_SERVICE_ACCOUNT = 1079 +ERROR_CANNOT_DETECT_DRIVER_FAILURE = 1080 +ERROR_CANNOT_DETECT_PROCESS_ABORT = 1081 +ERROR_NO_RECOVERY_PROGRAM = 1082 +ERROR_SERVICE_NOT_IN_EXE = 1083 +ERROR_NOT_SAFEBOOT_SERVICE = 1084 + +ERROR_DEBUGGER_INACTIVE = 1284 + +ERROR_PRIVILEGE_NOT_HELD = 1314 + +ERROR_NONE_MAPPED = 1332 + +RPC_S_SERVER_UNAVAILABLE = 1722 + +# Standard access rights +import sys +if sys.version_info[0] >= 3: + long = int + +DELETE = long(0x00010000) +READ_CONTROL = long(0x00020000) +WRITE_DAC = long(0x00040000) +WRITE_OWNER = long(0x00080000) +SYNCHRONIZE = long(0x00100000) +STANDARD_RIGHTS_REQUIRED = long(0x000F0000) +STANDARD_RIGHTS_READ = READ_CONTROL +STANDARD_RIGHTS_WRITE = READ_CONTROL +STANDARD_RIGHTS_EXECUTE = READ_CONTROL +STANDARD_RIGHTS_ALL = long(0x001F0000) +SPECIFIC_RIGHTS_ALL = long(0x0000FFFF) + +#--- Structures --------------------------------------------------------------- + +# typedef struct _LSA_UNICODE_STRING { +# USHORT Length; +# USHORT MaximumLength; +# PWSTR Buffer; +# } LSA_UNICODE_STRING, +# *PLSA_UNICODE_STRING, +# UNICODE_STRING, +# *PUNICODE_STRING; +class 
UNICODE_STRING(Structure): + _fields_ = [ + ("Length", USHORT), + ("MaximumLength", USHORT), + ("Buffer", PVOID), + ] + +# From MSDN: +# +# typedef struct _GUID { +# DWORD Data1; +# WORD Data2; +# WORD Data3; +# BYTE Data4[8]; +# } GUID; +class GUID(Structure): + _fields_ = [ + ("Data1", DWORD), + ("Data2", WORD), + ("Data3", WORD), + ("Data4", BYTE * 8), +] + +# From MSDN: +# +# typedef struct _LIST_ENTRY { +# struct _LIST_ENTRY *Flink; +# struct _LIST_ENTRY *Blink; +# } LIST_ENTRY, *PLIST_ENTRY, *RESTRICTED_POINTER PRLIST_ENTRY; +class LIST_ENTRY(Structure): + _fields_ = [ + ("Flink", PVOID), # POINTER(LIST_ENTRY) + ("Blink", PVOID), # POINTER(LIST_ENTRY) +] + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +##__all__ = [_x for _x in _all if not _x.startswith('_')] +##__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py new file mode 100644 index 00000000..c3b5e6eb --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/gdi32.py @@ -0,0 +1,507 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for gdi32.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.kernel32 import GetLastError, SetLastError + +#============================================================================== +# This is used later on to calculate the list of exported symbols. 
+_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- Helpers ------------------------------------------------------------------ + +#--- Types -------------------------------------------------------------------- + +#--- Constants ---------------------------------------------------------------- + +# GDI object types +OBJ_PEN = 1 +OBJ_BRUSH = 2 +OBJ_DC = 3 +OBJ_METADC = 4 +OBJ_PAL = 5 +OBJ_FONT = 6 +OBJ_BITMAP = 7 +OBJ_REGION = 8 +OBJ_METAFILE = 9 +OBJ_MEMDC = 10 +OBJ_EXTPEN = 11 +OBJ_ENHMETADC = 12 +OBJ_ENHMETAFILE = 13 +OBJ_COLORSPACE = 14 +GDI_OBJ_LAST = OBJ_COLORSPACE + +# Ternary raster operations +SRCCOPY = 0x00CC0020 # dest = source +SRCPAINT = 0x00EE0086 # dest = source OR dest +SRCAND = 0x008800C6 # dest = source AND dest +SRCINVERT = 0x00660046 # dest = source XOR dest +SRCERASE = 0x00440328 # dest = source AND (NOT dest) +NOTSRCCOPY = 0x00330008 # dest = (NOT source) +NOTSRCERASE = 0x001100A6 # dest = (NOT src) AND (NOT dest) +MERGECOPY = 0x00C000CA # dest = (source AND pattern) +MERGEPAINT = 0x00BB0226 # dest = (NOT source) OR dest +PATCOPY = 0x00F00021 # dest = pattern +PATPAINT = 0x00FB0A09 # dest = DPSnoo +PATINVERT = 0x005A0049 # dest = pattern XOR dest +DSTINVERT = 0x00550009 # dest = (NOT dest) +BLACKNESS = 0x00000042 # dest = BLACK +WHITENESS = 0x00FF0062 # dest = WHITE +NOMIRRORBITMAP = 0x80000000 # Do not Mirror the bitmap in this call +CAPTUREBLT = 0x40000000 # Include layered windows + +# Region flags +ERROR = 0 +NULLREGION = 1 +SIMPLEREGION = 2 +COMPLEXREGION = 3 +RGN_ERROR = ERROR + +# CombineRgn() styles +RGN_AND = 1 +RGN_OR = 2 +RGN_XOR = 3 +RGN_DIFF = 4 +RGN_COPY = 5 +RGN_MIN = RGN_AND +RGN_MAX = RGN_COPY + +# StretchBlt() modes +BLACKONWHITE = 1 +WHITEONBLACK = 2 +COLORONCOLOR = 3 +HALFTONE = 4 +MAXSTRETCHBLTMODE = 4 +STRETCH_ANDSCANS = BLACKONWHITE +STRETCH_ORSCANS = WHITEONBLACK +STRETCH_DELETESCANS = COLORONCOLOR +STRETCH_HALFTONE = HALFTONE + +# PolyFill() modes +ALTERNATE = 1 +WINDING = 2 +POLYFILL_LAST = 2 + +# Layout orientation options +LAYOUT_RTL = 0x00000001 # Right to left +LAYOUT_BTT = 0x00000002 # Bottom to top +LAYOUT_VBH = 0x00000004 # Vertical before horizontal +LAYOUT_ORIENTATIONMASK = LAYOUT_RTL + LAYOUT_BTT + LAYOUT_VBH +LAYOUT_BITMAPORIENTATIONPRESERVED = 0x00000008 + +# Stock objects +WHITE_BRUSH = 0 +LTGRAY_BRUSH = 1 +GRAY_BRUSH = 2 +DKGRAY_BRUSH = 3 +BLACK_BRUSH = 4 +NULL_BRUSH = 5 +HOLLOW_BRUSH = NULL_BRUSH +WHITE_PEN = 6 +BLACK_PEN = 7 +NULL_PEN = 8 +OEM_FIXED_FONT = 10 +ANSI_FIXED_FONT = 11 +ANSI_VAR_FONT = 12 +SYSTEM_FONT = 13 +DEVICE_DEFAULT_FONT = 14 +DEFAULT_PALETTE = 15 +SYSTEM_FIXED_FONT = 16 + +# Metafile functions +META_SETBKCOLOR = 0x0201 +META_SETBKMODE = 0x0102 +META_SETMAPMODE = 0x0103 +META_SETROP2 = 0x0104 +META_SETRELABS = 0x0105 +META_SETPOLYFILLMODE = 0x0106 +META_SETSTRETCHBLTMODE = 0x0107 +META_SETTEXTCHAREXTRA = 0x0108 +META_SETTEXTCOLOR = 0x0209 +META_SETTEXTJUSTIFICATION = 0x020A +META_SETWINDOWORG = 0x020B +META_SETWINDOWEXT = 0x020C +META_SETVIEWPORTORG = 0x020D +META_SETVIEWPORTEXT = 0x020E +META_OFFSETWINDOWORG = 0x020F +META_SCALEWINDOWEXT = 0x0410 +META_OFFSETVIEWPORTORG = 0x0211 +META_SCALEVIEWPORTEXT = 0x0412 +META_LINETO = 0x0213 +META_MOVETO = 0x0214 +META_EXCLUDECLIPRECT = 0x0415 +META_INTERSECTCLIPRECT = 0x0416 +META_ARC = 0x0817 +META_ELLIPSE = 0x0418 +META_FLOODFILL = 0x0419 +META_PIE = 0x081A +META_RECTANGLE = 0x041B +META_ROUNDRECT = 0x061C +META_PATBLT = 0x061D +META_SAVEDC = 0x001E +META_SETPIXEL = 0x041F +META_OFFSETCLIPRGN 
= 0x0220 +META_TEXTOUT = 0x0521 +META_BITBLT = 0x0922 +META_STRETCHBLT = 0x0B23 +META_POLYGON = 0x0324 +META_POLYLINE = 0x0325 +META_ESCAPE = 0x0626 +META_RESTOREDC = 0x0127 +META_FILLREGION = 0x0228 +META_FRAMEREGION = 0x0429 +META_INVERTREGION = 0x012A +META_PAINTREGION = 0x012B +META_SELECTCLIPREGION = 0x012C +META_SELECTOBJECT = 0x012D +META_SETTEXTALIGN = 0x012E +META_CHORD = 0x0830 +META_SETMAPPERFLAGS = 0x0231 +META_EXTTEXTOUT = 0x0a32 +META_SETDIBTODEV = 0x0d33 +META_SELECTPALETTE = 0x0234 +META_REALIZEPALETTE = 0x0035 +META_ANIMATEPALETTE = 0x0436 +META_SETPALENTRIES = 0x0037 +META_POLYPOLYGON = 0x0538 +META_RESIZEPALETTE = 0x0139 +META_DIBBITBLT = 0x0940 +META_DIBSTRETCHBLT = 0x0b41 +META_DIBCREATEPATTERNBRUSH = 0x0142 +META_STRETCHDIB = 0x0f43 +META_EXTFLOODFILL = 0x0548 +META_SETLAYOUT = 0x0149 +META_DELETEOBJECT = 0x01f0 +META_CREATEPALETTE = 0x00f7 +META_CREATEPATTERNBRUSH = 0x01F9 +META_CREATEPENINDIRECT = 0x02FA +META_CREATEFONTINDIRECT = 0x02FB +META_CREATEBRUSHINDIRECT = 0x02FC +META_CREATEREGION = 0x06FF + +# Metafile escape codes +NEWFRAME = 1 +ABORTDOC = 2 +NEXTBAND = 3 +SETCOLORTABLE = 4 +GETCOLORTABLE = 5 +FLUSHOUTPUT = 6 +DRAFTMODE = 7 +QUERYESCSUPPORT = 8 +SETABORTPROC = 9 +STARTDOC = 10 +ENDDOC = 11 +GETPHYSPAGESIZE = 12 +GETPRINTINGOFFSET = 13 +GETSCALINGFACTOR = 14 +MFCOMMENT = 15 +GETPENWIDTH = 16 +SETCOPYCOUNT = 17 +SELECTPAPERSOURCE = 18 +DEVICEDATA = 19 +PASSTHROUGH = 19 +GETTECHNOLGY = 20 +GETTECHNOLOGY = 20 +SETLINECAP = 21 +SETLINEJOIN = 22 +SETMITERLIMIT = 23 +BANDINFO = 24 +DRAWPATTERNRECT = 25 +GETVECTORPENSIZE = 26 +GETVECTORBRUSHSIZE = 27 +ENABLEDUPLEX = 28 +GETSETPAPERBINS = 29 +GETSETPRINTORIENT = 30 +ENUMPAPERBINS = 31 +SETDIBSCALING = 32 +EPSPRINTING = 33 +ENUMPAPERMETRICS = 34 +GETSETPAPERMETRICS = 35 +POSTSCRIPT_DATA = 37 +POSTSCRIPT_IGNORE = 38 +MOUSETRAILS = 39 +GETDEVICEUNITS = 42 +GETEXTENDEDTEXTMETRICS = 256 +GETEXTENTTABLE = 257 +GETPAIRKERNTABLE = 258 +GETTRACKKERNTABLE = 259 +EXTTEXTOUT = 512 +GETFACENAME = 513 +DOWNLOADFACE = 514 +ENABLERELATIVEWIDTHS = 768 +ENABLEPAIRKERNING = 769 +SETKERNTRACK = 770 +SETALLJUSTVALUES = 771 +SETCHARSET = 772 +STRETCHBLT = 2048 +METAFILE_DRIVER = 2049 +GETSETSCREENPARAMS = 3072 +QUERYDIBSUPPORT = 3073 +BEGIN_PATH = 4096 +CLIP_TO_PATH = 4097 +END_PATH = 4098 +EXT_DEVICE_CAPS = 4099 +RESTORE_CTM = 4100 +SAVE_CTM = 4101 +SET_ARC_DIRECTION = 4102 +SET_BACKGROUND_COLOR = 4103 +SET_POLY_MODE = 4104 +SET_SCREEN_ANGLE = 4105 +SET_SPREAD = 4106 +TRANSFORM_CTM = 4107 +SET_CLIP_BOX = 4108 +SET_BOUNDS = 4109 +SET_MIRROR_MODE = 4110 +OPENCHANNEL = 4110 +DOWNLOADHEADER = 4111 +CLOSECHANNEL = 4112 +POSTSCRIPT_PASSTHROUGH = 4115 +ENCAPSULATED_POSTSCRIPT = 4116 +POSTSCRIPT_IDENTIFY = 4117 +POSTSCRIPT_INJECTION = 4118 +CHECKJPEGFORMAT = 4119 +CHECKPNGFORMAT = 4120 +GET_PS_FEATURESETTING = 4121 +GDIPLUS_TS_QUERYVER = 4122 +GDIPLUS_TS_RECORD = 4123 +SPCLPASSTHROUGH2 = 4568 + +#--- Structures --------------------------------------------------------------- + +# typedef struct _RECT { +# LONG left; +# LONG top; +# LONG right; +# LONG bottom; +# }RECT, *PRECT; +class RECT(Structure): + _fields_ = [ + ('left', LONG), + ('top', LONG), + ('right', LONG), + ('bottom', LONG), + ] +PRECT = POINTER(RECT) +LPRECT = PRECT + +# typedef struct tagPOINT { +# LONG x; +# LONG y; +# } POINT; +class POINT(Structure): + _fields_ = [ + ('x', LONG), + ('y', LONG), + ] +PPOINT = POINTER(POINT) +LPPOINT = PPOINT + +# typedef struct tagBITMAP { +# LONG bmType; +# LONG bmWidth; +# LONG bmHeight; +# LONG bmWidthBytes; +# WORD bmPlanes; +# WORD 
bmBitsPixel; +# LPVOID bmBits; +# } BITMAP, *PBITMAP; +class BITMAP(Structure): + _fields_ = [ + ("bmType", LONG), + ("bmWidth", LONG), + ("bmHeight", LONG), + ("bmWidthBytes", LONG), + ("bmPlanes", WORD), + ("bmBitsPixel", WORD), + ("bmBits", LPVOID), + ] +PBITMAP = POINTER(BITMAP) +LPBITMAP = PBITMAP + +#--- High level classes ------------------------------------------------------- + +#--- gdi32.dll ---------------------------------------------------------------- + +# HDC GetDC( +# __in HWND hWnd +# ); +def GetDC(hWnd): + _GetDC = windll.gdi32.GetDC + _GetDC.argtypes = [HWND] + _GetDC.restype = HDC + _GetDC.errcheck = RaiseIfZero + return _GetDC(hWnd) + +# HDC GetWindowDC( +# __in HWND hWnd +# ); +def GetWindowDC(hWnd): + _GetWindowDC = windll.gdi32.GetWindowDC + _GetWindowDC.argtypes = [HWND] + _GetWindowDC.restype = HDC + _GetWindowDC.errcheck = RaiseIfZero + return _GetWindowDC(hWnd) + +# int ReleaseDC( +# __in HWND hWnd, +# __in HDC hDC +# ); +def ReleaseDC(hWnd, hDC): + _ReleaseDC = windll.gdi32.ReleaseDC + _ReleaseDC.argtypes = [HWND, HDC] + _ReleaseDC.restype = ctypes.c_int + _ReleaseDC.errcheck = RaiseIfZero + _ReleaseDC(hWnd, hDC) + +# HGDIOBJ SelectObject( +# __in HDC hdc, +# __in HGDIOBJ hgdiobj +# ); +def SelectObject(hdc, hgdiobj): + _SelectObject = windll.gdi32.SelectObject + _SelectObject.argtypes = [HDC, HGDIOBJ] + _SelectObject.restype = HGDIOBJ + _SelectObject.errcheck = RaiseIfZero + return _SelectObject(hdc, hgdiobj) + +# HGDIOBJ GetStockObject( +# __in int fnObject +# ); +def GetStockObject(fnObject): + _GetStockObject = windll.gdi32.GetStockObject + _GetStockObject.argtypes = [ctypes.c_int] + _GetStockObject.restype = HGDIOBJ + _GetStockObject.errcheck = RaiseIfZero + return _GetStockObject(fnObject) + +# DWORD GetObjectType( +# __in HGDIOBJ h +# ); +def GetObjectType(h): + _GetObjectType = windll.gdi32.GetObjectType + _GetObjectType.argtypes = [HGDIOBJ] + _GetObjectType.restype = DWORD + _GetObjectType.errcheck = RaiseIfZero + return _GetObjectType(h) + +# int GetObject( +# __in HGDIOBJ hgdiobj, +# __in int cbBuffer, +# __out LPVOID lpvObject +# ); +def GetObject(hgdiobj, cbBuffer = None, lpvObject = None): + _GetObject = windll.gdi32.GetObject + _GetObject.argtypes = [HGDIOBJ, ctypes.c_int, LPVOID] + _GetObject.restype = ctypes.c_int + _GetObject.errcheck = RaiseIfZero + + # Both cbBuffer and lpvObject can be omitted, the correct + # size and structure to return are automatically deduced. + # If lpvObject is given it must be a ctypes object, not a pointer. + # Always returns a ctypes object. 
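    #
    # A hedged usage sketch of what the comment above describes (hbmp below is a
    # hypothetical HBITMAP handle, not a name defined in this module):
    #
    #     bmp = GetObject(hbmp)                        # type is deduced; returns a
    #                                                  # DIBSECTION or BITMAP instance
    #     bmp = GetObject(hbmp, lpvObject = BITMAP())  # or force the output structure
    #
    # GetBitmapBits() further down uses exactly the second form to read the bitmap
    # header before copying the pixel data.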
+ + if cbBuffer is not None: + if lpvObject is None: + lpvObject = ctypes.create_string_buffer("", cbBuffer) + elif lpvObject is not None: + cbBuffer = sizeof(lpvObject) + else: # most likely case, both are None + t = GetObjectType(hgdiobj) + if t == OBJ_PEN: + cbBuffer = sizeof(LOGPEN) + lpvObject = LOGPEN() + elif t == OBJ_BRUSH: + cbBuffer = sizeof(LOGBRUSH) + lpvObject = LOGBRUSH() + elif t == OBJ_PAL: + cbBuffer = _GetObject(hgdiobj, 0, None) + lpvObject = (WORD * (cbBuffer // sizeof(WORD)))() + elif t == OBJ_FONT: + cbBuffer = sizeof(LOGFONT) + lpvObject = LOGFONT() + elif t == OBJ_BITMAP: # try the two possible types of bitmap + cbBuffer = sizeof(DIBSECTION) + lpvObject = DIBSECTION() + try: + _GetObject(hgdiobj, cbBuffer, byref(lpvObject)) + return lpvObject + except WindowsError: + cbBuffer = sizeof(BITMAP) + lpvObject = BITMAP() + elif t == OBJ_EXTPEN: + cbBuffer = sizeof(LOGEXTPEN) + lpvObject = LOGEXTPEN() + else: + cbBuffer = _GetObject(hgdiobj, 0, None) + lpvObject = ctypes.create_string_buffer("", cbBuffer) + _GetObject(hgdiobj, cbBuffer, byref(lpvObject)) + return lpvObject + +# LONG GetBitmapBits( +# __in HBITMAP hbmp, +# __in LONG cbBuffer, +# __out LPVOID lpvBits +# ); +def GetBitmapBits(hbmp): + _GetBitmapBits = windll.gdi32.GetBitmapBits + _GetBitmapBits.argtypes = [HBITMAP, LONG, LPVOID] + _GetBitmapBits.restype = LONG + _GetBitmapBits.errcheck = RaiseIfZero + + bitmap = GetObject(hbmp, lpvObject = BITMAP()) + cbBuffer = bitmap.bmWidthBytes * bitmap.bmHeight + lpvBits = ctypes.create_string_buffer("", cbBuffer) + _GetBitmapBits(hbmp, cbBuffer, byref(lpvBits)) + return lpvBits.raw + +# HBITMAP CreateBitmapIndirect( +# __in const BITMAP *lpbm +# ); +def CreateBitmapIndirect(lpbm): + _CreateBitmapIndirect = windll.gdi32.CreateBitmapIndirect + _CreateBitmapIndirect.argtypes = [PBITMAP] + _CreateBitmapIndirect.restype = HBITMAP + _CreateBitmapIndirect.errcheck = RaiseIfZero + return _CreateBitmapIndirect(lpbm) + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py new file mode 100644 index 00000000..d0c0468f --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/kernel32.py @@ -0,0 +1,4716 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for kernel32.dll in ctypes. +""" + +__revision__ = "$Id$" + +import warnings + +from winappdbg.win32.defines import * + +from winappdbg.win32 import context_i386 +from winappdbg.win32 import context_amd64 + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +_all.add('version') +#============================================================================== + +from winappdbg.win32.version import * + +#------------------------------------------------------------------------------ + +# This can't be defined in defines.py because it calls GetLastError(). +def RaiseIfLastError(result, func = None, arguments = ()): + """ + Error checking for Win32 API calls with no error-specific return value. + + Regardless of the return value, the function calls GetLastError(). If the + code is not C{ERROR_SUCCESS} then a C{WindowsError} exception is raised. + + For this to work, the user MUST call SetLastError(ERROR_SUCCESS) prior to + calling the API. Otherwise an exception may be raised even on success, + since most API calls don't clear the error status code. + """ + code = GetLastError() + if code != ERROR_SUCCESS: + raise ctypes.WinError(code) + return result + +#--- CONTEXT structure and constants ------------------------------------------ + +ContextArchMask = 0x0FFF0000 # just guessing here! 
seems to work, though + +if arch == ARCH_I386: + from winappdbg.win32.context_i386 import * +elif arch == ARCH_AMD64: + if bits == 64: + from winappdbg.win32.context_amd64 import * + else: + from winappdbg.win32.context_i386 import * +else: + warnings.warn("Unknown or unsupported architecture: %s" % arch) + +#--- Constants ---------------------------------------------------------------- + +STILL_ACTIVE = 259 + +WAIT_TIMEOUT = 0x102 +WAIT_FAILED = -1 +WAIT_OBJECT_0 = 0 + +EXCEPTION_NONCONTINUABLE = 0x1 # Noncontinuable exception +EXCEPTION_MAXIMUM_PARAMETERS = 15 # maximum number of exception parameters +MAXIMUM_WAIT_OBJECTS = 64 # Maximum number of wait objects +MAXIMUM_SUSPEND_COUNT = 0x7f # Maximum times thread can be suspended + +FORMAT_MESSAGE_ALLOCATE_BUFFER = 0x00000100 +FORMAT_MESSAGE_FROM_SYSTEM = 0x00001000 + +GR_GDIOBJECTS = 0 +GR_USEROBJECTS = 1 + +PROCESS_NAME_NATIVE = 1 + +MAXINTATOM = 0xC000 + +STD_INPUT_HANDLE = 0xFFFFFFF6 # (DWORD)-10 +STD_OUTPUT_HANDLE = 0xFFFFFFF5 # (DWORD)-11 +STD_ERROR_HANDLE = 0xFFFFFFF4 # (DWORD)-12 + +ATTACH_PARENT_PROCESS = 0xFFFFFFFF # (DWORD)-1 + +# LoadLibraryEx constants +DONT_RESOLVE_DLL_REFERENCES = 0x00000001 +LOAD_LIBRARY_AS_DATAFILE = 0x00000002 +LOAD_WITH_ALTERED_SEARCH_PATH = 0x00000008 +LOAD_IGNORE_CODE_AUTHZ_LEVEL = 0x00000010 +LOAD_LIBRARY_AS_IMAGE_RESOURCE = 0x00000020 +LOAD_LIBRARY_AS_DATAFILE_EXCLUSIVE = 0x00000040 + +# SetSearchPathMode flags +# TODO I couldn't find these constants :( +##BASE_SEARCH_PATH_ENABLE_SAFE_SEARCHMODE = ??? +##BASE_SEARCH_PATH_DISABLE_SAFE_SEARCHMODE = ??? +##BASE_SEARCH_PATH_PERMANENT = ??? + +# Console control events +CTRL_C_EVENT = 0 +CTRL_BREAK_EVENT = 1 +CTRL_CLOSE_EVENT = 2 +CTRL_LOGOFF_EVENT = 5 +CTRL_SHUTDOWN_EVENT = 6 + +# Heap flags +HEAP_NO_SERIALIZE = 0x00000001 +HEAP_GENERATE_EXCEPTIONS = 0x00000004 +HEAP_ZERO_MEMORY = 0x00000008 +HEAP_CREATE_ENABLE_EXECUTE = 0x00040000 + +# Standard access rights +DELETE = long(0x00010000) +READ_CONTROL = long(0x00020000) +WRITE_DAC = long(0x00040000) +WRITE_OWNER = long(0x00080000) +SYNCHRONIZE = long(0x00100000) +STANDARD_RIGHTS_REQUIRED = long(0x000F0000) +STANDARD_RIGHTS_READ = (READ_CONTROL) +STANDARD_RIGHTS_WRITE = (READ_CONTROL) +STANDARD_RIGHTS_EXECUTE = (READ_CONTROL) +STANDARD_RIGHTS_ALL = long(0x001F0000) +SPECIFIC_RIGHTS_ALL = long(0x0000FFFF) + +# Mutex access rights +MUTEX_ALL_ACCESS = 0x1F0001 +MUTEX_MODIFY_STATE = 1 + +# Event access rights +EVENT_ALL_ACCESS = 0x1F0003 +EVENT_MODIFY_STATE = 2 + +# Semaphore access rights +SEMAPHORE_ALL_ACCESS = 0x1F0003 +SEMAPHORE_MODIFY_STATE = 2 + +# Timer access rights +TIMER_ALL_ACCESS = 0x1F0003 +TIMER_MODIFY_STATE = 2 +TIMER_QUERY_STATE = 1 + +# Process access rights for OpenProcess +PROCESS_TERMINATE = 0x0001 +PROCESS_CREATE_THREAD = 0x0002 +PROCESS_SET_SESSIONID = 0x0004 +PROCESS_VM_OPERATION = 0x0008 +PROCESS_VM_READ = 0x0010 +PROCESS_VM_WRITE = 0x0020 +PROCESS_DUP_HANDLE = 0x0040 +PROCESS_CREATE_PROCESS = 0x0080 +PROCESS_SET_QUOTA = 0x0100 +PROCESS_SET_INFORMATION = 0x0200 +PROCESS_QUERY_INFORMATION = 0x0400 +PROCESS_SUSPEND_RESUME = 0x0800 +PROCESS_QUERY_LIMITED_INFORMATION = 0x1000 + +# Thread access rights for OpenThread +THREAD_TERMINATE = 0x0001 +THREAD_SUSPEND_RESUME = 0x0002 +THREAD_ALERT = 0x0004 +THREAD_GET_CONTEXT = 0x0008 +THREAD_SET_CONTEXT = 0x0010 +THREAD_SET_INFORMATION = 0x0020 +THREAD_QUERY_INFORMATION = 0x0040 +THREAD_SET_THREAD_TOKEN = 0x0080 +THREAD_IMPERSONATE = 0x0100 +THREAD_DIRECT_IMPERSONATION = 0x0200 +THREAD_SET_LIMITED_INFORMATION = 0x0400 +THREAD_QUERY_LIMITED_INFORMATION 
= 0x0800 + +# The values of PROCESS_ALL_ACCESS and THREAD_ALL_ACCESS were changed in Vista/2008 +PROCESS_ALL_ACCESS_NT = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFF) +PROCESS_ALL_ACCESS_VISTA = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFFF) +THREAD_ALL_ACCESS_NT = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0x3FF) +THREAD_ALL_ACCESS_VISTA = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 0xFFFF) +if NTDDI_VERSION < NTDDI_VISTA: + PROCESS_ALL_ACCESS = PROCESS_ALL_ACCESS_NT + THREAD_ALL_ACCESS = THREAD_ALL_ACCESS_NT +else: + PROCESS_ALL_ACCESS = PROCESS_ALL_ACCESS_VISTA + THREAD_ALL_ACCESS = THREAD_ALL_ACCESS_VISTA + +# Process priority classes + +IDLE_PRIORITY_CLASS = 0x00000040 +BELOW_NORMAL_PRIORITY_CLASS = 0x00004000 +NORMAL_PRIORITY_CLASS = 0x00000020 +ABOVE_NORMAL_PRIORITY_CLASS = 0x00008000 +HIGH_PRIORITY_CLASS = 0x00000080 +REALTIME_PRIORITY_CLASS = 0x00000100 + +PROCESS_MODE_BACKGROUND_BEGIN = 0x00100000 +PROCESS_MODE_BACKGROUND_END = 0x00200000 + +# dwCreationFlag values + +DEBUG_PROCESS = 0x00000001 +DEBUG_ONLY_THIS_PROCESS = 0x00000002 +CREATE_SUSPENDED = 0x00000004 # Threads and processes +DETACHED_PROCESS = 0x00000008 +CREATE_NEW_CONSOLE = 0x00000010 +NORMAL_PRIORITY_CLASS = 0x00000020 +IDLE_PRIORITY_CLASS = 0x00000040 +HIGH_PRIORITY_CLASS = 0x00000080 +REALTIME_PRIORITY_CLASS = 0x00000100 +CREATE_NEW_PROCESS_GROUP = 0x00000200 +CREATE_UNICODE_ENVIRONMENT = 0x00000400 +CREATE_SEPARATE_WOW_VDM = 0x00000800 +CREATE_SHARED_WOW_VDM = 0x00001000 +CREATE_FORCEDOS = 0x00002000 +BELOW_NORMAL_PRIORITY_CLASS = 0x00004000 +ABOVE_NORMAL_PRIORITY_CLASS = 0x00008000 +INHERIT_PARENT_AFFINITY = 0x00010000 +STACK_SIZE_PARAM_IS_A_RESERVATION = 0x00010000 # Threads only +INHERIT_CALLER_PRIORITY = 0x00020000 # Deprecated +CREATE_PROTECTED_PROCESS = 0x00040000 +EXTENDED_STARTUPINFO_PRESENT = 0x00080000 +PROCESS_MODE_BACKGROUND_BEGIN = 0x00100000 +PROCESS_MODE_BACKGROUND_END = 0x00200000 +CREATE_BREAKAWAY_FROM_JOB = 0x01000000 +CREATE_PRESERVE_CODE_AUTHZ_LEVEL = 0x02000000 +CREATE_DEFAULT_ERROR_MODE = 0x04000000 +CREATE_NO_WINDOW = 0x08000000 +PROFILE_USER = 0x10000000 +PROFILE_KERNEL = 0x20000000 +PROFILE_SERVER = 0x40000000 +CREATE_IGNORE_SYSTEM_DEFAULT = 0x80000000 + +# Thread priority values + +THREAD_BASE_PRIORITY_LOWRT = 15 # value that gets a thread to LowRealtime-1 +THREAD_BASE_PRIORITY_MAX = 2 # maximum thread base priority boost +THREAD_BASE_PRIORITY_MIN = (-2) # minimum thread base priority boost +THREAD_BASE_PRIORITY_IDLE = (-15) # value that gets a thread to idle + +THREAD_PRIORITY_LOWEST = THREAD_BASE_PRIORITY_MIN +THREAD_PRIORITY_BELOW_NORMAL = (THREAD_PRIORITY_LOWEST+1) +THREAD_PRIORITY_NORMAL = 0 +THREAD_PRIORITY_HIGHEST = THREAD_BASE_PRIORITY_MAX +THREAD_PRIORITY_ABOVE_NORMAL = (THREAD_PRIORITY_HIGHEST-1) +THREAD_PRIORITY_ERROR_RETURN = long(0xFFFFFFFF) + +THREAD_PRIORITY_TIME_CRITICAL = THREAD_BASE_PRIORITY_LOWRT +THREAD_PRIORITY_IDLE = THREAD_BASE_PRIORITY_IDLE + +# Memory access +SECTION_QUERY = 0x0001 +SECTION_MAP_WRITE = 0x0002 +SECTION_MAP_READ = 0x0004 +SECTION_MAP_EXECUTE = 0x0008 +SECTION_EXTEND_SIZE = 0x0010 +SECTION_MAP_EXECUTE_EXPLICIT = 0x0020 # not included in SECTION_ALL_ACCESS + +SECTION_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED|SECTION_QUERY|\ + SECTION_MAP_WRITE | \ + SECTION_MAP_READ | \ + SECTION_MAP_EXECUTE | \ + SECTION_EXTEND_SIZE) +PAGE_NOACCESS = 0x01 +PAGE_READONLY = 0x02 +PAGE_READWRITE = 0x04 +PAGE_WRITECOPY = 0x08 +PAGE_EXECUTE = 0x10 +PAGE_EXECUTE_READ = 0x20 +PAGE_EXECUTE_READWRITE = 0x40 +PAGE_EXECUTE_WRITECOPY = 0x80 +PAGE_GUARD = 0x100 +PAGE_NOCACHE = 
0x200 +PAGE_WRITECOMBINE = 0x400 +MEM_COMMIT = 0x1000 +MEM_RESERVE = 0x2000 +MEM_DECOMMIT = 0x4000 +MEM_RELEASE = 0x8000 +MEM_FREE = 0x10000 +MEM_PRIVATE = 0x20000 +MEM_MAPPED = 0x40000 +MEM_RESET = 0x80000 +MEM_TOP_DOWN = 0x100000 +MEM_WRITE_WATCH = 0x200000 +MEM_PHYSICAL = 0x400000 +MEM_LARGE_PAGES = 0x20000000 +MEM_4MB_PAGES = 0x80000000 +SEC_FILE = 0x800000 +SEC_IMAGE = 0x1000000 +SEC_RESERVE = 0x4000000 +SEC_COMMIT = 0x8000000 +SEC_NOCACHE = 0x10000000 +SEC_LARGE_PAGES = 0x80000000 +MEM_IMAGE = SEC_IMAGE +WRITE_WATCH_FLAG_RESET = 0x01 +FILE_MAP_ALL_ACCESS = 0xF001F + +SECTION_QUERY = 0x0001 +SECTION_MAP_WRITE = 0x0002 +SECTION_MAP_READ = 0x0004 +SECTION_MAP_EXECUTE = 0x0008 +SECTION_EXTEND_SIZE = 0x0010 +SECTION_MAP_EXECUTE_EXPLICIT = 0x0020 # not included in SECTION_ALL_ACCESS + +SECTION_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED|SECTION_QUERY|\ + SECTION_MAP_WRITE | \ + SECTION_MAP_READ | \ + SECTION_MAP_EXECUTE | \ + SECTION_EXTEND_SIZE) + +FILE_MAP_COPY = SECTION_QUERY +FILE_MAP_WRITE = SECTION_MAP_WRITE +FILE_MAP_READ = SECTION_MAP_READ +FILE_MAP_ALL_ACCESS = SECTION_ALL_ACCESS +FILE_MAP_EXECUTE = SECTION_MAP_EXECUTE_EXPLICIT # not included in FILE_MAP_ALL_ACCESS + +GENERIC_READ = 0x80000000 +GENERIC_WRITE = 0x40000000 +GENERIC_EXECUTE = 0x20000000 +GENERIC_ALL = 0x10000000 + +FILE_SHARE_READ = 0x00000001 +FILE_SHARE_WRITE = 0x00000002 +FILE_SHARE_DELETE = 0x00000004 + +CREATE_NEW = 1 +CREATE_ALWAYS = 2 +OPEN_EXISTING = 3 +OPEN_ALWAYS = 4 +TRUNCATE_EXISTING = 5 + +FILE_ATTRIBUTE_READONLY = 0x00000001 +FILE_ATTRIBUTE_NORMAL = 0x00000080 +FILE_ATTRIBUTE_TEMPORARY = 0x00000100 + +FILE_FLAG_WRITE_THROUGH = 0x80000000 +FILE_FLAG_NO_BUFFERING = 0x20000000 +FILE_FLAG_RANDOM_ACCESS = 0x10000000 +FILE_FLAG_SEQUENTIAL_SCAN = 0x08000000 +FILE_FLAG_DELETE_ON_CLOSE = 0x04000000 +FILE_FLAG_OVERLAPPED = 0x40000000 + +FILE_ATTRIBUTE_READONLY = 0x00000001 +FILE_ATTRIBUTE_HIDDEN = 0x00000002 +FILE_ATTRIBUTE_SYSTEM = 0x00000004 +FILE_ATTRIBUTE_DIRECTORY = 0x00000010 +FILE_ATTRIBUTE_ARCHIVE = 0x00000020 +FILE_ATTRIBUTE_DEVICE = 0x00000040 +FILE_ATTRIBUTE_NORMAL = 0x00000080 +FILE_ATTRIBUTE_TEMPORARY = 0x00000100 + +# Debug events +EXCEPTION_DEBUG_EVENT = 1 +CREATE_THREAD_DEBUG_EVENT = 2 +CREATE_PROCESS_DEBUG_EVENT = 3 +EXIT_THREAD_DEBUG_EVENT = 4 +EXIT_PROCESS_DEBUG_EVENT = 5 +LOAD_DLL_DEBUG_EVENT = 6 +UNLOAD_DLL_DEBUG_EVENT = 7 +OUTPUT_DEBUG_STRING_EVENT = 8 +RIP_EVENT = 9 + +# Debug status codes (ContinueDebugEvent) +DBG_EXCEPTION_HANDLED = long(0x00010001) +DBG_CONTINUE = long(0x00010002) +DBG_REPLY_LATER = long(0x40010001) +DBG_UNABLE_TO_PROVIDE_HANDLE = long(0x40010002) +DBG_TERMINATE_THREAD = long(0x40010003) +DBG_TERMINATE_PROCESS = long(0x40010004) +DBG_CONTROL_C = long(0x40010005) +DBG_PRINTEXCEPTION_C = long(0x40010006) +DBG_RIPEXCEPTION = long(0x40010007) +DBG_CONTROL_BREAK = long(0x40010008) +DBG_COMMAND_EXCEPTION = long(0x40010009) +DBG_EXCEPTION_NOT_HANDLED = long(0x80010001) +DBG_NO_STATE_CHANGE = long(0xC0010001) +DBG_APP_NOT_IDLE = long(0xC0010002) + +# Status codes +STATUS_WAIT_0 = long(0x00000000) +STATUS_ABANDONED_WAIT_0 = long(0x00000080) +STATUS_USER_APC = long(0x000000C0) +STATUS_TIMEOUT = long(0x00000102) +STATUS_PENDING = long(0x00000103) +STATUS_SEGMENT_NOTIFICATION = long(0x40000005) +STATUS_GUARD_PAGE_VIOLATION = long(0x80000001) +STATUS_DATATYPE_MISALIGNMENT = long(0x80000002) +STATUS_BREAKPOINT = long(0x80000003) +STATUS_SINGLE_STEP = long(0x80000004) +STATUS_INVALID_INFO_CLASS = long(0xC0000003) +STATUS_ACCESS_VIOLATION = long(0xC0000005) +STATUS_IN_PAGE_ERROR = 
long(0xC0000006) +STATUS_INVALID_HANDLE = long(0xC0000008) +STATUS_NO_MEMORY = long(0xC0000017) +STATUS_ILLEGAL_INSTRUCTION = long(0xC000001D) +STATUS_NONCONTINUABLE_EXCEPTION = long(0xC0000025) +STATUS_INVALID_DISPOSITION = long(0xC0000026) +STATUS_ARRAY_BOUNDS_EXCEEDED = long(0xC000008C) +STATUS_FLOAT_DENORMAL_OPERAND = long(0xC000008D) +STATUS_FLOAT_DIVIDE_BY_ZERO = long(0xC000008E) +STATUS_FLOAT_INEXACT_RESULT = long(0xC000008F) +STATUS_FLOAT_INVALID_OPERATION = long(0xC0000090) +STATUS_FLOAT_OVERFLOW = long(0xC0000091) +STATUS_FLOAT_STACK_CHECK = long(0xC0000092) +STATUS_FLOAT_UNDERFLOW = long(0xC0000093) +STATUS_INTEGER_DIVIDE_BY_ZERO = long(0xC0000094) +STATUS_INTEGER_OVERFLOW = long(0xC0000095) +STATUS_PRIVILEGED_INSTRUCTION = long(0xC0000096) +STATUS_STACK_OVERFLOW = long(0xC00000FD) +STATUS_CONTROL_C_EXIT = long(0xC000013A) +STATUS_FLOAT_MULTIPLE_FAULTS = long(0xC00002B4) +STATUS_FLOAT_MULTIPLE_TRAPS = long(0xC00002B5) +STATUS_REG_NAT_CONSUMPTION = long(0xC00002C9) +STATUS_SXS_EARLY_DEACTIVATION = long(0xC015000F) +STATUS_SXS_INVALID_DEACTIVATION = long(0xC0150010) + +STATUS_STACK_BUFFER_OVERRUN = long(0xC0000409) +STATUS_WX86_BREAKPOINT = long(0x4000001F) +STATUS_HEAP_CORRUPTION = long(0xC0000374) + +STATUS_POSSIBLE_DEADLOCK = long(0xC0000194) + +STATUS_UNWIND_CONSOLIDATE = long(0x80000029) + +# Exception codes + +EXCEPTION_ACCESS_VIOLATION = STATUS_ACCESS_VIOLATION +EXCEPTION_ARRAY_BOUNDS_EXCEEDED = STATUS_ARRAY_BOUNDS_EXCEEDED +EXCEPTION_BREAKPOINT = STATUS_BREAKPOINT +EXCEPTION_DATATYPE_MISALIGNMENT = STATUS_DATATYPE_MISALIGNMENT +EXCEPTION_FLT_DENORMAL_OPERAND = STATUS_FLOAT_DENORMAL_OPERAND +EXCEPTION_FLT_DIVIDE_BY_ZERO = STATUS_FLOAT_DIVIDE_BY_ZERO +EXCEPTION_FLT_INEXACT_RESULT = STATUS_FLOAT_INEXACT_RESULT +EXCEPTION_FLT_INVALID_OPERATION = STATUS_FLOAT_INVALID_OPERATION +EXCEPTION_FLT_OVERFLOW = STATUS_FLOAT_OVERFLOW +EXCEPTION_FLT_STACK_CHECK = STATUS_FLOAT_STACK_CHECK +EXCEPTION_FLT_UNDERFLOW = STATUS_FLOAT_UNDERFLOW +EXCEPTION_ILLEGAL_INSTRUCTION = STATUS_ILLEGAL_INSTRUCTION +EXCEPTION_IN_PAGE_ERROR = STATUS_IN_PAGE_ERROR +EXCEPTION_INT_DIVIDE_BY_ZERO = STATUS_INTEGER_DIVIDE_BY_ZERO +EXCEPTION_INT_OVERFLOW = STATUS_INTEGER_OVERFLOW +EXCEPTION_INVALID_DISPOSITION = STATUS_INVALID_DISPOSITION +EXCEPTION_NONCONTINUABLE_EXCEPTION = STATUS_NONCONTINUABLE_EXCEPTION +EXCEPTION_PRIV_INSTRUCTION = STATUS_PRIVILEGED_INSTRUCTION +EXCEPTION_SINGLE_STEP = STATUS_SINGLE_STEP +EXCEPTION_STACK_OVERFLOW = STATUS_STACK_OVERFLOW + +EXCEPTION_GUARD_PAGE = STATUS_GUARD_PAGE_VIOLATION +EXCEPTION_INVALID_HANDLE = STATUS_INVALID_HANDLE +EXCEPTION_POSSIBLE_DEADLOCK = STATUS_POSSIBLE_DEADLOCK +EXCEPTION_WX86_BREAKPOINT = STATUS_WX86_BREAKPOINT + +CONTROL_C_EXIT = STATUS_CONTROL_C_EXIT + +DBG_CONTROL_C = long(0x40010005) +MS_VC_EXCEPTION = long(0x406D1388) + +# Access violation types +ACCESS_VIOLATION_TYPE_READ = EXCEPTION_READ_FAULT +ACCESS_VIOLATION_TYPE_WRITE = EXCEPTION_WRITE_FAULT +ACCESS_VIOLATION_TYPE_DEP = EXCEPTION_EXECUTE_FAULT + +# RIP event types +SLE_ERROR = 1 +SLE_MINORERROR = 2 +SLE_WARNING = 3 + +# DuplicateHandle constants +DUPLICATE_CLOSE_SOURCE = 0x00000001 +DUPLICATE_SAME_ACCESS = 0x00000002 + +# GetFinalPathNameByHandle constants +FILE_NAME_NORMALIZED = 0x0 +FILE_NAME_OPENED = 0x8 +VOLUME_NAME_DOS = 0x0 +VOLUME_NAME_GUID = 0x1 +VOLUME_NAME_NONE = 0x4 +VOLUME_NAME_NT = 0x2 + +# GetProductInfo constants +PRODUCT_BUSINESS = 0x00000006 +PRODUCT_BUSINESS_N = 0x00000010 +PRODUCT_CLUSTER_SERVER = 0x00000012 +PRODUCT_DATACENTER_SERVER = 0x00000008 +PRODUCT_DATACENTER_SERVER_CORE = 
0x0000000C +PRODUCT_DATACENTER_SERVER_CORE_V = 0x00000027 +PRODUCT_DATACENTER_SERVER_V = 0x00000025 +PRODUCT_ENTERPRISE = 0x00000004 +PRODUCT_ENTERPRISE_E = 0x00000046 +PRODUCT_ENTERPRISE_N = 0x0000001B +PRODUCT_ENTERPRISE_SERVER = 0x0000000A +PRODUCT_ENTERPRISE_SERVER_CORE = 0x0000000E +PRODUCT_ENTERPRISE_SERVER_CORE_V = 0x00000029 +PRODUCT_ENTERPRISE_SERVER_IA64 = 0x0000000F +PRODUCT_ENTERPRISE_SERVER_V = 0x00000026 +PRODUCT_HOME_BASIC = 0x00000002 +PRODUCT_HOME_BASIC_E = 0x00000043 +PRODUCT_HOME_BASIC_N = 0x00000005 +PRODUCT_HOME_PREMIUM = 0x00000003 +PRODUCT_HOME_PREMIUM_E = 0x00000044 +PRODUCT_HOME_PREMIUM_N = 0x0000001A +PRODUCT_HYPERV = 0x0000002A +PRODUCT_MEDIUMBUSINESS_SERVER_MANAGEMENT = 0x0000001E +PRODUCT_MEDIUMBUSINESS_SERVER_MESSAGING = 0x00000020 +PRODUCT_MEDIUMBUSINESS_SERVER_SECURITY = 0x0000001F +PRODUCT_PROFESSIONAL = 0x00000030 +PRODUCT_PROFESSIONAL_E = 0x00000045 +PRODUCT_PROFESSIONAL_N = 0x00000031 +PRODUCT_SERVER_FOR_SMALLBUSINESS = 0x00000018 +PRODUCT_SERVER_FOR_SMALLBUSINESS_V = 0x00000023 +PRODUCT_SERVER_FOUNDATION = 0x00000021 +PRODUCT_SMALLBUSINESS_SERVER = 0x00000009 +PRODUCT_STANDARD_SERVER = 0x00000007 +PRODUCT_STANDARD_SERVER_CORE = 0x0000000D +PRODUCT_STANDARD_SERVER_CORE_V = 0x00000028 +PRODUCT_STANDARD_SERVER_V = 0x00000024 +PRODUCT_STARTER = 0x0000000B +PRODUCT_STARTER_E = 0x00000042 +PRODUCT_STARTER_N = 0x0000002F +PRODUCT_STORAGE_ENTERPRISE_SERVER = 0x00000017 +PRODUCT_STORAGE_EXPRESS_SERVER = 0x00000014 +PRODUCT_STORAGE_STANDARD_SERVER = 0x00000015 +PRODUCT_STORAGE_WORKGROUP_SERVER = 0x00000016 +PRODUCT_UNDEFINED = 0x00000000 +PRODUCT_UNLICENSED = 0xABCDABCD +PRODUCT_ULTIMATE = 0x00000001 +PRODUCT_ULTIMATE_E = 0x00000047 +PRODUCT_ULTIMATE_N = 0x0000001C +PRODUCT_WEB_SERVER = 0x00000011 +PRODUCT_WEB_SERVER_CORE = 0x0000001D + +# DEP policy flags +PROCESS_DEP_ENABLE = 1 +PROCESS_DEP_DISABLE_ATL_THUNK_EMULATION = 2 + +# Error modes +SEM_FAILCRITICALERRORS = 0x001 +SEM_NOGPFAULTERRORBOX = 0x002 +SEM_NOALIGNMENTFAULTEXCEPT = 0x004 +SEM_NOOPENFILEERRORBOX = 0x800 + +# GetHandleInformation / SetHandleInformation +HANDLE_FLAG_INHERIT = 0x00000001 +HANDLE_FLAG_PROTECT_FROM_CLOSE = 0x00000002 + +#--- Handle wrappers ---------------------------------------------------------- + +class Handle (object): + """ + Encapsulates Win32 handles to avoid leaking them. + + @type inherit: bool + @ivar inherit: C{True} if the handle is to be inherited by child processes, + C{False} otherwise. + + @type protectFromClose: bool + @ivar protectFromClose: Set to C{True} to prevent the handle from being + closed. Must be set to C{False} before you're done using the handle, + or it will be left open until the debugger exits. Use with care! + + @see: + L{ProcessHandle}, L{ThreadHandle}, L{FileHandle}, L{SnapshotHandle} + """ + + # XXX DEBUG + # When this private flag is True each Handle will print a message to + # standard output when it's created and destroyed. This is useful for + # detecting handle leaks within WinAppDbg itself. + __bLeakDetection = False + + def __init__(self, aHandle = None, bOwnership = True): + """ + @type aHandle: int + @param aHandle: Win32 handle value. + + @type bOwnership: bool + @param bOwnership: + C{True} if we own the handle and we need to close it. + C{False} if someone else will be calling L{CloseHandle}. 
+ """ + super(Handle, self).__init__() + self._value = self._normalize(aHandle) + self.bOwnership = bOwnership + if Handle.__bLeakDetection: # XXX DEBUG + print("INIT HANDLE (%r) %r" % (self.value, self)) + + @property + def value(self): + return self._value + + def __del__(self): + """ + Closes the Win32 handle when the Python object is destroyed. + """ + try: + if Handle.__bLeakDetection: # XXX DEBUG + print("DEL HANDLE %r" % self) + self.close() + except Exception: + pass + + def __enter__(self): + """ + Compatibility with the "C{with}" Python statement. + """ + if Handle.__bLeakDetection: # XXX DEBUG + print("ENTER HANDLE %r" % self) + return self + + def __exit__(self, type, value, traceback): + """ + Compatibility with the "C{with}" Python statement. + """ + if Handle.__bLeakDetection: # XXX DEBUG + print("EXIT HANDLE %r" % self) + try: + self.close() + except Exception: + pass + + def __copy__(self): + """ + Duplicates the Win32 handle when copying the Python object. + + @rtype: L{Handle} + @return: A new handle to the same Win32 object. + """ + return self.dup() + + def __deepcopy__(self): + """ + Duplicates the Win32 handle when copying the Python object. + + @rtype: L{Handle} + @return: A new handle to the same win32 object. + """ + return self.dup() + + @property + def _as_parameter_(self): + """ + Compatibility with ctypes. + Allows passing transparently a Handle object to an API call. + """ + return HANDLE(self.value) + + @staticmethod + def from_param(value): + """ + Compatibility with ctypes. + Allows passing transparently a Handle object to an API call. + + @type value: int + @param value: Numeric handle value. + """ + return HANDLE(value) + + def close(self): + """ + Closes the Win32 handle. + """ + if self.bOwnership and self.value not in (None, INVALID_HANDLE_VALUE): + if Handle.__bLeakDetection: # XXX DEBUG + print("CLOSE HANDLE (%d) %r" % (self.value, self)) + try: + self._close() + finally: + self._value = None + + def _close(self): + """ + Low-level close method. + This is a private method, do not call it. + """ + CloseHandle(self.value) + + def dup(self): + """ + @rtype: L{Handle} + @return: A new handle to the same Win32 object. + """ + if self.value is None: + raise ValueError("Closed handles can't be duplicated!") + new_handle = DuplicateHandle(self.value) + if Handle.__bLeakDetection: # XXX DEBUG + print("DUP HANDLE (%d -> %d) %r %r" % \ + (self.value, new_handle.value, self, new_handle)) + return new_handle + + @staticmethod + def _normalize(value): + """ + Normalize handle values. + """ + if hasattr(value, 'value'): + value = value.value + if value is not None: + value = long(value) + return value + + def wait(self, dwMilliseconds = None): + """ + Wait for the Win32 object to be signaled. + + @type dwMilliseconds: int + @param dwMilliseconds: (Optional) Timeout value in milliseconds. + Use C{INFINITE} or C{None} for no timeout. 
+ """ + if self.value is None: + raise ValueError("Handle is already closed!") + if dwMilliseconds is None: + dwMilliseconds = INFINITE + r = WaitForSingleObject(self.value, dwMilliseconds) + if r != WAIT_OBJECT_0: + raise ctypes.WinError(r) + + def __repr__(self): + return '<%s: %s>' % (self.__class__.__name__, self.value) + + def __get_inherit(self): + if self.value is None: + raise ValueError("Handle is already closed!") + return bool( GetHandleInformation(self.value) & HANDLE_FLAG_INHERIT ) + + def __set_inherit(self, value): + if self.value is None: + raise ValueError("Handle is already closed!") + flag = (0, HANDLE_FLAG_INHERIT)[ bool(value) ] + SetHandleInformation(self.value, flag, flag) + + inherit = property(__get_inherit, __set_inherit) + + def __get_protectFromClose(self): + if self.value is None: + raise ValueError("Handle is already closed!") + return bool( GetHandleInformation(self.value) & HANDLE_FLAG_PROTECT_FROM_CLOSE ) + + def __set_protectFromClose(self, value): + if self.value is None: + raise ValueError("Handle is already closed!") + flag = (0, HANDLE_FLAG_PROTECT_FROM_CLOSE)[ bool(value) ] + SetHandleInformation(self.value, flag, flag) + + protectFromClose = property(__get_protectFromClose, __set_protectFromClose) + +class UserModeHandle (Handle): + """ + Base class for non-kernel handles. Generally this means they are closed + by special Win32 API functions instead of CloseHandle() and some standard + operations (synchronizing, duplicating, inheritance) are not supported. + + @type _TYPE: C type + @cvar _TYPE: C type to translate this handle to. + Subclasses should override this. + Defaults to L{HANDLE}. + """ + + # Subclasses should override this. + _TYPE = HANDLE + + # This method must be implemented by subclasses. + def _close(self): + raise NotImplementedError() + + # Translation to C type. + @property + def _as_parameter_(self): + return self._TYPE(self.value) + + # Translation to C type. + @staticmethod + def from_param(value): + return self._TYPE(self.value) + + # Operation not supported. + @property + def inherit(self): + return False + + # Operation not supported. + @property + def protectFromClose(self): + return False + + # Operation not supported. + def dup(self): + raise NotImplementedError() + + # Operation not supported. + def wait(self, dwMilliseconds = None): + raise NotImplementedError() + +class ProcessHandle (Handle): + """ + Win32 process handle. + + @type dwAccess: int + @ivar dwAccess: Current access flags to this handle. + This is the same value passed to L{OpenProcess}. + Can only be C{None} if C{aHandle} is also C{None}. + Defaults to L{PROCESS_ALL_ACCESS}. + + @see: L{Handle} + """ + + def __init__(self, aHandle = None, bOwnership = True, + dwAccess = PROCESS_ALL_ACCESS): + """ + @type aHandle: int + @param aHandle: Win32 handle value. + + @type bOwnership: bool + @param bOwnership: + C{True} if we own the handle and we need to close it. + C{False} if someone else will be calling L{CloseHandle}. + + @type dwAccess: int + @param dwAccess: Current access flags to this handle. + This is the same value passed to L{OpenProcess}. + Can only be C{None} if C{aHandle} is also C{None}. + Defaults to L{PROCESS_ALL_ACCESS}. + """ + super(ProcessHandle, self).__init__(aHandle, bOwnership) + self.dwAccess = dwAccess + if aHandle is not None and dwAccess is None: + msg = "Missing access flags for process handle: %x" % aHandle + raise TypeError(msg) + + def get_pid(self): + """ + @rtype: int + @return: Process global ID. 
+ """ + return GetProcessId(self.value) + +class ThreadHandle (Handle): + """ + Win32 thread handle. + + @type dwAccess: int + @ivar dwAccess: Current access flags to this handle. + This is the same value passed to L{OpenThread}. + Can only be C{None} if C{aHandle} is also C{None}. + Defaults to L{THREAD_ALL_ACCESS}. + + @see: L{Handle} + """ + + def __init__(self, aHandle = None, bOwnership = True, + dwAccess = THREAD_ALL_ACCESS): + """ + @type aHandle: int + @param aHandle: Win32 handle value. + + @type bOwnership: bool + @param bOwnership: + C{True} if we own the handle and we need to close it. + C{False} if someone else will be calling L{CloseHandle}. + + @type dwAccess: int + @param dwAccess: Current access flags to this handle. + This is the same value passed to L{OpenThread}. + Can only be C{None} if C{aHandle} is also C{None}. + Defaults to L{THREAD_ALL_ACCESS}. + """ + super(ThreadHandle, self).__init__(aHandle, bOwnership) + self.dwAccess = dwAccess + if aHandle is not None and dwAccess is None: + msg = "Missing access flags for thread handle: %x" % aHandle + raise TypeError(msg) + + def get_tid(self): + """ + @rtype: int + @return: Thread global ID. + """ + return GetThreadId(self.value) + +class FileHandle (Handle): + """ + Win32 file handle. + + @see: L{Handle} + """ + + def get_filename(self): + """ + @rtype: None or str + @return: Name of the open file, or C{None} if unavailable. + """ + # + # XXX BUG + # + # This code truncates the first two bytes of the path. + # It seems to be the expected behavior of NtQueryInformationFile. + # + # My guess is it only returns the NT pathname, without the device name. + # It's like dropping the drive letter in a Win32 pathname. + # + # Note that using the "official" GetFileInformationByHandleEx + # API introduced in Vista doesn't change the results! + # + dwBufferSize = 0x1004 + lpFileInformation = ctypes.create_string_buffer(dwBufferSize) + try: + GetFileInformationByHandleEx(self.value, + FILE_INFO_BY_HANDLE_CLASS.FileNameInfo, + lpFileInformation, dwBufferSize) + except AttributeError: + from winappdbg.win32.ntdll import NtQueryInformationFile, \ + FileNameInformation, \ + FILE_NAME_INFORMATION + NtQueryInformationFile(self.value, + FileNameInformation, + lpFileInformation, + dwBufferSize) + FileName = compat.unicode(lpFileInformation.raw[sizeof(DWORD):], 'U16') + FileName = ctypes.create_unicode_buffer(FileName).value + if not FileName: + FileName = None + elif FileName[1:2] != ':': + # When the drive letter is missing, we'll assume SYSTEMROOT. + # Not a good solution but it could be worse. + import os + FileName = os.environ['SYSTEMROOT'][:2] + FileName + return FileName + +class FileMappingHandle (Handle): + """ + File mapping handle. + + @see: L{Handle} + """ + pass + +# XXX maybe add functions related to the toolhelp snapshots here? +class SnapshotHandle (Handle): + """ + Toolhelp32 snapshot handle. + + @see: L{Handle} + """ + pass + +#--- Structure wrappers ------------------------------------------------------- + +class ProcessInformation (object): + """ + Process information object returned by L{CreateProcess}. + """ + + def __init__(self, pi): + self.hProcess = ProcessHandle(pi.hProcess) + self.hThread = ThreadHandle(pi.hThread) + self.dwProcessId = pi.dwProcessId + self.dwThreadId = pi.dwThreadId + +# Don't psyco-optimize this class because it needs to be serialized. +class MemoryBasicInformation (object): + """ + Memory information object returned by L{VirtualQueryEx}. 
+ """ + + READABLE = ( + PAGE_EXECUTE_READ | + PAGE_EXECUTE_READWRITE | + PAGE_EXECUTE_WRITECOPY | + PAGE_READONLY | + PAGE_READWRITE | + PAGE_WRITECOPY + ) + + WRITEABLE = ( + PAGE_EXECUTE_READWRITE | + PAGE_EXECUTE_WRITECOPY | + PAGE_READWRITE | + PAGE_WRITECOPY + ) + + COPY_ON_WRITE = ( + PAGE_EXECUTE_WRITECOPY | + PAGE_WRITECOPY + ) + + EXECUTABLE = ( + PAGE_EXECUTE | + PAGE_EXECUTE_READ | + PAGE_EXECUTE_READWRITE | + PAGE_EXECUTE_WRITECOPY + ) + + EXECUTABLE_AND_WRITEABLE = ( + PAGE_EXECUTE_READWRITE | + PAGE_EXECUTE_WRITECOPY + ) + + def __init__(self, mbi=None): + """ + @type mbi: L{MEMORY_BASIC_INFORMATION} or L{MemoryBasicInformation} + @param mbi: Either a L{MEMORY_BASIC_INFORMATION} structure or another + L{MemoryBasicInformation} instance. + """ + if mbi is None: + self.BaseAddress = None + self.AllocationBase = None + self.AllocationProtect = None + self.RegionSize = None + self.State = None + self.Protect = None + self.Type = None + else: + self.BaseAddress = mbi.BaseAddress + self.AllocationBase = mbi.AllocationBase + self.AllocationProtect = mbi.AllocationProtect + self.RegionSize = mbi.RegionSize + self.State = mbi.State + self.Protect = mbi.Protect + self.Type = mbi.Type + + # Only used when copying MemoryBasicInformation objects, instead of + # instancing them from a MEMORY_BASIC_INFORMATION structure. + if hasattr(mbi, 'content'): + self.content = mbi.content + if hasattr(mbi, 'filename'): + self.content = mbi.filename + + def __contains__(self, address): + """ + Test if the given memory address falls within this memory region. + + @type address: int + @param address: Memory address to test. + + @rtype: bool + @return: C{True} if the given memory address falls within this memory + region, C{False} otherwise. + """ + return self.BaseAddress <= address < (self.BaseAddress + self.RegionSize) + + def is_free(self): + """ + @rtype: bool + @return: C{True} if the memory in this region is free. + """ + return self.State == MEM_FREE + + def is_reserved(self): + """ + @rtype: bool + @return: C{True} if the memory in this region is reserved. + """ + return self.State == MEM_RESERVE + + def is_commited(self): + """ + @rtype: bool + @return: C{True} if the memory in this region is commited. + """ + return self.State == MEM_COMMIT + + def is_image(self): + """ + @rtype: bool + @return: C{True} if the memory in this region belongs to an executable + image. + """ + return self.Type == MEM_IMAGE + + def is_mapped(self): + """ + @rtype: bool + @return: C{True} if the memory in this region belongs to a mapped file. + """ + return self.Type == MEM_MAPPED + + def is_private(self): + """ + @rtype: bool + @return: C{True} if the memory in this region is private. + """ + return self.Type == MEM_PRIVATE + + def is_guard(self): + """ + @rtype: bool + @return: C{True} if all pages in this region are guard pages. + """ + return self.is_commited() and bool(self.Protect & PAGE_GUARD) + + def has_content(self): + """ + @rtype: bool + @return: C{True} if the memory in this region has any data in it. + """ + return self.is_commited() and not bool(self.Protect & (PAGE_GUARD | PAGE_NOACCESS)) + + def is_readable(self): + """ + @rtype: bool + @return: C{True} if all pages in this region are readable. + """ + return self.has_content() and bool(self.Protect & self.READABLE) + + def is_writeable(self): + """ + @rtype: bool + @return: C{True} if all pages in this region are writeable. 
+ """ + return self.has_content() and bool(self.Protect & self.WRITEABLE) + + def is_copy_on_write(self): + """ + @rtype: bool + @return: C{True} if all pages in this region are marked as + copy-on-write. This means the pages are writeable, but changes + are not propagated to disk. + @note: + Tipically data sections in executable images are marked like this. + """ + return self.has_content() and bool(self.Protect & self.COPY_ON_WRITE) + + def is_executable(self): + """ + @rtype: bool + @return: C{True} if all pages in this region are executable. + @note: Executable pages are always readable. + """ + return self.has_content() and bool(self.Protect & self.EXECUTABLE) + + def is_executable_and_writeable(self): + """ + @rtype: bool + @return: C{True} if all pages in this region are executable and + writeable. + @note: The presence of such pages make memory corruption + vulnerabilities much easier to exploit. + """ + return self.has_content() and bool(self.Protect & self.EXECUTABLE_AND_WRITEABLE) + +class ProcThreadAttributeList (object): + """ + Extended process and thread attribute support. + + To be used with L{STARTUPINFOEX}. + Only available for Windows Vista and above. + + @type AttributeList: list of tuple( int, ctypes-compatible object ) + @ivar AttributeList: List of (Attribute, Value) pairs. + + @type AttributeListBuffer: L{LPPROC_THREAD_ATTRIBUTE_LIST} + @ivar AttributeListBuffer: Memory buffer used to store the attribute list. + L{InitializeProcThreadAttributeList}, + L{UpdateProcThreadAttribute}, + L{DeleteProcThreadAttributeList} and + L{STARTUPINFOEX}. + """ + + def __init__(self, AttributeList): + """ + @type AttributeList: list of tuple( int, ctypes-compatible object ) + @param AttributeList: List of (Attribute, Value) pairs. + """ + self.AttributeList = AttributeList + self.AttributeListBuffer = InitializeProcThreadAttributeList( + len(AttributeList)) + try: + for Attribute, Value in AttributeList: + UpdateProcThreadAttribute(self.AttributeListBuffer, + Attribute, Value) + except: + ProcThreadAttributeList.__del__(self) + raise + + def __del__(self): + try: + DeleteProcThreadAttributeList(self.AttributeListBuffer) + del self.AttributeListBuffer + except Exception: + pass + + def __copy__(self): + return self.__deepcopy__() + + def __deepcopy__(self): + return self.__class__(self.AttributeList) + + @property + def value(self): + return ctypes.cast(ctypes.pointer(self.AttributeListBuffer), LPVOID) + + @property + def _as_parameter_(self): + return self.value + + # XXX TODO + @staticmethod + def from_param(value): + raise NotImplementedError() + +#--- OVERLAPPED structure ----------------------------------------------------- + +# typedef struct _OVERLAPPED { +# ULONG_PTR Internal; +# ULONG_PTR InternalHigh; +# union { +# struct { +# DWORD Offset; +# DWORD OffsetHigh; +# } ; +# PVOID Pointer; +# } ; +# HANDLE hEvent; +# }OVERLAPPED, *LPOVERLAPPED; +class _OVERLAPPED_STRUCT(Structure): + _fields_ = [ + ('Offset', DWORD), + ('OffsetHigh', DWORD), + ] +class _OVERLAPPED_UNION(Union): + _fields_ = [ + ('s', _OVERLAPPED_STRUCT), + ('Pointer', PVOID), + ] +class OVERLAPPED(Structure): + _fields_ = [ + ('Internal', ULONG_PTR), + ('InternalHigh', ULONG_PTR), + ('u', _OVERLAPPED_UNION), + ('hEvent', HANDLE), + ] +LPOVERLAPPED = POINTER(OVERLAPPED) + +#--- SECURITY_ATTRIBUTES structure -------------------------------------------- + +# typedef struct _SECURITY_ATTRIBUTES { +# DWORD nLength; +# LPVOID lpSecurityDescriptor; +# BOOL bInheritHandle; +# } SECURITY_ATTRIBUTES, 
*PSECURITY_ATTRIBUTES, *LPSECURITY_ATTRIBUTES; +class SECURITY_ATTRIBUTES(Structure): + _fields_ = [ + ('nLength', DWORD), + ('lpSecurityDescriptor', LPVOID), + ('bInheritHandle', BOOL), + ] +LPSECURITY_ATTRIBUTES = POINTER(SECURITY_ATTRIBUTES) + +# --- Extended process and thread attribute support --------------------------- + +PPROC_THREAD_ATTRIBUTE_LIST = LPVOID +LPPROC_THREAD_ATTRIBUTE_LIST = PPROC_THREAD_ATTRIBUTE_LIST + +PROC_THREAD_ATTRIBUTE_NUMBER = 0x0000FFFF +PROC_THREAD_ATTRIBUTE_THREAD = 0x00010000 # Attribute may be used with thread creation +PROC_THREAD_ATTRIBUTE_INPUT = 0x00020000 # Attribute is input only +PROC_THREAD_ATTRIBUTE_ADDITIVE = 0x00040000 # Attribute may be "accumulated," e.g. bitmasks, counters, etc. + +# PROC_THREAD_ATTRIBUTE_NUM +ProcThreadAttributeParentProcess = 0 +ProcThreadAttributeExtendedFlags = 1 +ProcThreadAttributeHandleList = 2 +ProcThreadAttributeGroupAffinity = 3 +ProcThreadAttributePreferredNode = 4 +ProcThreadAttributeIdealProcessor = 5 +ProcThreadAttributeUmsThread = 6 +ProcThreadAttributeMitigationPolicy = 7 +ProcThreadAttributeMax = 8 + +PROC_THREAD_ATTRIBUTE_PARENT_PROCESS = ProcThreadAttributeParentProcess | PROC_THREAD_ATTRIBUTE_INPUT +PROC_THREAD_ATTRIBUTE_EXTENDED_FLAGS = ProcThreadAttributeExtendedFlags | PROC_THREAD_ATTRIBUTE_INPUT | PROC_THREAD_ATTRIBUTE_ADDITIVE +PROC_THREAD_ATTRIBUTE_HANDLE_LIST = ProcThreadAttributeHandleList | PROC_THREAD_ATTRIBUTE_INPUT +PROC_THREAD_ATTRIBUTE_GROUP_AFFINITY = ProcThreadAttributeGroupAffinity | PROC_THREAD_ATTRIBUTE_THREAD | PROC_THREAD_ATTRIBUTE_INPUT +PROC_THREAD_ATTRIBUTE_PREFERRED_NODE = ProcThreadAttributePreferredNode | PROC_THREAD_ATTRIBUTE_INPUT +PROC_THREAD_ATTRIBUTE_IDEAL_PROCESSOR = ProcThreadAttributeIdealProcessor | PROC_THREAD_ATTRIBUTE_THREAD | PROC_THREAD_ATTRIBUTE_INPUT +PROC_THREAD_ATTRIBUTE_UMS_THREAD = ProcThreadAttributeUmsThread | PROC_THREAD_ATTRIBUTE_THREAD | PROC_THREAD_ATTRIBUTE_INPUT +PROC_THREAD_ATTRIBUTE_MITIGATION_POLICY = ProcThreadAttributeMitigationPolicy | PROC_THREAD_ATTRIBUTE_INPUT + +PROCESS_CREATION_MITIGATION_POLICY_DEP_ENABLE = 0x01 +PROCESS_CREATION_MITIGATION_POLICY_DEP_ATL_THUNK_ENABLE = 0x02 +PROCESS_CREATION_MITIGATION_POLICY_SEHOP_ENABLE = 0x04 + +#--- VS_FIXEDFILEINFO structure ----------------------------------------------- + +# struct VS_FIXEDFILEINFO { +# DWORD dwSignature; +# DWORD dwStrucVersion; +# DWORD dwFileVersionMS; +# DWORD dwFileVersionLS; +# DWORD dwProductVersionMS; +# DWORD dwProductVersionLS; +# DWORD dwFileFlagsMask; +# DWORD dwFileFlags; +# DWORD dwFileOS; +# DWORD dwFileType; +# DWORD dwFileSubtype; +# DWORD dwFileDateMS; +# DWORD dwFileDateLS; +# }; +class VS_FIXEDFILEINFO (Structure): + _fields_ = [ + ("dwSignature", DWORD), # 0xFEEF04BD + ("dwStrucVersion", DWORD), + ("dwFileVersionMS", DWORD), + ("dwFileVersionLS", DWORD), + ("dwProductVersionMS", DWORD), + ("dwProductVersionLS", DWORD), + ("dwFileFlagsMask", DWORD), + ("dwFileFlags", DWORD), + ("dwFileOS", DWORD), + ("dwFileType", DWORD), + ("dwFileSubtype", DWORD), + ("dwFileDateMS", DWORD), + ("dwFileDateLS", DWORD), + ] + +#--- THREADNAME_INFO structure ------------------------------------------------ + +# typedef struct tagTHREADNAME_INFO +# { +# DWORD dwType; // Must be 0x1000. +# LPCSTR szName; // Pointer to name (in user addr space). +# DWORD dwThreadID; // Thread ID (-1=caller thread). +# DWORD dwFlags; // Reserved for future use, must be zero. 
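
The PROC_THREAD_ATTRIBUTE_* constants above are consumed by the ProcThreadAttributeList helper defined earlier. A hedged sketch of re-parenting a new process: hParent is a placeholder handle value, and the resulting buffer would normally be plugged into the STARTUPINFOEX structure declared further below before calling CreateProcess with the EXTENDED_STARTUPINFO_PRESENT creation flag.

    hParent = HANDLE(1234)   # placeholder; normally the value of an OpenProcess handle
    attrs = ProcThreadAttributeList([
        (PROC_THREAD_ATTRIBUTE_PARENT_PROCESS, hParent),
    ])
    lpAttributeList = attrs.value   # LPVOID suitable for STARTUPINFOEX.lpAttributeList
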
+# } THREADNAME_INFO; +class THREADNAME_INFO(Structure): + _fields_ = [ + ("dwType", DWORD), # 0x1000 + ("szName", LPVOID), # remote pointer + ("dwThreadID", DWORD), # -1 usually + ("dwFlags", DWORD), # 0 + ] + +#--- MEMORY_BASIC_INFORMATION structure --------------------------------------- + +# typedef struct _MEMORY_BASIC_INFORMATION32 { +# DWORD BaseAddress; +# DWORD AllocationBase; +# DWORD AllocationProtect; +# DWORD RegionSize; +# DWORD State; +# DWORD Protect; +# DWORD Type; +# } MEMORY_BASIC_INFORMATION32, *PMEMORY_BASIC_INFORMATION32; +class MEMORY_BASIC_INFORMATION32(Structure): + _fields_ = [ + ('BaseAddress', DWORD), # remote pointer + ('AllocationBase', DWORD), # remote pointer + ('AllocationProtect', DWORD), + ('RegionSize', DWORD), + ('State', DWORD), + ('Protect', DWORD), + ('Type', DWORD), + ] + +# typedef struct DECLSPEC_ALIGN(16) _MEMORY_BASIC_INFORMATION64 { +# ULONGLONG BaseAddress; +# ULONGLONG AllocationBase; +# DWORD AllocationProtect; +# DWORD __alignment1; +# ULONGLONG RegionSize; +# DWORD State; +# DWORD Protect; +# DWORD Type; +# DWORD __alignment2; +# } MEMORY_BASIC_INFORMATION64, *PMEMORY_BASIC_INFORMATION64; +class MEMORY_BASIC_INFORMATION64(Structure): + _fields_ = [ + ('BaseAddress', ULONGLONG), # remote pointer + ('AllocationBase', ULONGLONG), # remote pointer + ('AllocationProtect', DWORD), + ('__alignment1', DWORD), + ('RegionSize', ULONGLONG), + ('State', DWORD), + ('Protect', DWORD), + ('Type', DWORD), + ('__alignment2', DWORD), + ] + +# typedef struct _MEMORY_BASIC_INFORMATION { +# PVOID BaseAddress; +# PVOID AllocationBase; +# DWORD AllocationProtect; +# SIZE_T RegionSize; +# DWORD State; +# DWORD Protect; +# DWORD Type; +# } MEMORY_BASIC_INFORMATION, *PMEMORY_BASIC_INFORMATION; +class MEMORY_BASIC_INFORMATION(Structure): + _fields_ = [ + ('BaseAddress', SIZE_T), # remote pointer + ('AllocationBase', SIZE_T), # remote pointer + ('AllocationProtect', DWORD), + ('RegionSize', SIZE_T), + ('State', DWORD), + ('Protect', DWORD), + ('Type', DWORD), + ] +PMEMORY_BASIC_INFORMATION = POINTER(MEMORY_BASIC_INFORMATION) + +#--- BY_HANDLE_FILE_INFORMATION structure ------------------------------------- + +# typedef struct _FILETIME { +# DWORD dwLowDateTime; +# DWORD dwHighDateTime; +# } FILETIME, *PFILETIME; +class FILETIME(Structure): + _fields_ = [ + ('dwLowDateTime', DWORD), + ('dwHighDateTime', DWORD), + ] +LPFILETIME = POINTER(FILETIME) + +# typedef struct _SYSTEMTIME { +# WORD wYear; +# WORD wMonth; +# WORD wDayOfWeek; +# WORD wDay; +# WORD wHour; +# WORD wMinute; +# WORD wSecond; +# WORD wMilliseconds; +# }SYSTEMTIME, *PSYSTEMTIME; +class SYSTEMTIME(Structure): + _fields_ = [ + ('wYear', WORD), + ('wMonth', WORD), + ('wDayOfWeek', WORD), + ('wDay', WORD), + ('wHour', WORD), + ('wMinute', WORD), + ('wSecond', WORD), + ('wMilliseconds', WORD), + ] +LPSYSTEMTIME = POINTER(SYSTEMTIME) + +# typedef struct _BY_HANDLE_FILE_INFORMATION { +# DWORD dwFileAttributes; +# FILETIME ftCreationTime; +# FILETIME ftLastAccessTime; +# FILETIME ftLastWriteTime; +# DWORD dwVolumeSerialNumber; +# DWORD nFileSizeHigh; +# DWORD nFileSizeLow; +# DWORD nNumberOfLinks; +# DWORD nFileIndexHigh; +# DWORD nFileIndexLow; +# } BY_HANDLE_FILE_INFORMATION, *PBY_HANDLE_FILE_INFORMATION; +class BY_HANDLE_FILE_INFORMATION(Structure): + _fields_ = [ + ('dwFileAttributes', DWORD), + ('ftCreationTime', FILETIME), + ('ftLastAccessTime', FILETIME), + ('ftLastWriteTime', FILETIME), + ('dwVolumeSerialNumber', DWORD), + ('nFileSizeHigh', DWORD), + ('nFileSizeLow', DWORD), + ('nNumberOfLinks', DWORD), 
+ ('nFileIndexHigh', DWORD), + ('nFileIndexLow', DWORD), + ] +LPBY_HANDLE_FILE_INFORMATION = POINTER(BY_HANDLE_FILE_INFORMATION) + +# typedef enum _FILE_INFO_BY_HANDLE_CLASS { +# FileBasicInfo = 0, +# FileStandardInfo = 1, +# FileNameInfo = 2, +# FileRenameInfo = 3, +# FileDispositionInfo = 4, +# FileAllocationInfo = 5, +# FileEndOfFileInfo = 6, +# FileStreamInfo = 7, +# FileCompressionInfo = 8, +# FileAttributeTagInfo = 9, +# FileIdBothDirectoryInfo = 10, +# FileIdBothDirectoryRestartInfo = 11, +# FileIoPriorityHintInfo = 12, +# MaximumFileInfoByHandlesClass = 13 +# } FILE_INFO_BY_HANDLE_CLASS, *PFILE_INFO_BY_HANDLE_CLASS; +class FILE_INFO_BY_HANDLE_CLASS(object): + FileBasicInfo = 0 + FileStandardInfo = 1 + FileNameInfo = 2 + FileRenameInfo = 3 + FileDispositionInfo = 4 + FileAllocationInfo = 5 + FileEndOfFileInfo = 6 + FileStreamInfo = 7 + FileCompressionInfo = 8 + FileAttributeTagInfo = 9 + FileIdBothDirectoryInfo = 10 + FileIdBothDirectoryRestartInfo = 11 + FileIoPriorityHintInfo = 12 + MaximumFileInfoByHandlesClass = 13 + +# typedef struct _FILE_NAME_INFO { +# DWORD FileNameLength; +# WCHAR FileName[1]; +# } FILE_NAME_INFO, *PFILE_NAME_INFO; +##class FILE_NAME_INFO(Structure): +## _fields_ = [ +## ('FileNameLength', DWORD), +## ('FileName', WCHAR * 1), +## ] + +# TO DO: add more structures used by GetFileInformationByHandleEx() + +#--- PROCESS_INFORMATION structure -------------------------------------------- + +# typedef struct _PROCESS_INFORMATION { +# HANDLE hProcess; +# HANDLE hThread; +# DWORD dwProcessId; +# DWORD dwThreadId; +# } PROCESS_INFORMATION, *PPROCESS_INFORMATION, *LPPROCESS_INFORMATION; +class PROCESS_INFORMATION(Structure): + _fields_ = [ + ('hProcess', HANDLE), + ('hThread', HANDLE), + ('dwProcessId', DWORD), + ('dwThreadId', DWORD), + ] +LPPROCESS_INFORMATION = POINTER(PROCESS_INFORMATION) + +#--- STARTUPINFO and STARTUPINFOEX structures --------------------------------- + +# typedef struct _STARTUPINFO { +# DWORD cb; +# LPTSTR lpReserved; +# LPTSTR lpDesktop; +# LPTSTR lpTitle; +# DWORD dwX; +# DWORD dwY; +# DWORD dwXSize; +# DWORD dwYSize; +# DWORD dwXCountChars; +# DWORD dwYCountChars; +# DWORD dwFillAttribute; +# DWORD dwFlags; +# WORD wShowWindow; +# WORD cbReserved2; +# LPBYTE lpReserved2; +# HANDLE hStdInput; +# HANDLE hStdOutput; +# HANDLE hStdError; +# }STARTUPINFO, *LPSTARTUPINFO; +class STARTUPINFO(Structure): + _fields_ = [ + ('cb', DWORD), + ('lpReserved', LPSTR), + ('lpDesktop', LPSTR), + ('lpTitle', LPSTR), + ('dwX', DWORD), + ('dwY', DWORD), + ('dwXSize', DWORD), + ('dwYSize', DWORD), + ('dwXCountChars', DWORD), + ('dwYCountChars', DWORD), + ('dwFillAttribute', DWORD), + ('dwFlags', DWORD), + ('wShowWindow', WORD), + ('cbReserved2', WORD), + ('lpReserved2', LPVOID), # LPBYTE + ('hStdInput', HANDLE), + ('hStdOutput', HANDLE), + ('hStdError', HANDLE), + ] +LPSTARTUPINFO = POINTER(STARTUPINFO) + +# typedef struct _STARTUPINFOEX { +# STARTUPINFO StartupInfo; +# PPROC_THREAD_ATTRIBUTE_LIST lpAttributeList; +# } STARTUPINFOEX, *LPSTARTUPINFOEX; +class STARTUPINFOEX(Structure): + _fields_ = [ + ('StartupInfo', STARTUPINFO), + ('lpAttributeList', PPROC_THREAD_ATTRIBUTE_LIST), + ] +LPSTARTUPINFOEX = POINTER(STARTUPINFOEX) + +class STARTUPINFOW(Structure): + _fields_ = [ + ('cb', DWORD), + ('lpReserved', LPWSTR), + ('lpDesktop', LPWSTR), + ('lpTitle', LPWSTR), + ('dwX', DWORD), + ('dwY', DWORD), + ('dwXSize', DWORD), + ('dwYSize', DWORD), + ('dwXCountChars', DWORD), + ('dwYCountChars', DWORD), + ('dwFillAttribute', DWORD), + ('dwFlags', DWORD), + 
('wShowWindow', WORD), + ('cbReserved2', WORD), + ('lpReserved2', LPVOID), # LPBYTE + ('hStdInput', HANDLE), + ('hStdOutput', HANDLE), + ('hStdError', HANDLE), + ] +LPSTARTUPINFOW = POINTER(STARTUPINFOW) + +class STARTUPINFOEXW(Structure): + _fields_ = [ + ('StartupInfo', STARTUPINFOW), + ('lpAttributeList', PPROC_THREAD_ATTRIBUTE_LIST), + ] +LPSTARTUPINFOEXW = POINTER(STARTUPINFOEXW) + +#--- JIT_DEBUG_INFO structure ------------------------------------------------- + +# typedef struct _JIT_DEBUG_INFO { +# DWORD dwSize; +# DWORD dwProcessorArchitecture; +# DWORD dwThreadID; +# DWORD dwReserved0; +# ULONG64 lpExceptionAddress; +# ULONG64 lpExceptionRecord; +# ULONG64 lpContextRecord; +# } JIT_DEBUG_INFO, *LPJIT_DEBUG_INFO; +class JIT_DEBUG_INFO(Structure): + _fields_ = [ + ('dwSize', DWORD), + ('dwProcessorArchitecture', DWORD), + ('dwThreadID', DWORD), + ('dwReserved0', DWORD), + ('lpExceptionAddress', ULONG64), + ('lpExceptionRecord', ULONG64), + ('lpContextRecord', ULONG64), + ] +JIT_DEBUG_INFO32 = JIT_DEBUG_INFO +JIT_DEBUG_INFO64 = JIT_DEBUG_INFO + +LPJIT_DEBUG_INFO = POINTER(JIT_DEBUG_INFO) +LPJIT_DEBUG_INFO32 = POINTER(JIT_DEBUG_INFO32) +LPJIT_DEBUG_INFO64 = POINTER(JIT_DEBUG_INFO64) + +#--- DEBUG_EVENT structure ---------------------------------------------------- + +# typedef struct _EXCEPTION_RECORD32 { +# DWORD ExceptionCode; +# DWORD ExceptionFlags; +# DWORD ExceptionRecord; +# DWORD ExceptionAddress; +# DWORD NumberParameters; +# DWORD ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS]; +# } EXCEPTION_RECORD32, *PEXCEPTION_RECORD32; +class EXCEPTION_RECORD32(Structure): + _fields_ = [ + ('ExceptionCode', DWORD), + ('ExceptionFlags', DWORD), + ('ExceptionRecord', DWORD), + ('ExceptionAddress', DWORD), + ('NumberParameters', DWORD), + ('ExceptionInformation', DWORD * EXCEPTION_MAXIMUM_PARAMETERS), + ] + +PEXCEPTION_RECORD32 = POINTER(EXCEPTION_RECORD32) + +# typedef struct _EXCEPTION_RECORD64 { +# DWORD ExceptionCode; +# DWORD ExceptionFlags; +# DWORD64 ExceptionRecord; +# DWORD64 ExceptionAddress; +# DWORD NumberParameters; +# DWORD __unusedAlignment; +# DWORD64 ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS]; +# } EXCEPTION_RECORD64, *PEXCEPTION_RECORD64; +class EXCEPTION_RECORD64(Structure): + _fields_ = [ + ('ExceptionCode', DWORD), + ('ExceptionFlags', DWORD), + ('ExceptionRecord', DWORD64), + ('ExceptionAddress', DWORD64), + ('NumberParameters', DWORD), + ('__unusedAlignment', DWORD), + ('ExceptionInformation', DWORD64 * EXCEPTION_MAXIMUM_PARAMETERS), + ] + +PEXCEPTION_RECORD64 = POINTER(EXCEPTION_RECORD64) + +# typedef struct _EXCEPTION_RECORD { +# DWORD ExceptionCode; +# DWORD ExceptionFlags; +# LPVOID ExceptionRecord; +# LPVOID ExceptionAddress; +# DWORD NumberParameters; +# LPVOID ExceptionInformation[EXCEPTION_MAXIMUM_PARAMETERS]; +# } EXCEPTION_RECORD, *PEXCEPTION_RECORD; +class EXCEPTION_RECORD(Structure): + pass +PEXCEPTION_RECORD = POINTER(EXCEPTION_RECORD) +EXCEPTION_RECORD._fields_ = [ + ('ExceptionCode', DWORD), + ('ExceptionFlags', DWORD), + ('ExceptionRecord', PEXCEPTION_RECORD), + ('ExceptionAddress', LPVOID), + ('NumberParameters', DWORD), + ('ExceptionInformation', LPVOID * EXCEPTION_MAXIMUM_PARAMETERS), + ] + +# typedef struct _EXCEPTION_DEBUG_INFO { +# EXCEPTION_RECORD ExceptionRecord; +# DWORD dwFirstChance; +# } EXCEPTION_DEBUG_INFO; +class EXCEPTION_DEBUG_INFO(Structure): + _fields_ = [ + ('ExceptionRecord', EXCEPTION_RECORD), + ('dwFirstChance', DWORD), + ] + +# typedef struct _CREATE_THREAD_DEBUG_INFO { +# HANDLE hThread; +# LPVOID 
lpThreadLocalBase; +# LPTHREAD_START_ROUTINE lpStartAddress; +# } CREATE_THREAD_DEBUG_INFO; +class CREATE_THREAD_DEBUG_INFO(Structure): + _fields_ = [ + ('hThread', HANDLE), + ('lpThreadLocalBase', LPVOID), + ('lpStartAddress', LPVOID), + ] + +# typedef struct _CREATE_PROCESS_DEBUG_INFO { +# HANDLE hFile; +# HANDLE hProcess; +# HANDLE hThread; +# LPVOID lpBaseOfImage; +# DWORD dwDebugInfoFileOffset; +# DWORD nDebugInfoSize; +# LPVOID lpThreadLocalBase; +# LPTHREAD_START_ROUTINE lpStartAddress; +# LPVOID lpImageName; +# WORD fUnicode; +# } CREATE_PROCESS_DEBUG_INFO; +class CREATE_PROCESS_DEBUG_INFO(Structure): + _fields_ = [ + ('hFile', HANDLE), + ('hProcess', HANDLE), + ('hThread', HANDLE), + ('lpBaseOfImage', LPVOID), + ('dwDebugInfoFileOffset', DWORD), + ('nDebugInfoSize', DWORD), + ('lpThreadLocalBase', LPVOID), + ('lpStartAddress', LPVOID), + ('lpImageName', LPVOID), + ('fUnicode', WORD), + ] + +# typedef struct _EXIT_THREAD_DEBUG_INFO { +# DWORD dwExitCode; +# } EXIT_THREAD_DEBUG_INFO; +class EXIT_THREAD_DEBUG_INFO(Structure): + _fields_ = [ + ('dwExitCode', DWORD), + ] + +# typedef struct _EXIT_PROCESS_DEBUG_INFO { +# DWORD dwExitCode; +# } EXIT_PROCESS_DEBUG_INFO; +class EXIT_PROCESS_DEBUG_INFO(Structure): + _fields_ = [ + ('dwExitCode', DWORD), + ] + +# typedef struct _LOAD_DLL_DEBUG_INFO { +# HANDLE hFile; +# LPVOID lpBaseOfDll; +# DWORD dwDebugInfoFileOffset; +# DWORD nDebugInfoSize; +# LPVOID lpImageName; +# WORD fUnicode; +# } LOAD_DLL_DEBUG_INFO; +class LOAD_DLL_DEBUG_INFO(Structure): + _fields_ = [ + ('hFile', HANDLE), + ('lpBaseOfDll', LPVOID), + ('dwDebugInfoFileOffset', DWORD), + ('nDebugInfoSize', DWORD), + ('lpImageName', LPVOID), + ('fUnicode', WORD), + ] + +# typedef struct _UNLOAD_DLL_DEBUG_INFO { +# LPVOID lpBaseOfDll; +# } UNLOAD_DLL_DEBUG_INFO; +class UNLOAD_DLL_DEBUG_INFO(Structure): + _fields_ = [ + ('lpBaseOfDll', LPVOID), + ] + +# typedef struct _OUTPUT_DEBUG_STRING_INFO { +# LPSTR lpDebugStringData; +# WORD fUnicode; +# WORD nDebugStringLength; +# } OUTPUT_DEBUG_STRING_INFO; +class OUTPUT_DEBUG_STRING_INFO(Structure): + _fields_ = [ + ('lpDebugStringData', LPVOID), # don't use LPSTR + ('fUnicode', WORD), + ('nDebugStringLength', WORD), + ] + +# typedef struct _RIP_INFO { +# DWORD dwError; +# DWORD dwType; +# } RIP_INFO, *LPRIP_INFO; +class RIP_INFO(Structure): + _fields_ = [ + ('dwError', DWORD), + ('dwType', DWORD), + ] + +# typedef struct _DEBUG_EVENT { +# DWORD dwDebugEventCode; +# DWORD dwProcessId; +# DWORD dwThreadId; +# union { +# EXCEPTION_DEBUG_INFO Exception; +# CREATE_THREAD_DEBUG_INFO CreateThread; +# CREATE_PROCESS_DEBUG_INFO CreateProcessInfo; +# EXIT_THREAD_DEBUG_INFO ExitThread; +# EXIT_PROCESS_DEBUG_INFO ExitProcess; +# LOAD_DLL_DEBUG_INFO LoadDll; +# UNLOAD_DLL_DEBUG_INFO UnloadDll; +# OUTPUT_DEBUG_STRING_INFO DebugString; +# RIP_INFO RipInfo; +# } u; +# } DEBUG_EVENT;. 
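
A sketch of how the DEBUG_EVENT structure defined next is typically consumed once the WaitForDebugEvent wrapper (elsewhere in this module) has filled it in; dwDebugEventCode selects the active member of the union, and the event-code constants are the standard Win32 values defined in this package.

    event = DEBUG_EVENT()
    # ... WaitForDebugEvent would populate `event` here ...
    if event.dwDebugEventCode == EXCEPTION_DEBUG_EVENT:
        code = event.u.Exception.ExceptionRecord.ExceptionCode
        first_chance = bool(event.u.Exception.dwFirstChance)
    elif event.dwDebugEventCode == LOAD_DLL_DEBUG_EVENT:
        base = event.u.LoadDll.lpBaseOfDll
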
+class _DEBUG_EVENT_UNION_(Union): + _fields_ = [ + ('Exception', EXCEPTION_DEBUG_INFO), + ('CreateThread', CREATE_THREAD_DEBUG_INFO), + ('CreateProcessInfo', CREATE_PROCESS_DEBUG_INFO), + ('ExitThread', EXIT_THREAD_DEBUG_INFO), + ('ExitProcess', EXIT_PROCESS_DEBUG_INFO), + ('LoadDll', LOAD_DLL_DEBUG_INFO), + ('UnloadDll', UNLOAD_DLL_DEBUG_INFO), + ('DebugString', OUTPUT_DEBUG_STRING_INFO), + ('RipInfo', RIP_INFO), + ] +class DEBUG_EVENT(Structure): + _fields_ = [ + ('dwDebugEventCode', DWORD), + ('dwProcessId', DWORD), + ('dwThreadId', DWORD), + ('u', _DEBUG_EVENT_UNION_), + ] +LPDEBUG_EVENT = POINTER(DEBUG_EVENT) + +#--- Console API defines and structures --------------------------------------- + +FOREGROUND_MASK = 0x000F +BACKGROUND_MASK = 0x00F0 +COMMON_LVB_MASK = 0xFF00 + +FOREGROUND_BLACK = 0x0000 +FOREGROUND_BLUE = 0x0001 +FOREGROUND_GREEN = 0x0002 +FOREGROUND_CYAN = 0x0003 +FOREGROUND_RED = 0x0004 +FOREGROUND_MAGENTA = 0x0005 +FOREGROUND_YELLOW = 0x0006 +FOREGROUND_GREY = 0x0007 +FOREGROUND_INTENSITY = 0x0008 + +BACKGROUND_BLACK = 0x0000 +BACKGROUND_BLUE = 0x0010 +BACKGROUND_GREEN = 0x0020 +BACKGROUND_CYAN = 0x0030 +BACKGROUND_RED = 0x0040 +BACKGROUND_MAGENTA = 0x0050 +BACKGROUND_YELLOW = 0x0060 +BACKGROUND_GREY = 0x0070 +BACKGROUND_INTENSITY = 0x0080 + +COMMON_LVB_LEADING_BYTE = 0x0100 +COMMON_LVB_TRAILING_BYTE = 0x0200 +COMMON_LVB_GRID_HORIZONTAL = 0x0400 +COMMON_LVB_GRID_LVERTICAL = 0x0800 +COMMON_LVB_GRID_RVERTICAL = 0x1000 +COMMON_LVB_REVERSE_VIDEO = 0x4000 +COMMON_LVB_UNDERSCORE = 0x8000 + +# typedef struct _CHAR_INFO { +# union { +# WCHAR UnicodeChar; +# CHAR AsciiChar; +# } Char; +# WORD Attributes; +# } CHAR_INFO, *PCHAR_INFO; +class _CHAR_INFO_CHAR(Union): + _fields_ = [ + ('UnicodeChar', WCHAR), + ('AsciiChar', CHAR), + ] +class CHAR_INFO(Structure): + _fields_ = [ + ('Char', _CHAR_INFO_CHAR), + ('Attributes', WORD), + ] +PCHAR_INFO = POINTER(CHAR_INFO) + +# typedef struct _COORD { +# SHORT X; +# SHORT Y; +# } COORD, *PCOORD; +class COORD(Structure): + _fields_ = [ + ('X', SHORT), + ('Y', SHORT), + ] +PCOORD = POINTER(COORD) + +# typedef struct _SMALL_RECT { +# SHORT Left; +# SHORT Top; +# SHORT Right; +# SHORT Bottom; +# } SMALL_RECT; +class SMALL_RECT(Structure): + _fields_ = [ + ('Left', SHORT), + ('Top', SHORT), + ('Right', SHORT), + ('Bottom', SHORT), + ] +PSMALL_RECT = POINTER(SMALL_RECT) + +# typedef struct _CONSOLE_SCREEN_BUFFER_INFO { +# COORD dwSize; +# COORD dwCursorPosition; +# WORD wAttributes; +# SMALL_RECT srWindow; +# COORD dwMaximumWindowSize; +# } CONSOLE_SCREEN_BUFFER_INFO; +class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_ = [ + ('dwSize', COORD), + ('dwCursorPosition', COORD), + ('wAttributes', WORD), + ('srWindow', SMALL_RECT), + ('dwMaximumWindowSize', COORD), + ] +PCONSOLE_SCREEN_BUFFER_INFO = POINTER(CONSOLE_SCREEN_BUFFER_INFO) + +#--- Toolhelp library defines and structures ---------------------------------- + +TH32CS_SNAPHEAPLIST = 0x00000001 +TH32CS_SNAPPROCESS = 0x00000002 +TH32CS_SNAPTHREAD = 0x00000004 +TH32CS_SNAPMODULE = 0x00000008 +TH32CS_INHERIT = 0x80000000 +TH32CS_SNAPALL = (TH32CS_SNAPHEAPLIST | TH32CS_SNAPPROCESS | TH32CS_SNAPTHREAD | TH32CS_SNAPMODULE) + +# typedef struct tagTHREADENTRY32 { +# DWORD dwSize; +# DWORD cntUsage; +# DWORD th32ThreadID; +# DWORD th32OwnerProcessID; +# LONG tpBasePri; +# LONG tpDeltaPri; +# DWORD dwFlags; +# } THREADENTRY32, *PTHREADENTRY32; +class THREADENTRY32(Structure): + _fields_ = [ + ('dwSize', DWORD), + ('cntUsage', DWORD), + ('th32ThreadID', DWORD), + ('th32OwnerProcessID', DWORD), + 
('tpBasePri', LONG), + ('tpDeltaPri', LONG), + ('dwFlags', DWORD), + ] +LPTHREADENTRY32 = POINTER(THREADENTRY32) + +# typedef struct tagPROCESSENTRY32 { +# DWORD dwSize; +# DWORD cntUsage; +# DWORD th32ProcessID; +# ULONG_PTR th32DefaultHeapID; +# DWORD th32ModuleID; +# DWORD cntThreads; +# DWORD th32ParentProcessID; +# LONG pcPriClassBase; +# DWORD dwFlags; +# TCHAR szExeFile[MAX_PATH]; +# } PROCESSENTRY32, *PPROCESSENTRY32; +class PROCESSENTRY32(Structure): + _fields_ = [ + ('dwSize', DWORD), + ('cntUsage', DWORD), + ('th32ProcessID', DWORD), + ('th32DefaultHeapID', ULONG_PTR), + ('th32ModuleID', DWORD), + ('cntThreads', DWORD), + ('th32ParentProcessID', DWORD), + ('pcPriClassBase', LONG), + ('dwFlags', DWORD), + ('szExeFile', TCHAR * 260), + ] +LPPROCESSENTRY32 = POINTER(PROCESSENTRY32) + +# typedef struct tagMODULEENTRY32 { +# DWORD dwSize; +# DWORD th32ModuleID; +# DWORD th32ProcessID; +# DWORD GlblcntUsage; +# DWORD ProccntUsage; +# BYTE* modBaseAddr; +# DWORD modBaseSize; +# HMODULE hModule; +# TCHAR szModule[MAX_MODULE_NAME32 + 1]; +# TCHAR szExePath[MAX_PATH]; +# } MODULEENTRY32, *PMODULEENTRY32; +class MODULEENTRY32(Structure): + _fields_ = [ + ("dwSize", DWORD), + ("th32ModuleID", DWORD), + ("th32ProcessID", DWORD), + ("GlblcntUsage", DWORD), + ("ProccntUsage", DWORD), + ("modBaseAddr", LPVOID), # BYTE* + ("modBaseSize", DWORD), + ("hModule", HMODULE), + ("szModule", TCHAR * (MAX_MODULE_NAME32 + 1)), + ("szExePath", TCHAR * MAX_PATH), + ] +LPMODULEENTRY32 = POINTER(MODULEENTRY32) + +# typedef struct tagHEAPENTRY32 { +# SIZE_T dwSize; +# HANDLE hHandle; +# ULONG_PTR dwAddress; +# SIZE_T dwBlockSize; +# DWORD dwFlags; +# DWORD dwLockCount; +# DWORD dwResvd; +# DWORD th32ProcessID; +# ULONG_PTR th32HeapID; +# } HEAPENTRY32, +# *PHEAPENTRY32; +class HEAPENTRY32(Structure): + _fields_ = [ + ("dwSize", SIZE_T), + ("hHandle", HANDLE), + ("dwAddress", ULONG_PTR), + ("dwBlockSize", SIZE_T), + ("dwFlags", DWORD), + ("dwLockCount", DWORD), + ("dwResvd", DWORD), + ("th32ProcessID", DWORD), + ("th32HeapID", ULONG_PTR), +] +LPHEAPENTRY32 = POINTER(HEAPENTRY32) + +# typedef struct tagHEAPLIST32 { +# SIZE_T dwSize; +# DWORD th32ProcessID; +# ULONG_PTR th32HeapID; +# DWORD dwFlags; +# } HEAPLIST32, +# *PHEAPLIST32; +class HEAPLIST32(Structure): + _fields_ = [ + ("dwSize", SIZE_T), + ("th32ProcessID", DWORD), + ("th32HeapID", ULONG_PTR), + ("dwFlags", DWORD), +] +LPHEAPLIST32 = POINTER(HEAPLIST32) + +#--- kernel32.dll ------------------------------------------------------------- + +# DWORD WINAPI GetLastError(void); +def GetLastError(): + _GetLastError = windll.kernel32.GetLastError + _GetLastError.argtypes = [] + _GetLastError.restype = DWORD + return _GetLastError() + +# void WINAPI SetLastError( +# __in DWORD dwErrCode +# ); +def SetLastError(dwErrCode): + _SetLastError = windll.kernel32.SetLastError + _SetLastError.argtypes = [DWORD] + _SetLastError.restype = None + _SetLastError(dwErrCode) + +# UINT WINAPI GetErrorMode(void); +def GetErrorMode(): + _GetErrorMode = windll.kernel32.GetErrorMode + _GetErrorMode.argtypes = [] + _GetErrorMode.restype = UINT + return _GetErrorMode() + +# UINT WINAPI SetErrorMode( +# __in UINT uMode +# ); +def SetErrorMode(uMode): + _SetErrorMode = windll.kernel32.SetErrorMode + _SetErrorMode.argtypes = [UINT] + _SetErrorMode.restype = UINT + return _SetErrorMode(dwErrCode) + +# DWORD GetThreadErrorMode(void); +def GetThreadErrorMode(): + _GetThreadErrorMode = windll.kernel32.GetThreadErrorMode + _GetThreadErrorMode.argtypes = [] + _GetThreadErrorMode.restype = 
DWORD + return _GetThreadErrorMode() + +# BOOL SetThreadErrorMode( +# __in DWORD dwNewMode, +# __out LPDWORD lpOldMode +# ); +def SetThreadErrorMode(dwNewMode): + _SetThreadErrorMode = windll.kernel32.SetThreadErrorMode + _SetThreadErrorMode.argtypes = [DWORD, LPDWORD] + _SetThreadErrorMode.restype = BOOL + _SetThreadErrorMode.errcheck = RaiseIfZero + + old = DWORD(0) + _SetThreadErrorMode(dwErrCode, byref(old)) + return old.value + +# BOOL WINAPI CloseHandle( +# __in HANDLE hObject +# ); +def CloseHandle(hHandle): + if isinstance(hHandle, Handle): + # Prevents the handle from being closed without notifying the Handle object. + hHandle.close() + else: + _CloseHandle = windll.kernel32.CloseHandle + _CloseHandle.argtypes = [HANDLE] + _CloseHandle.restype = bool + _CloseHandle.errcheck = RaiseIfZero + _CloseHandle(hHandle) + +# BOOL WINAPI DuplicateHandle( +# __in HANDLE hSourceProcessHandle, +# __in HANDLE hSourceHandle, +# __in HANDLE hTargetProcessHandle, +# __out LPHANDLE lpTargetHandle, +# __in DWORD dwDesiredAccess, +# __in BOOL bInheritHandle, +# __in DWORD dwOptions +# ); +def DuplicateHandle(hSourceHandle, hSourceProcessHandle = None, hTargetProcessHandle = None, dwDesiredAccess = STANDARD_RIGHTS_ALL, bInheritHandle = False, dwOptions = DUPLICATE_SAME_ACCESS): + _DuplicateHandle = windll.kernel32.DuplicateHandle + _DuplicateHandle.argtypes = [HANDLE, HANDLE, HANDLE, LPHANDLE, DWORD, BOOL, DWORD] + _DuplicateHandle.restype = bool + _DuplicateHandle.errcheck = RaiseIfZero + + # NOTE: the arguments to this function are in a different order, + # so we can set default values for all of them but one (hSourceHandle). + + if hSourceProcessHandle is None: + hSourceProcessHandle = GetCurrentProcess() + if hTargetProcessHandle is None: + hTargetProcessHandle = hSourceProcessHandle + lpTargetHandle = HANDLE(INVALID_HANDLE_VALUE) + _DuplicateHandle(hSourceProcessHandle, hSourceHandle, hTargetProcessHandle, byref(lpTargetHandle), dwDesiredAccess, bool(bInheritHandle), dwOptions) + if isinstance(hSourceHandle, Handle): + HandleClass = hSourceHandle.__class__ + else: + HandleClass = Handle + if hasattr(hSourceHandle, 'dwAccess'): + return HandleClass(lpTargetHandle.value, dwAccess = hSourceHandle.dwAccess) + else: + return HandleClass(lpTargetHandle.value) + +# HLOCAL WINAPI LocalFree( +# __in HLOCAL hMem +# ); +def LocalFree(hMem): + _LocalFree = windll.kernel32.LocalFree + _LocalFree.argtypes = [HLOCAL] + _LocalFree.restype = HLOCAL + + result = _LocalFree(hMem) + if result != NULL: + ctypes.WinError() + +#------------------------------------------------------------------------------ +# Console API + +# HANDLE WINAPI GetStdHandle( +# _In_ DWORD nStdHandle +# ); +def GetStdHandle(nStdHandle): + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argytpes = [DWORD] + _GetStdHandle.restype = HANDLE + _GetStdHandle.errcheck = RaiseIfZero + return Handle( _GetStdHandle(nStdHandle), bOwnership = False ) + +# BOOL WINAPI SetStdHandle( +# _In_ DWORD nStdHandle, +# _In_ HANDLE hHandle +# ); + +# TODO + +# UINT WINAPI GetConsoleCP(void); +def GetConsoleCP(): + _GetConsoleCP = windll.kernel32.GetConsoleCP + _GetConsoleCP.argytpes = [] + _GetConsoleCP.restype = UINT + return _GetConsoleCP() + +# UINT WINAPI GetConsoleOutputCP(void); +def GetConsoleOutputCP(): + _GetConsoleOutputCP = windll.kernel32.GetConsoleOutputCP + _GetConsoleOutputCP.argytpes = [] + _GetConsoleOutputCP.restype = UINT + return _GetConsoleOutputCP() + +#BOOL WINAPI SetConsoleCP( +# _In_ UINT wCodePageID +#); +def 
SetConsoleCP(wCodePageID): + _SetConsoleCP = windll.kernel32.SetConsoleCP + _SetConsoleCP.argytpes = [UINT] + _SetConsoleCP.restype = bool + _SetConsoleCP.errcheck = RaiseIfZero + _SetConsoleCP(wCodePageID) + +#BOOL WINAPI SetConsoleOutputCP( +# _In_ UINT wCodePageID +#); +def SetConsoleOutputCP(wCodePageID): + _SetConsoleOutputCP = windll.kernel32.SetConsoleOutputCP + _SetConsoleOutputCP.argytpes = [UINT] + _SetConsoleOutputCP.restype = bool + _SetConsoleOutputCP.errcheck = RaiseIfZero + _SetConsoleOutputCP(wCodePageID) + +# HANDLE WINAPI CreateConsoleScreenBuffer( +# _In_ DWORD dwDesiredAccess, +# _In_ DWORD dwShareMode, +# _In_opt_ const SECURITY_ATTRIBUTES *lpSecurityAttributes, +# _In_ DWORD dwFlags, +# _Reserved_ LPVOID lpScreenBufferData +# ); + +# TODO + +# BOOL WINAPI SetConsoleActiveScreenBuffer( +# _In_ HANDLE hConsoleOutput +# ); +def SetConsoleActiveScreenBuffer(hConsoleOutput = None): + _SetConsoleActiveScreenBuffer = windll.kernel32.SetConsoleActiveScreenBuffer + _SetConsoleActiveScreenBuffer.argytpes = [HANDLE] + _SetConsoleActiveScreenBuffer.restype = bool + _SetConsoleActiveScreenBuffer.errcheck = RaiseIfZero + + if hConsoleOutput is None: + hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE) + _SetConsoleActiveScreenBuffer(hConsoleOutput) + +# BOOL WINAPI GetConsoleScreenBufferInfo( +# _In_ HANDLE hConsoleOutput, +# _Out_ PCONSOLE_SCREEN_BUFFER_INFO lpConsoleScreenBufferInfo +# ); +def GetConsoleScreenBufferInfo(hConsoleOutput = None): + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argytpes = [HANDLE, PCONSOLE_SCREEN_BUFFER_INFO] + _GetConsoleScreenBufferInfo.restype = bool + _GetConsoleScreenBufferInfo.errcheck = RaiseIfZero + + if hConsoleOutput is None: + hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE) + ConsoleScreenBufferInfo = CONSOLE_SCREEN_BUFFER_INFO() + _GetConsoleScreenBufferInfo(hConsoleOutput, byref(ConsoleScreenBufferInfo)) + return ConsoleScreenBufferInfo + +# BOOL WINAPI GetConsoleScreenBufferInfoEx( +# _In_ HANDLE hConsoleOutput, +# _Out_ PCONSOLE_SCREEN_BUFFER_INFOEX lpConsoleScreenBufferInfoEx +# ); + +# TODO + +# BOOL WINAPI SetConsoleWindowInfo( +# _In_ HANDLE hConsoleOutput, +# _In_ BOOL bAbsolute, +# _In_ const SMALL_RECT *lpConsoleWindow +# ); +def SetConsoleWindowInfo(hConsoleOutput, bAbsolute, lpConsoleWindow): + _SetConsoleWindowInfo = windll.kernel32.SetConsoleWindowInfo + _SetConsoleWindowInfo.argytpes = [HANDLE, BOOL, PSMALL_RECT] + _SetConsoleWindowInfo.restype = bool + _SetConsoleWindowInfo.errcheck = RaiseIfZero + + if hConsoleOutput is None: + hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE) + if isinstance(lpConsoleWindow, SMALL_RECT): + ConsoleWindow = lpConsoleWindow + else: + ConsoleWindow = SMALL_RECT(*lpConsoleWindow) + _SetConsoleWindowInfo(hConsoleOutput, bAbsolute, byref(ConsoleWindow)) + +# BOOL WINAPI SetConsoleTextAttribute( +# _In_ HANDLE hConsoleOutput, +# _In_ WORD wAttributes +# ); +def SetConsoleTextAttribute(hConsoleOutput = None, wAttributes = 0): + _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argytpes = [HANDLE, WORD] + _SetConsoleTextAttribute.restype = bool + _SetConsoleTextAttribute.errcheck = RaiseIfZero + + if hConsoleOutput is None: + hConsoleOutput = GetStdHandle(STD_OUTPUT_HANDLE) + _SetConsoleTextAttribute(hConsoleOutput, wAttributes) + +# HANDLE WINAPI CreateConsoleScreenBuffer( +# _In_ DWORD dwDesiredAccess, +# _In_ DWORD dwShareMode, +# _In_opt_ const SECURITY_ATTRIBUTES *lpSecurityAttributes, +# _In_ 
DWORD dwFlags, +# _Reserved_ LPVOID lpScreenBufferData +# ); + +# TODO + +# BOOL WINAPI AllocConsole(void); +def AllocConsole(): + _AllocConsole = windll.kernel32.AllocConsole + _AllocConsole.argytpes = [] + _AllocConsole.restype = bool + _AllocConsole.errcheck = RaiseIfZero + _AllocConsole() + +# BOOL WINAPI AttachConsole( +# _In_ DWORD dwProcessId +# ); +def AttachConsole(dwProcessId = ATTACH_PARENT_PROCESS): + _AttachConsole = windll.kernel32.AttachConsole + _AttachConsole.argytpes = [DWORD] + _AttachConsole.restype = bool + _AttachConsole.errcheck = RaiseIfZero + _AttachConsole(dwProcessId) + +# BOOL WINAPI FreeConsole(void); +def FreeConsole(): + _FreeConsole = windll.kernel32.FreeConsole + _FreeConsole.argytpes = [] + _FreeConsole.restype = bool + _FreeConsole.errcheck = RaiseIfZero + _FreeConsole() + +# DWORD WINAPI GetConsoleProcessList( +# _Out_ LPDWORD lpdwProcessList, +# _In_ DWORD dwProcessCount +# ); + +# TODO + +# DWORD WINAPI GetConsoleTitle( +# _Out_ LPTSTR lpConsoleTitle, +# _In_ DWORD nSize +# ); + +# TODO + +#BOOL WINAPI SetConsoleTitle( +# _In_ LPCTSTR lpConsoleTitle +#); + +# TODO + +# COORD WINAPI GetLargestConsoleWindowSize( +# _In_ HANDLE hConsoleOutput +# ); + +# TODO + +# BOOL WINAPI GetConsoleHistoryInfo( +# _Out_ PCONSOLE_HISTORY_INFO lpConsoleHistoryInfo +# ); + +# TODO + +#------------------------------------------------------------------------------ +# DLL API + +# DWORD WINAPI GetDllDirectory( +# __in DWORD nBufferLength, +# __out LPTSTR lpBuffer +# ); +def GetDllDirectoryA(): + _GetDllDirectoryA = windll.kernel32.GetDllDirectoryA + _GetDllDirectoryA.argytpes = [DWORD, LPSTR] + _GetDllDirectoryA.restype = DWORD + + nBufferLength = _GetDllDirectoryA(0, None) + if nBufferLength == 0: + return None + lpBuffer = ctypes.create_string_buffer("", nBufferLength) + _GetDllDirectoryA(nBufferLength, byref(lpBuffer)) + return lpBuffer.value + +def GetDllDirectoryW(): + _GetDllDirectoryW = windll.kernel32.GetDllDirectoryW + _GetDllDirectoryW.argytpes = [DWORD, LPWSTR] + _GetDllDirectoryW.restype = DWORD + + nBufferLength = _GetDllDirectoryW(0, None) + if nBufferLength == 0: + return None + lpBuffer = ctypes.create_unicode_buffer(u"", nBufferLength) + _GetDllDirectoryW(nBufferLength, byref(lpBuffer)) + return lpBuffer.value + +GetDllDirectory = GuessStringType(GetDllDirectoryA, GetDllDirectoryW) + +# BOOL WINAPI SetDllDirectory( +# __in_opt LPCTSTR lpPathName +# ); +def SetDllDirectoryA(lpPathName = None): + _SetDllDirectoryA = windll.kernel32.SetDllDirectoryA + _SetDllDirectoryA.argytpes = [LPSTR] + _SetDllDirectoryA.restype = bool + _SetDllDirectoryA.errcheck = RaiseIfZero + _SetDllDirectoryA(lpPathName) + +def SetDllDirectoryW(lpPathName): + _SetDllDirectoryW = windll.kernel32.SetDllDirectoryW + _SetDllDirectoryW.argytpes = [LPWSTR] + _SetDllDirectoryW.restype = bool + _SetDllDirectoryW.errcheck = RaiseIfZero + _SetDllDirectoryW(lpPathName) + +SetDllDirectory = GuessStringType(SetDllDirectoryA, SetDllDirectoryW) + +# HMODULE WINAPI LoadLibrary( +# __in LPCTSTR lpFileName +# ); +def LoadLibraryA(pszLibrary): + _LoadLibraryA = windll.kernel32.LoadLibraryA + _LoadLibraryA.argtypes = [LPSTR] + _LoadLibraryA.restype = HMODULE + hModule = _LoadLibraryA(pszLibrary) + if hModule == NULL: + raise ctypes.WinError() + return hModule + +def LoadLibraryW(pszLibrary): + _LoadLibraryW = windll.kernel32.LoadLibraryW + _LoadLibraryW.argtypes = [LPWSTR] + _LoadLibraryW.restype = HMODULE + hModule = _LoadLibraryW(pszLibrary) + if hModule == NULL: + raise ctypes.WinError() + return 
hModule + +LoadLibrary = GuessStringType(LoadLibraryA, LoadLibraryW) + +# HMODULE WINAPI LoadLibraryEx( +# __in LPCTSTR lpFileName, +# __reserved HANDLE hFile, +# __in DWORD dwFlags +# ); +def LoadLibraryExA(pszLibrary, dwFlags = 0): + _LoadLibraryExA = windll.kernel32.LoadLibraryExA + _LoadLibraryExA.argtypes = [LPSTR, HANDLE, DWORD] + _LoadLibraryExA.restype = HMODULE + hModule = _LoadLibraryExA(pszLibrary, NULL, dwFlags) + if hModule == NULL: + raise ctypes.WinError() + return hModule + +def LoadLibraryExW(pszLibrary, dwFlags = 0): + _LoadLibraryExW = windll.kernel32.LoadLibraryExW + _LoadLibraryExW.argtypes = [LPWSTR, HANDLE, DWORD] + _LoadLibraryExW.restype = HMODULE + hModule = _LoadLibraryExW(pszLibrary, NULL, dwFlags) + if hModule == NULL: + raise ctypes.WinError() + return hModule + +LoadLibraryEx = GuessStringType(LoadLibraryExA, LoadLibraryExW) + +# HMODULE WINAPI GetModuleHandle( +# __in_opt LPCTSTR lpModuleName +# ); +def GetModuleHandleA(lpModuleName): + _GetModuleHandleA = windll.kernel32.GetModuleHandleA + _GetModuleHandleA.argtypes = [LPSTR] + _GetModuleHandleA.restype = HMODULE + hModule = _GetModuleHandleA(lpModuleName) + if hModule == NULL: + raise ctypes.WinError() + return hModule + +def GetModuleHandleW(lpModuleName): + _GetModuleHandleW = windll.kernel32.GetModuleHandleW + _GetModuleHandleW.argtypes = [LPWSTR] + _GetModuleHandleW.restype = HMODULE + hModule = _GetModuleHandleW(lpModuleName) + if hModule == NULL: + raise ctypes.WinError() + return hModule + +GetModuleHandle = GuessStringType(GetModuleHandleA, GetModuleHandleW) + +# FARPROC WINAPI GetProcAddress( +# __in HMODULE hModule, +# __in LPCSTR lpProcName +# ); +def GetProcAddressA(hModule, lpProcName): + _GetProcAddress = windll.kernel32.GetProcAddress + _GetProcAddress.argtypes = [HMODULE, LPVOID] + _GetProcAddress.restype = LPVOID + + if type(lpProcName) in (type(0), type(long(0))): + lpProcName = LPVOID(lpProcName) + if lpProcName.value & (~0xFFFF): + raise ValueError('Ordinal number too large: %d' % lpProcName.value) + elif type(lpProcName) == type(compat.b("")): + lpProcName = ctypes.c_char_p(lpProcName) + else: + raise TypeError(str(type(lpProcName))) + return _GetProcAddress(hModule, lpProcName) + +GetProcAddressW = MakeWideVersion(GetProcAddressA) +GetProcAddress = GuessStringType(GetProcAddressA, GetProcAddressW) + +# BOOL WINAPI FreeLibrary( +# __in HMODULE hModule +# ); +def FreeLibrary(hModule): + _FreeLibrary = windll.kernel32.FreeLibrary + _FreeLibrary.argtypes = [HMODULE] + _FreeLibrary.restype = bool + _FreeLibrary.errcheck = RaiseIfZero + _FreeLibrary(hModule) + +# PVOID WINAPI RtlPcToFileHeader( +# __in PVOID PcValue, +# __out PVOID *BaseOfImage +# ); +def RtlPcToFileHeader(PcValue): + _RtlPcToFileHeader = windll.kernel32.RtlPcToFileHeader + _RtlPcToFileHeader.argtypes = [PVOID, POINTER(PVOID)] + _RtlPcToFileHeader.restype = PRUNTIME_FUNCTION + + BaseOfImage = PVOID(0) + _RtlPcToFileHeader(PcValue, byref(BaseOfImage)) + return BaseOfImage.value + +#------------------------------------------------------------------------------ +# File API and related + +# BOOL WINAPI GetHandleInformation( +# __in HANDLE hObject, +# __out LPDWORD lpdwFlags +# ); +def GetHandleInformation(hObject): + _GetHandleInformation = windll.kernel32.GetHandleInformation + _GetHandleInformation.argtypes = [HANDLE, PDWORD] + _GetHandleInformation.restype = bool + _GetHandleInformation.errcheck = RaiseIfZero + + dwFlags = DWORD(0) + _GetHandleInformation(hObject, byref(dwFlags)) + return dwFlags.value + +# BOOL WINAPI 
SetHandleInformation( +# __in HANDLE hObject, +# __in DWORD dwMask, +# __in DWORD dwFlags +# ); +def SetHandleInformation(hObject, dwMask, dwFlags): + _SetHandleInformation = windll.kernel32.SetHandleInformation + _SetHandleInformation.argtypes = [HANDLE, DWORD, DWORD] + _SetHandleInformation.restype = bool + _SetHandleInformation.errcheck = RaiseIfZero + _SetHandleInformation(hObject, dwMask, dwFlags) + +# UINT WINAPI GetWindowModuleFileName( +# __in HWND hwnd, +# __out LPTSTR lpszFileName, +# __in UINT cchFileNameMax +# ); +# Not included because it doesn't work in other processes. +# See: http://support.microsoft.com/?id=228469 + +# BOOL WINAPI QueryFullProcessImageName( +# __in HANDLE hProcess, +# __in DWORD dwFlags, +# __out LPTSTR lpExeName, +# __inout PDWORD lpdwSize +# ); +def QueryFullProcessImageNameA(hProcess, dwFlags = 0): + _QueryFullProcessImageNameA = windll.kernel32.QueryFullProcessImageNameA + _QueryFullProcessImageNameA.argtypes = [HANDLE, DWORD, LPSTR, PDWORD] + _QueryFullProcessImageNameA.restype = bool + + dwSize = MAX_PATH + while 1: + lpdwSize = DWORD(dwSize) + lpExeName = ctypes.create_string_buffer('', lpdwSize.value + 1) + success = _QueryFullProcessImageNameA(hProcess, dwFlags, lpExeName, byref(lpdwSize)) + if success and 0 < lpdwSize.value < dwSize: + break + error = GetLastError() + if error != ERROR_INSUFFICIENT_BUFFER: + raise ctypes.WinError(error) + dwSize = dwSize + 256 + if dwSize > 0x1000: + # this prevents an infinite loop in Windows 2008 when the path has spaces, + # see http://msdn.microsoft.com/en-us/library/ms684919(VS.85).aspx#4 + raise ctypes.WinError(error) + return lpExeName.value + +def QueryFullProcessImageNameW(hProcess, dwFlags = 0): + _QueryFullProcessImageNameW = windll.kernel32.QueryFullProcessImageNameW + _QueryFullProcessImageNameW.argtypes = [HANDLE, DWORD, LPWSTR, PDWORD] + _QueryFullProcessImageNameW.restype = bool + + dwSize = MAX_PATH + while 1: + lpdwSize = DWORD(dwSize) + lpExeName = ctypes.create_unicode_buffer('', lpdwSize.value + 1) + success = _QueryFullProcessImageNameW(hProcess, dwFlags, lpExeName, byref(lpdwSize)) + if success and 0 < lpdwSize.value < dwSize: + break + error = GetLastError() + if error != ERROR_INSUFFICIENT_BUFFER: + raise ctypes.WinError(error) + dwSize = dwSize + 256 + if dwSize > 0x1000: + # this prevents an infinite loop in Windows 2008 when the path has spaces, + # see http://msdn.microsoft.com/en-us/library/ms684919(VS.85).aspx#4 + raise ctypes.WinError(error) + return lpExeName.value + +QueryFullProcessImageName = GuessStringType(QueryFullProcessImageNameA, QueryFullProcessImageNameW) + +# DWORD WINAPI GetLogicalDriveStrings( +# __in DWORD nBufferLength, +# __out LPTSTR lpBuffer +# ); +def GetLogicalDriveStringsA(): + _GetLogicalDriveStringsA = ctypes.windll.kernel32.GetLogicalDriveStringsA + _GetLogicalDriveStringsA.argtypes = [DWORD, LPSTR] + _GetLogicalDriveStringsA.restype = DWORD + _GetLogicalDriveStringsA.errcheck = RaiseIfZero + + nBufferLength = (4 * 26) + 1 # "X:\\\0" from A to Z plus empty string + lpBuffer = ctypes.create_string_buffer('', nBufferLength) + _GetLogicalDriveStringsA(nBufferLength, lpBuffer) + drive_strings = list() + string_p = addressof(lpBuffer) + sizeof_char = sizeof(ctypes.c_char) + while True: + string_v = ctypes.string_at(string_p) + if string_v == '': + break + drive_strings.append(string_v) + string_p += len(string_v) + sizeof_char + return drive_strings + +def GetLogicalDriveStringsW(): + _GetLogicalDriveStringsW = ctypes.windll.kernel32.GetLogicalDriveStringsW + 
_GetLogicalDriveStringsW.argtypes = [DWORD, LPWSTR] + _GetLogicalDriveStringsW.restype = DWORD + _GetLogicalDriveStringsW.errcheck = RaiseIfZero + + nBufferLength = (4 * 26) + 1 # "X:\\\0" from A to Z plus empty string + lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength) + _GetLogicalDriveStringsW(nBufferLength, lpBuffer) + drive_strings = list() + string_p = addressof(lpBuffer) + sizeof_wchar = sizeof(ctypes.c_wchar) + while True: + string_v = ctypes.wstring_at(string_p) + if string_v == u'': + break + drive_strings.append(string_v) + string_p += (len(string_v) * sizeof_wchar) + sizeof_wchar + return drive_strings + +##def GetLogicalDriveStringsA(): +## _GetLogicalDriveStringsA = windll.kernel32.GetLogicalDriveStringsA +## _GetLogicalDriveStringsA.argtypes = [DWORD, LPSTR] +## _GetLogicalDriveStringsA.restype = DWORD +## _GetLogicalDriveStringsA.errcheck = RaiseIfZero +## +## nBufferLength = (4 * 26) + 1 # "X:\\\0" from A to Z plus empty string +## lpBuffer = ctypes.create_string_buffer('', nBufferLength) +## _GetLogicalDriveStringsA(nBufferLength, lpBuffer) +## result = list() +## index = 0 +## while 1: +## string = list() +## while 1: +## character = lpBuffer[index] +## index = index + 1 +## if character == '\0': +## break +## string.append(character) +## if not string: +## break +## result.append(''.join(string)) +## return result +## +##def GetLogicalDriveStringsW(): +## _GetLogicalDriveStringsW = windll.kernel32.GetLogicalDriveStringsW +## _GetLogicalDriveStringsW.argtypes = [DWORD, LPWSTR] +## _GetLogicalDriveStringsW.restype = DWORD +## _GetLogicalDriveStringsW.errcheck = RaiseIfZero +## +## nBufferLength = (4 * 26) + 1 # "X:\\\0" from A to Z plus empty string +## lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength) +## _GetLogicalDriveStringsW(nBufferLength, lpBuffer) +## result = list() +## index = 0 +## while 1: +## string = list() +## while 1: +## character = lpBuffer[index] +## index = index + 1 +## if character == u'\0': +## break +## string.append(character) +## if not string: +## break +## result.append(u''.join(string)) +## return result + +GetLogicalDriveStrings = GuessStringType(GetLogicalDriveStringsA, GetLogicalDriveStringsW) + +# DWORD WINAPI QueryDosDevice( +# __in_opt LPCTSTR lpDeviceName, +# __out LPTSTR lpTargetPath, +# __in DWORD ucchMax +# ); +def QueryDosDeviceA(lpDeviceName = None): + _QueryDosDeviceA = windll.kernel32.QueryDosDeviceA + _QueryDosDeviceA.argtypes = [LPSTR, LPSTR, DWORD] + _QueryDosDeviceA.restype = DWORD + _QueryDosDeviceA.errcheck = RaiseIfZero + + if not lpDeviceName: + lpDeviceName = None + ucchMax = 0x1000 + lpTargetPath = ctypes.create_string_buffer('', ucchMax) + _QueryDosDeviceA(lpDeviceName, lpTargetPath, ucchMax) + return lpTargetPath.value + +def QueryDosDeviceW(lpDeviceName): + _QueryDosDeviceW = windll.kernel32.QueryDosDeviceW + _QueryDosDeviceW.argtypes = [LPWSTR, LPWSTR, DWORD] + _QueryDosDeviceW.restype = DWORD + _QueryDosDeviceW.errcheck = RaiseIfZero + + if not lpDeviceName: + lpDeviceName = None + ucchMax = 0x1000 + lpTargetPath = ctypes.create_unicode_buffer(u'', ucchMax) + _QueryDosDeviceW(lpDeviceName, lpTargetPath, ucchMax) + return lpTargetPath.value + +QueryDosDevice = GuessStringType(QueryDosDeviceA, QueryDosDeviceW) + +# LPVOID WINAPI MapViewOfFile( +# __in HANDLE hFileMappingObject, +# __in DWORD dwDesiredAccess, +# __in DWORD dwFileOffsetHigh, +# __in DWORD dwFileOffsetLow, +# __in SIZE_T dwNumberOfBytesToMap +# ); +def MapViewOfFile(hFileMappingObject, dwDesiredAccess = FILE_MAP_ALL_ACCESS | 
FILE_MAP_EXECUTE, dwFileOffsetHigh = 0, dwFileOffsetLow = 0, dwNumberOfBytesToMap = 0): + _MapViewOfFile = windll.kernel32.MapViewOfFile + _MapViewOfFile.argtypes = [HANDLE, DWORD, DWORD, DWORD, SIZE_T] + _MapViewOfFile.restype = LPVOID + lpBaseAddress = _MapViewOfFile(hFileMappingObject, dwDesiredAccess, dwFileOffsetHigh, dwFileOffsetLow, dwNumberOfBytesToMap) + if lpBaseAddress == NULL: + raise ctypes.WinError() + return lpBaseAddress + +# BOOL WINAPI UnmapViewOfFile( +# __in LPCVOID lpBaseAddress +# ); +def UnmapViewOfFile(lpBaseAddress): + _UnmapViewOfFile = windll.kernel32.UnmapViewOfFile + _UnmapViewOfFile.argtypes = [LPVOID] + _UnmapViewOfFile.restype = bool + _UnmapViewOfFile.errcheck = RaiseIfZero + _UnmapViewOfFile(lpBaseAddress) + +# HANDLE WINAPI OpenFileMapping( +# __in DWORD dwDesiredAccess, +# __in BOOL bInheritHandle, +# __in LPCTSTR lpName +# ); +def OpenFileMappingA(dwDesiredAccess, bInheritHandle, lpName): + _OpenFileMappingA = windll.kernel32.OpenFileMappingA + _OpenFileMappingA.argtypes = [DWORD, BOOL, LPSTR] + _OpenFileMappingA.restype = HANDLE + _OpenFileMappingA.errcheck = RaiseIfZero + hFileMappingObject = _OpenFileMappingA(dwDesiredAccess, bool(bInheritHandle), lpName) + return FileMappingHandle(hFileMappingObject) + +def OpenFileMappingW(dwDesiredAccess, bInheritHandle, lpName): + _OpenFileMappingW = windll.kernel32.OpenFileMappingW + _OpenFileMappingW.argtypes = [DWORD, BOOL, LPWSTR] + _OpenFileMappingW.restype = HANDLE + _OpenFileMappingW.errcheck = RaiseIfZero + hFileMappingObject = _OpenFileMappingW(dwDesiredAccess, bool(bInheritHandle), lpName) + return FileMappingHandle(hFileMappingObject) + +OpenFileMapping = GuessStringType(OpenFileMappingA, OpenFileMappingW) + +# HANDLE WINAPI CreateFileMapping( +# __in HANDLE hFile, +# __in_opt LPSECURITY_ATTRIBUTES lpAttributes, +# __in DWORD flProtect, +# __in DWORD dwMaximumSizeHigh, +# __in DWORD dwMaximumSizeLow, +# __in_opt LPCTSTR lpName +# ); +def CreateFileMappingA(hFile, lpAttributes = None, flProtect = PAGE_EXECUTE_READWRITE, dwMaximumSizeHigh = 0, dwMaximumSizeLow = 0, lpName = None): + _CreateFileMappingA = windll.kernel32.CreateFileMappingA + _CreateFileMappingA.argtypes = [HANDLE, LPVOID, DWORD, DWORD, DWORD, LPSTR] + _CreateFileMappingA.restype = HANDLE + _CreateFileMappingA.errcheck = RaiseIfZero + + if lpAttributes: + lpAttributes = ctypes.pointer(lpAttributes) + if not lpName: + lpName = None + hFileMappingObject = _CreateFileMappingA(hFile, lpAttributes, flProtect, dwMaximumSizeHigh, dwMaximumSizeLow, lpName) + return FileMappingHandle(hFileMappingObject) + +def CreateFileMappingW(hFile, lpAttributes = None, flProtect = PAGE_EXECUTE_READWRITE, dwMaximumSizeHigh = 0, dwMaximumSizeLow = 0, lpName = None): + _CreateFileMappingW = windll.kernel32.CreateFileMappingW + _CreateFileMappingW.argtypes = [HANDLE, LPVOID, DWORD, DWORD, DWORD, LPWSTR] + _CreateFileMappingW.restype = HANDLE + _CreateFileMappingW.errcheck = RaiseIfZero + + if lpAttributes: + lpAttributes = ctypes.pointer(lpAttributes) + if not lpName: + lpName = None + hFileMappingObject = _CreateFileMappingW(hFile, lpAttributes, flProtect, dwMaximumSizeHigh, dwMaximumSizeLow, lpName) + return FileMappingHandle(hFileMappingObject) + +CreateFileMapping = GuessStringType(CreateFileMappingA, CreateFileMappingW) + +# HANDLE WINAPI CreateFile( +# __in LPCTSTR lpFileName, +# __in DWORD dwDesiredAccess, +# __in DWORD dwShareMode, +# __in_opt LPSECURITY_ATTRIBUTES lpSecurityAttributes, +# __in DWORD dwCreationDisposition, +# __in DWORD 
dwFlagsAndAttributes, +# __in_opt HANDLE hTemplateFile +# ); +def CreateFileA(lpFileName, dwDesiredAccess = GENERIC_ALL, dwShareMode = 0, lpSecurityAttributes = None, dwCreationDisposition = OPEN_ALWAYS, dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL, hTemplateFile = None): + _CreateFileA = windll.kernel32.CreateFileA + _CreateFileA.argtypes = [LPSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE] + _CreateFileA.restype = HANDLE + + if not lpFileName: + lpFileName = None + if lpSecurityAttributes: + lpSecurityAttributes = ctypes.pointer(lpSecurityAttributes) + hFile = _CreateFileA(lpFileName, dwDesiredAccess, dwShareMode, lpSecurityAttributes, dwCreationDisposition, dwFlagsAndAttributes, hTemplateFile) + if hFile == INVALID_HANDLE_VALUE: + raise ctypes.WinError() + return FileHandle(hFile) + +def CreateFileW(lpFileName, dwDesiredAccess = GENERIC_ALL, dwShareMode = 0, lpSecurityAttributes = None, dwCreationDisposition = OPEN_ALWAYS, dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL, hTemplateFile = None): + _CreateFileW = windll.kernel32.CreateFileW + _CreateFileW.argtypes = [LPWSTR, DWORD, DWORD, LPVOID, DWORD, DWORD, HANDLE] + _CreateFileW.restype = HANDLE + + if not lpFileName: + lpFileName = None + if lpSecurityAttributes: + lpSecurityAttributes = ctypes.pointer(lpSecurityAttributes) + hFile = _CreateFileW(lpFileName, dwDesiredAccess, dwShareMode, lpSecurityAttributes, dwCreationDisposition, dwFlagsAndAttributes, hTemplateFile) + if hFile == INVALID_HANDLE_VALUE: + raise ctypes.WinError() + return FileHandle(hFile) + +CreateFile = GuessStringType(CreateFileA, CreateFileW) + +# BOOL WINAPI FlushFileBuffers( +# __in HANDLE hFile +# ); +def FlushFileBuffers(hFile): + _FlushFileBuffers = windll.kernel32.FlushFileBuffers + _FlushFileBuffers.argtypes = [HANDLE] + _FlushFileBuffers.restype = bool + _FlushFileBuffers.errcheck = RaiseIfZero + _FlushFileBuffers(hFile) + +# BOOL WINAPI FlushViewOfFile( +# __in LPCVOID lpBaseAddress, +# __in SIZE_T dwNumberOfBytesToFlush +# ); +def FlushViewOfFile(lpBaseAddress, dwNumberOfBytesToFlush = 0): + _FlushViewOfFile = windll.kernel32.FlushViewOfFile + _FlushViewOfFile.argtypes = [LPVOID, SIZE_T] + _FlushViewOfFile.restype = bool + _FlushViewOfFile.errcheck = RaiseIfZero + _FlushViewOfFile(lpBaseAddress, dwNumberOfBytesToFlush) + +# DWORD WINAPI SearchPath( +# __in_opt LPCTSTR lpPath, +# __in LPCTSTR lpFileName, +# __in_opt LPCTSTR lpExtension, +# __in DWORD nBufferLength, +# __out LPTSTR lpBuffer, +# __out_opt LPTSTR *lpFilePart +# ); +def SearchPathA(lpPath, lpFileName, lpExtension): + _SearchPathA = windll.kernel32.SearchPathA + _SearchPathA.argtypes = [LPSTR, LPSTR, LPSTR, DWORD, LPSTR, POINTER(LPSTR)] + _SearchPathA.restype = DWORD + _SearchPathA.errcheck = RaiseIfZero + + if not lpPath: + lpPath = None + if not lpExtension: + lpExtension = None + nBufferLength = _SearchPathA(lpPath, lpFileName, lpExtension, 0, None, None) + lpBuffer = ctypes.create_string_buffer('', nBufferLength + 1) + lpFilePart = LPSTR() + _SearchPathA(lpPath, lpFileName, lpExtension, nBufferLength, lpBuffer, byref(lpFilePart)) + lpFilePart = lpFilePart.value + lpBuffer = lpBuffer.value + if lpBuffer == '': + if GetLastError() == ERROR_SUCCESS: + raise ctypes.WinError(ERROR_FILE_NOT_FOUND) + raise ctypes.WinError() + return (lpBuffer, lpFilePart) + +def SearchPathW(lpPath, lpFileName, lpExtension): + _SearchPathW = windll.kernel32.SearchPathW + _SearchPathW.argtypes = [LPWSTR, LPWSTR, LPWSTR, DWORD, LPWSTR, POINTER(LPWSTR)] + _SearchPathW.restype = DWORD + _SearchPathW.errcheck = 
RaiseIfZero + + if not lpPath: + lpPath = None + if not lpExtension: + lpExtension = None + nBufferLength = _SearchPathW(lpPath, lpFileName, lpExtension, 0, None, None) + lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength + 1) + lpFilePart = LPWSTR() + _SearchPathW(lpPath, lpFileName, lpExtension, nBufferLength, lpBuffer, byref(lpFilePart)) + lpFilePart = lpFilePart.value + lpBuffer = lpBuffer.value + if lpBuffer == u'': + if GetLastError() == ERROR_SUCCESS: + raise ctypes.WinError(ERROR_FILE_NOT_FOUND) + raise ctypes.WinError() + return (lpBuffer, lpFilePart) + +SearchPath = GuessStringType(SearchPathA, SearchPathW) + +# BOOL SetSearchPathMode( +# __in DWORD Flags +# ); +def SetSearchPathMode(Flags): + _SetSearchPathMode = windll.kernel32.SetSearchPathMode + _SetSearchPathMode.argtypes = [DWORD] + _SetSearchPathMode.restype = bool + _SetSearchPathMode.errcheck = RaiseIfZero + _SetSearchPathMode(Flags) + +# BOOL WINAPI DeviceIoControl( +# __in HANDLE hDevice, +# __in DWORD dwIoControlCode, +# __in_opt LPVOID lpInBuffer, +# __in DWORD nInBufferSize, +# __out_opt LPVOID lpOutBuffer, +# __in DWORD nOutBufferSize, +# __out_opt LPDWORD lpBytesReturned, +# __inout_opt LPOVERLAPPED lpOverlapped +# ); +def DeviceIoControl(hDevice, dwIoControlCode, lpInBuffer, nInBufferSize, lpOutBuffer, nOutBufferSize, lpOverlapped): + _DeviceIoControl = windll.kernel32.DeviceIoControl + _DeviceIoControl.argtypes = [HANDLE, DWORD, LPVOID, DWORD, LPVOID, DWORD, LPDWORD, LPOVERLAPPED] + _DeviceIoControl.restype = bool + _DeviceIoControl.errcheck = RaiseIfZero + + if not lpInBuffer: + lpInBuffer = None + if not lpOutBuffer: + lpOutBuffer = None + if lpOverlapped: + lpOverlapped = ctypes.pointer(lpOverlapped) + lpBytesReturned = DWORD(0) + _DeviceIoControl(hDevice, dwIoControlCode, lpInBuffer, nInBufferSize, lpOutBuffer, nOutBufferSize, byref(lpBytesReturned), lpOverlapped) + return lpBytesReturned.value + +# BOOL GetFileInformationByHandle( +# HANDLE hFile, +# LPBY_HANDLE_FILE_INFORMATION lpFileInformation +# ); +def GetFileInformationByHandle(hFile): + _GetFileInformationByHandle = windll.kernel32.GetFileInformationByHandle + _GetFileInformationByHandle.argtypes = [HANDLE, LPBY_HANDLE_FILE_INFORMATION] + _GetFileInformationByHandle.restype = bool + _GetFileInformationByHandle.errcheck = RaiseIfZero + + lpFileInformation = BY_HANDLE_FILE_INFORMATION() + _GetFileInformationByHandle(hFile, byref(lpFileInformation)) + return lpFileInformation + +# BOOL WINAPI GetFileInformationByHandleEx( +# __in HANDLE hFile, +# __in FILE_INFO_BY_HANDLE_CLASS FileInformationClass, +# __out LPVOID lpFileInformation, +# __in DWORD dwBufferSize +# ); +def GetFileInformationByHandleEx(hFile, FileInformationClass, lpFileInformation, dwBufferSize): + _GetFileInformationByHandleEx = windll.kernel32.GetFileInformationByHandleEx + _GetFileInformationByHandleEx.argtypes = [HANDLE, DWORD, LPVOID, DWORD] + _GetFileInformationByHandleEx.restype = bool + _GetFileInformationByHandleEx.errcheck = RaiseIfZero + # XXX TODO + # support each FileInformationClass so the function can allocate the + # corresponding structure for the lpFileInformation parameter + _GetFileInformationByHandleEx(hFile, FileInformationClass, byref(lpFileInformation), dwBufferSize) + +# DWORD WINAPI GetFinalPathNameByHandle( +# __in HANDLE hFile, +# __out LPTSTR lpszFilePath, +# __in DWORD cchFilePath, +# __in DWORD dwFlags +# ); +def GetFinalPathNameByHandleA(hFile, dwFlags = FILE_NAME_NORMALIZED | VOLUME_NAME_DOS): + _GetFinalPathNameByHandleA = 
windll.kernel32.GetFinalPathNameByHandleA + _GetFinalPathNameByHandleA.argtypes = [HANDLE, LPSTR, DWORD, DWORD] + _GetFinalPathNameByHandleA.restype = DWORD + + cchFilePath = _GetFinalPathNameByHandleA(hFile, None, 0, dwFlags) + if cchFilePath == 0: + raise ctypes.WinError() + lpszFilePath = ctypes.create_string_buffer('', cchFilePath + 1) + nCopied = _GetFinalPathNameByHandleA(hFile, lpszFilePath, cchFilePath, dwFlags) + if nCopied <= 0 or nCopied > cchFilePath: + raise ctypes.WinError() + return lpszFilePath.value + +def GetFinalPathNameByHandleW(hFile, dwFlags = FILE_NAME_NORMALIZED | VOLUME_NAME_DOS): + _GetFinalPathNameByHandleW = windll.kernel32.GetFinalPathNameByHandleW + _GetFinalPathNameByHandleW.argtypes = [HANDLE, LPWSTR, DWORD, DWORD] + _GetFinalPathNameByHandleW.restype = DWORD + + cchFilePath = _GetFinalPathNameByHandleW(hFile, None, 0, dwFlags) + if cchFilePath == 0: + raise ctypes.WinError() + lpszFilePath = ctypes.create_unicode_buffer(u'', cchFilePath + 1) + nCopied = _GetFinalPathNameByHandleW(hFile, lpszFilePath, cchFilePath, dwFlags) + if nCopied <= 0 or nCopied > cchFilePath: + raise ctypes.WinError() + return lpszFilePath.value + +GetFinalPathNameByHandle = GuessStringType(GetFinalPathNameByHandleA, GetFinalPathNameByHandleW) + +# DWORD GetFullPathName( +# LPCTSTR lpFileName, +# DWORD nBufferLength, +# LPTSTR lpBuffer, +# LPTSTR* lpFilePart +# ); +def GetFullPathNameA(lpFileName): + _GetFullPathNameA = windll.kernel32.GetFullPathNameA + _GetFullPathNameA.argtypes = [LPSTR, DWORD, LPSTR, POINTER(LPSTR)] + _GetFullPathNameA.restype = DWORD + + nBufferLength = _GetFullPathNameA(lpFileName, 0, None, None) + if nBufferLength <= 0: + raise ctypes.WinError() + lpBuffer = ctypes.create_string_buffer('', nBufferLength + 1) + lpFilePart = LPSTR() + nCopied = _GetFullPathNameA(lpFileName, nBufferLength, lpBuffer, byref(lpFilePart)) + if nCopied > nBufferLength or nCopied == 0: + raise ctypes.WinError() + return lpBuffer.value, lpFilePart.value + +def GetFullPathNameW(lpFileName): + _GetFullPathNameW = windll.kernel32.GetFullPathNameW + _GetFullPathNameW.argtypes = [LPWSTR, DWORD, LPWSTR, POINTER(LPWSTR)] + _GetFullPathNameW.restype = DWORD + + nBufferLength = _GetFullPathNameW(lpFileName, 0, None, None) + if nBufferLength <= 0: + raise ctypes.WinError() + lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength + 1) + lpFilePart = LPWSTR() + nCopied = _GetFullPathNameW(lpFileName, nBufferLength, lpBuffer, byref(lpFilePart)) + if nCopied > nBufferLength or nCopied == 0: + raise ctypes.WinError() + return lpBuffer.value, lpFilePart.value + +GetFullPathName = GuessStringType(GetFullPathNameA, GetFullPathNameW) + +# DWORD WINAPI GetTempPath( +# __in DWORD nBufferLength, +# __out LPTSTR lpBuffer +# ); +def GetTempPathA(): + _GetTempPathA = windll.kernel32.GetTempPathA + _GetTempPathA.argtypes = [DWORD, LPSTR] + _GetTempPathA.restype = DWORD + + nBufferLength = _GetTempPathA(0, None) + if nBufferLength <= 0: + raise ctypes.WinError() + lpBuffer = ctypes.create_string_buffer('', nBufferLength) + nCopied = _GetTempPathA(nBufferLength, lpBuffer) + if nCopied > nBufferLength or nCopied == 0: + raise ctypes.WinError() + return lpBuffer.value + +def GetTempPathW(): + _GetTempPathW = windll.kernel32.GetTempPathW + _GetTempPathW.argtypes = [DWORD, LPWSTR] + _GetTempPathW.restype = DWORD + + nBufferLength = _GetTempPathW(0, None) + if nBufferLength <= 0: + raise ctypes.WinError() + lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength) + nCopied = _GetTempPathW(nBufferLength, lpBuffer) 
+ if nCopied > nBufferLength or nCopied == 0: + raise ctypes.WinError() + return lpBuffer.value + +GetTempPath = GuessStringType(GetTempPathA, GetTempPathW) + +# UINT WINAPI GetTempFileName( +# __in LPCTSTR lpPathName, +# __in LPCTSTR lpPrefixString, +# __in UINT uUnique, +# __out LPTSTR lpTempFileName +# ); +def GetTempFileNameA(lpPathName = None, lpPrefixString = "TMP", uUnique = 0): + _GetTempFileNameA = windll.kernel32.GetTempFileNameA + _GetTempFileNameA.argtypes = [LPSTR, LPSTR, UINT, LPSTR] + _GetTempFileNameA.restype = UINT + + if lpPathName is None: + lpPathName = GetTempPathA() + lpTempFileName = ctypes.create_string_buffer('', MAX_PATH) + uUnique = _GetTempFileNameA(lpPathName, lpPrefixString, uUnique, lpTempFileName) + if uUnique == 0: + raise ctypes.WinError() + return lpTempFileName.value, uUnique + +def GetTempFileNameW(lpPathName = None, lpPrefixString = u"TMP", uUnique = 0): + _GetTempFileNameW = windll.kernel32.GetTempFileNameW + _GetTempFileNameW.argtypes = [LPWSTR, LPWSTR, UINT, LPWSTR] + _GetTempFileNameW.restype = UINT + + if lpPathName is None: + lpPathName = GetTempPathW() + lpTempFileName = ctypes.create_unicode_buffer(u'', MAX_PATH) + uUnique = _GetTempFileNameW(lpPathName, lpPrefixString, uUnique, lpTempFileName) + if uUnique == 0: + raise ctypes.WinError() + return lpTempFileName.value, uUnique + +GetTempFileName = GuessStringType(GetTempFileNameA, GetTempFileNameW) + +# DWORD WINAPI GetCurrentDirectory( +# __in DWORD nBufferLength, +# __out LPTSTR lpBuffer +# ); +def GetCurrentDirectoryA(): + _GetCurrentDirectoryA = windll.kernel32.GetCurrentDirectoryA + _GetCurrentDirectoryA.argtypes = [DWORD, LPSTR] + _GetCurrentDirectoryA.restype = DWORD + + nBufferLength = _GetCurrentDirectoryA(0, None) + if nBufferLength <= 0: + raise ctypes.WinError() + lpBuffer = ctypes.create_string_buffer('', nBufferLength) + nCopied = _GetCurrentDirectoryA(nBufferLength, lpBuffer) + if nCopied > nBufferLength or nCopied == 0: + raise ctypes.WinError() + return lpBuffer.value + +def GetCurrentDirectoryW(): + _GetCurrentDirectoryW = windll.kernel32.GetCurrentDirectoryW + _GetCurrentDirectoryW.argtypes = [DWORD, LPWSTR] + _GetCurrentDirectoryW.restype = DWORD + + nBufferLength = _GetCurrentDirectoryW(0, None) + if nBufferLength <= 0: + raise ctypes.WinError() + lpBuffer = ctypes.create_unicode_buffer(u'', nBufferLength) + nCopied = _GetCurrentDirectoryW(nBufferLength, lpBuffer) + if nCopied > nBufferLength or nCopied == 0: + raise ctypes.WinError() + return lpBuffer.value + +GetCurrentDirectory = GuessStringType(GetCurrentDirectoryA, GetCurrentDirectoryW) + +#------------------------------------------------------------------------------ +# Contrl-C handler + +# BOOL WINAPI HandlerRoutine( +# __in DWORD dwCtrlType +# ); +PHANDLER_ROUTINE = ctypes.WINFUNCTYPE(BOOL, DWORD) + +# BOOL WINAPI SetConsoleCtrlHandler( +# __in_opt PHANDLER_ROUTINE HandlerRoutine, +# __in BOOL Add +# ); +def SetConsoleCtrlHandler(HandlerRoutine = None, Add = True): + _SetConsoleCtrlHandler = windll.kernel32.SetConsoleCtrlHandler + _SetConsoleCtrlHandler.argtypes = [PHANDLER_ROUTINE, BOOL] + _SetConsoleCtrlHandler.restype = bool + _SetConsoleCtrlHandler.errcheck = RaiseIfZero + _SetConsoleCtrlHandler(HandlerRoutine, bool(Add)) + # we can't automagically transform Python functions to PHANDLER_ROUTINE + # because a) the actual pointer value is meaningful to the API + # and b) if it gets garbage collected bad things would happen + +# BOOL WINAPI GenerateConsoleCtrlEvent( +# __in DWORD dwCtrlEvent, +# __in DWORD 
dwProcessGroupId
+# );
+def GenerateConsoleCtrlEvent(dwCtrlEvent, dwProcessGroupId):
+    _GenerateConsoleCtrlEvent = windll.kernel32.GenerateConsoleCtrlEvent
+    _GenerateConsoleCtrlEvent.argtypes = [DWORD, DWORD]
+    _GenerateConsoleCtrlEvent.restype = bool
+    _GenerateConsoleCtrlEvent.errcheck = RaiseIfZero
+    _GenerateConsoleCtrlEvent(dwCtrlEvent, dwProcessGroupId)
+
+#------------------------------------------------------------------------------
+# Synchronization API
+
+# XXX NOTE
+#
+# Instead of waiting forever, we wait for a small period of time and loop.
+# This is a workaround for an unwanted behavior of psyco-accelerated code:
+# you can't interrupt a blocking call using Ctrl+C, because signal processing
+# is only done between C calls.
+#
+# Also see: bug #2793618 in Psyco project
+# http://sourceforge.net/tracker/?func=detail&aid=2793618&group_id=41036&atid=429622
+
+# DWORD WINAPI WaitForSingleObject(
+#   HANDLE hHandle,
+#   DWORD dwMilliseconds
+# );
+def WaitForSingleObject(hHandle, dwMilliseconds = INFINITE):
+    _WaitForSingleObject = windll.kernel32.WaitForSingleObject
+    _WaitForSingleObject.argtypes = [HANDLE, DWORD]
+    _WaitForSingleObject.restype = DWORD
+
+    if not dwMilliseconds and dwMilliseconds != 0:
+        dwMilliseconds = INFINITE
+    if dwMilliseconds != INFINITE:
+        r = _WaitForSingleObject(hHandle, dwMilliseconds)
+        if r == WAIT_FAILED:
+            raise ctypes.WinError()
+    else:
+        while 1:
+            r = _WaitForSingleObject(hHandle, 100)
+            if r == WAIT_FAILED:
+                raise ctypes.WinError()
+            if r != WAIT_TIMEOUT:
+                break
+    return r
+
+# DWORD WINAPI WaitForSingleObjectEx(
+#   HANDLE hHandle,
+#   DWORD dwMilliseconds,
+#   BOOL bAlertable
+# );
+def WaitForSingleObjectEx(hHandle, dwMilliseconds = INFINITE, bAlertable = True):
+    _WaitForSingleObjectEx = windll.kernel32.WaitForSingleObjectEx
+    _WaitForSingleObjectEx.argtypes = [HANDLE, DWORD, BOOL]
+    _WaitForSingleObjectEx.restype = DWORD
+
+    if not dwMilliseconds and dwMilliseconds != 0:
+        dwMilliseconds = INFINITE
+    if dwMilliseconds != INFINITE:
+        r = _WaitForSingleObjectEx(hHandle, dwMilliseconds, bool(bAlertable))
+        if r == WAIT_FAILED:
+            raise ctypes.WinError()
+    else:
+        while 1:
+            r = _WaitForSingleObjectEx(hHandle, 100, bool(bAlertable))
+            if r == WAIT_FAILED:
+                raise ctypes.WinError()
+            if r != WAIT_TIMEOUT:
+                break
+    return r
+
+# DWORD WINAPI WaitForMultipleObjects(
+#   DWORD nCount,
+#   const HANDLE *lpHandles,
+#   BOOL bWaitAll,
+#   DWORD dwMilliseconds
+# );
+def WaitForMultipleObjects(handles, bWaitAll = False, dwMilliseconds = INFINITE):
+    _WaitForMultipleObjects = windll.kernel32.WaitForMultipleObjects
+    _WaitForMultipleObjects.argtypes = [DWORD, POINTER(HANDLE), BOOL, DWORD]
+    _WaitForMultipleObjects.restype = DWORD
+
+    if not dwMilliseconds and dwMilliseconds != 0:
+        dwMilliseconds = INFINITE
+    nCount = len(handles)
+    lpHandlesType = HANDLE * nCount
+    lpHandles = lpHandlesType(*handles)
+    if dwMilliseconds != INFINITE:
+        r = _WaitForMultipleObjects(nCount, byref(lpHandles), bool(bWaitAll), dwMilliseconds)
+        if r == WAIT_FAILED:
+            raise ctypes.WinError()
+    else:
+        while 1:
+            r = _WaitForMultipleObjects(nCount, byref(lpHandles), bool(bWaitAll), 100)
+            if r == WAIT_FAILED:
+                raise ctypes.WinError()
+            if r != WAIT_TIMEOUT:
+                break
+    return r
+
+# DWORD WINAPI WaitForMultipleObjectsEx(
+#   DWORD nCount,
+#   const HANDLE *lpHandles,
+#   BOOL bWaitAll,
+#   DWORD dwMilliseconds,
+#   BOOL bAlertable
+# );
+def WaitForMultipleObjectsEx(handles, bWaitAll = False, dwMilliseconds = INFINITE, bAlertable = True):
+    _WaitForMultipleObjectsEx = windll.kernel32.WaitForMultipleObjectsEx
+    _WaitForMultipleObjectsEx.argtypes = [DWORD, POINTER(HANDLE), BOOL, DWORD, BOOL]
+    _WaitForMultipleObjectsEx.restype = DWORD
+
+    if not dwMilliseconds and dwMilliseconds != 0:
+        dwMilliseconds = INFINITE
+    nCount = len(handles)
+    lpHandlesType = HANDLE * nCount
+    lpHandles = lpHandlesType(*handles)
+    if dwMilliseconds != INFINITE:
+        r = _WaitForMultipleObjectsEx(nCount, byref(lpHandles), bool(bWaitAll), dwMilliseconds, bool(bAlertable))
+        if r == WAIT_FAILED:
+            raise ctypes.WinError()
+    else:
+        while 1:
+            r = _WaitForMultipleObjectsEx(nCount, byref(lpHandles), bool(bWaitAll), 100, bool(bAlertable))
+            if r == WAIT_FAILED:
+                raise ctypes.WinError()
+            if r != WAIT_TIMEOUT:
+                break
+    return r
+
+# HANDLE WINAPI CreateMutex(
+#   _In_opt_ LPSECURITY_ATTRIBUTES lpMutexAttributes,
+#   _In_     BOOL bInitialOwner,
+#   _In_opt_ LPCTSTR lpName
+# );
+def CreateMutexA(lpMutexAttributes = None, bInitialOwner = True, lpName = None):
+    _CreateMutexA = windll.kernel32.CreateMutexA
+    _CreateMutexA.argtypes = [LPVOID, BOOL, LPSTR]
+    _CreateMutexA.restype = HANDLE
+    _CreateMutexA.errcheck = RaiseIfZero
+    return Handle( _CreateMutexA(lpMutexAttributes, bInitialOwner, lpName) )
+
+def CreateMutexW(lpMutexAttributes = None, bInitialOwner = True, lpName = None):
+    _CreateMutexW = windll.kernel32.CreateMutexW
+    _CreateMutexW.argtypes = [LPVOID, BOOL, LPWSTR]
+    _CreateMutexW.restype = HANDLE
+    _CreateMutexW.errcheck = RaiseIfZero
+    return Handle( _CreateMutexW(lpMutexAttributes, bInitialOwner, lpName) )
+
+CreateMutex = GuessStringType(CreateMutexA, CreateMutexW)
+
+# HANDLE WINAPI OpenMutex(
+#   _In_ DWORD dwDesiredAccess,
+#   _In_ BOOL bInheritHandle,
+#   _In_ LPCTSTR lpName
+# );
+def OpenMutexA(dwDesiredAccess = MUTEX_ALL_ACCESS, bInheritHandle = False, lpName = None):
+    _OpenMutexA = windll.kernel32.OpenMutexA
+    _OpenMutexA.argtypes = [DWORD, BOOL, LPSTR]
+    _OpenMutexA.restype = HANDLE
+    _OpenMutexA.errcheck = RaiseIfZero
+    return Handle( _OpenMutexA(dwDesiredAccess, bool(bInheritHandle), lpName) )
+
+def OpenMutexW(dwDesiredAccess = MUTEX_ALL_ACCESS, bInheritHandle = False, lpName = None):
+    _OpenMutexW = windll.kernel32.OpenMutexW
+    _OpenMutexW.argtypes = [DWORD, BOOL, LPWSTR]
+    _OpenMutexW.restype = HANDLE
+    _OpenMutexW.errcheck = RaiseIfZero
+    return Handle( _OpenMutexW(dwDesiredAccess, bool(bInheritHandle), lpName) )
+
+OpenMutex = GuessStringType(OpenMutexA, OpenMutexW)
+
+# HANDLE WINAPI CreateEvent(
+#   _In_opt_ LPSECURITY_ATTRIBUTES lpEventAttributes,
+#   _In_     BOOL bManualReset,
+#   _In_     BOOL bInitialState,
+#   _In_opt_ LPCTSTR lpName
+# );
+def CreateEventA(lpMutexAttributes = None, bManualReset = False, bInitialState = False, lpName = None):
+    _CreateEventA = windll.kernel32.CreateEventA
+    _CreateEventA.argtypes = [LPVOID, BOOL, BOOL, LPSTR]
+    _CreateEventA.restype = HANDLE
+    _CreateEventA.errcheck = RaiseIfZero
+    return Handle( _CreateEventA(lpMutexAttributes, bManualReset, bInitialState, lpName) )
+
+def CreateEventW(lpMutexAttributes = None, bManualReset = False, bInitialState = False, lpName = None):
+    _CreateEventW = windll.kernel32.CreateEventW
+    _CreateEventW.argtypes = [LPVOID, BOOL, BOOL, LPWSTR]
+    _CreateEventW.restype = HANDLE
+    _CreateEventW.errcheck = RaiseIfZero
+    return Handle( _CreateEventW(lpMutexAttributes, bManualReset, bInitialState, lpName) )
+
+CreateEvent = GuessStringType(CreateEventA, CreateEventW)
+
+# HANDLE WINAPI OpenEvent(
+#   _In_ DWORD dwDesiredAccess,
+#   _In_ BOOL bInheritHandle,
+#   _In_ LPCTSTR lpName
+# );
+def OpenEventA(dwDesiredAccess = EVENT_ALL_ACCESS, bInheritHandle
= False, lpName = None): + _OpenEventA = windll.kernel32.OpenEventA + _OpenEventA.argtypes = [DWORD, BOOL, LPSTR] + _OpenEventA.restype = HANDLE + _OpenEventA.errcheck = RaiseIfZero + return Handle( _OpenEventA(dwDesiredAccess, bInheritHandle, lpName) ) + +def OpenEventW(dwDesiredAccess = EVENT_ALL_ACCESS, bInheritHandle = False, lpName = None): + _OpenEventW = windll.kernel32.OpenEventW + _OpenEventW.argtypes = [DWORD, BOOL, LPWSTR] + _OpenEventW.restype = HANDLE + _OpenEventW.errcheck = RaiseIfZero + return Handle( _OpenEventW(dwDesiredAccess, bInheritHandle, lpName) ) + +OpenEvent = GuessStringType(OpenEventA, OpenEventW) + +# HANDLE WINAPI CreateSemaphore( +# _In_opt_ LPSECURITY_ATTRIBUTES lpSemaphoreAttributes, +# _In_ LONG lInitialCount, +# _In_ LONG lMaximumCount, +# _In_opt_ LPCTSTR lpName +# ); + +# TODO + +# HANDLE WINAPI OpenSemaphore( +# _In_ DWORD dwDesiredAccess, +# _In_ BOOL bInheritHandle, +# _In_ LPCTSTR lpName +# ); + +# TODO + +# BOOL WINAPI ReleaseMutex( +# _In_ HANDLE hMutex +# ); +def ReleaseMutex(hMutex): + _ReleaseMutex = windll.kernel32.ReleaseMutex + _ReleaseMutex.argtypes = [HANDLE] + _ReleaseMutex.restype = bool + _ReleaseMutex.errcheck = RaiseIfZero + _ReleaseMutex(hMutex) + +# BOOL WINAPI SetEvent( +# _In_ HANDLE hEvent +# ); +def SetEvent(hEvent): + _SetEvent = windll.kernel32.SetEvent + _SetEvent.argtypes = [HANDLE] + _SetEvent.restype = bool + _SetEvent.errcheck = RaiseIfZero + _SetEvent(hEvent) + +# BOOL WINAPI ResetEvent( +# _In_ HANDLE hEvent +# ); +def ResetEvent(hEvent): + _ResetEvent = windll.kernel32.ResetEvent + _ResetEvent.argtypes = [HANDLE] + _ResetEvent.restype = bool + _ResetEvent.errcheck = RaiseIfZero + _ResetEvent(hEvent) + +# BOOL WINAPI PulseEvent( +# _In_ HANDLE hEvent +# ); +def PulseEvent(hEvent): + _PulseEvent = windll.kernel32.PulseEvent + _PulseEvent.argtypes = [HANDLE] + _PulseEvent.restype = bool + _PulseEvent.errcheck = RaiseIfZero + _PulseEvent(hEvent) + +# BOOL WINAPI ReleaseSemaphore( +# _In_ HANDLE hSemaphore, +# _In_ LONG lReleaseCount, +# _Out_opt_ LPLONG lpPreviousCount +# ); + +# TODO + +#------------------------------------------------------------------------------ +# Debug API + +# BOOL WaitForDebugEvent( +# LPDEBUG_EVENT lpDebugEvent, +# DWORD dwMilliseconds +# ); +def WaitForDebugEvent(dwMilliseconds = INFINITE): + _WaitForDebugEvent = windll.kernel32.WaitForDebugEvent + _WaitForDebugEvent.argtypes = [LPDEBUG_EVENT, DWORD] + _WaitForDebugEvent.restype = DWORD + + if not dwMilliseconds and dwMilliseconds != 0: + dwMilliseconds = INFINITE + lpDebugEvent = DEBUG_EVENT() + lpDebugEvent.dwDebugEventCode = 0 + lpDebugEvent.dwProcessId = 0 + lpDebugEvent.dwThreadId = 0 + if dwMilliseconds != INFINITE: + success = _WaitForDebugEvent(byref(lpDebugEvent), dwMilliseconds) + if success == 0: + raise ctypes.WinError() + else: + # this avoids locking the Python GIL for too long + while 1: + success = _WaitForDebugEvent(byref(lpDebugEvent), 100) + if success != 0: + break + code = GetLastError() + if code not in (ERROR_SEM_TIMEOUT, WAIT_TIMEOUT): + raise ctypes.WinError(code) + return lpDebugEvent + +# BOOL ContinueDebugEvent( +# DWORD dwProcessId, +# DWORD dwThreadId, +# DWORD dwContinueStatus +# ); +def ContinueDebugEvent(dwProcessId, dwThreadId, dwContinueStatus = DBG_EXCEPTION_NOT_HANDLED): + _ContinueDebugEvent = windll.kernel32.ContinueDebugEvent + _ContinueDebugEvent.argtypes = [DWORD, DWORD, DWORD] + _ContinueDebugEvent.restype = bool + _ContinueDebugEvent.errcheck = RaiseIfZero + _ContinueDebugEvent(dwProcessId, dwThreadId, 
dwContinueStatus) + +# BOOL WINAPI FlushInstructionCache( +# __in HANDLE hProcess, +# __in LPCVOID lpBaseAddress, +# __in SIZE_T dwSize +# ); +def FlushInstructionCache(hProcess, lpBaseAddress = None, dwSize = 0): + # http://blogs.msdn.com/oldnewthing/archive/2003/12/08/55954.aspx#55958 + _FlushInstructionCache = windll.kernel32.FlushInstructionCache + _FlushInstructionCache.argtypes = [HANDLE, LPVOID, SIZE_T] + _FlushInstructionCache.restype = bool + _FlushInstructionCache.errcheck = RaiseIfZero + _FlushInstructionCache(hProcess, lpBaseAddress, dwSize) + +# BOOL DebugActiveProcess( +# DWORD dwProcessId +# ); +def DebugActiveProcess(dwProcessId): + _DebugActiveProcess = windll.kernel32.DebugActiveProcess + _DebugActiveProcess.argtypes = [DWORD] + _DebugActiveProcess.restype = bool + _DebugActiveProcess.errcheck = RaiseIfZero + _DebugActiveProcess(dwProcessId) + +# BOOL DebugActiveProcessStop( +# DWORD dwProcessId +# ); +def DebugActiveProcessStop(dwProcessId): + _DebugActiveProcessStop = windll.kernel32.DebugActiveProcessStop + _DebugActiveProcessStop.argtypes = [DWORD] + _DebugActiveProcessStop.restype = bool + _DebugActiveProcessStop.errcheck = RaiseIfZero + _DebugActiveProcessStop(dwProcessId) + +# BOOL CheckRemoteDebuggerPresent( +# HANDLE hProcess, +# PBOOL pbDebuggerPresent +# ); +def CheckRemoteDebuggerPresent(hProcess): + _CheckRemoteDebuggerPresent = windll.kernel32.CheckRemoteDebuggerPresent + _CheckRemoteDebuggerPresent.argtypes = [HANDLE, PBOOL] + _CheckRemoteDebuggerPresent.restype = bool + _CheckRemoteDebuggerPresent.errcheck = RaiseIfZero + + pbDebuggerPresent = BOOL(0) + _CheckRemoteDebuggerPresent(hProcess, byref(pbDebuggerPresent)) + return bool(pbDebuggerPresent.value) + +# BOOL DebugSetProcessKillOnExit( +# BOOL KillOnExit +# ); +def DebugSetProcessKillOnExit(KillOnExit): + _DebugSetProcessKillOnExit = windll.kernel32.DebugSetProcessKillOnExit + _DebugSetProcessKillOnExit.argtypes = [BOOL] + _DebugSetProcessKillOnExit.restype = bool + _DebugSetProcessKillOnExit.errcheck = RaiseIfZero + _DebugSetProcessKillOnExit(bool(KillOnExit)) + +# BOOL DebugBreakProcess( +# HANDLE Process +# ); +def DebugBreakProcess(hProcess): + _DebugBreakProcess = windll.kernel32.DebugBreakProcess + _DebugBreakProcess.argtypes = [HANDLE] + _DebugBreakProcess.restype = bool + _DebugBreakProcess.errcheck = RaiseIfZero + _DebugBreakProcess(hProcess) + +# void WINAPI OutputDebugString( +# __in_opt LPCTSTR lpOutputString +# ); +def OutputDebugStringA(lpOutputString): + _OutputDebugStringA = windll.kernel32.OutputDebugStringA + _OutputDebugStringA.argtypes = [LPSTR] + _OutputDebugStringA.restype = None + _OutputDebugStringA(lpOutputString) + +def OutputDebugStringW(lpOutputString): + _OutputDebugStringW = windll.kernel32.OutputDebugStringW + _OutputDebugStringW.argtypes = [LPWSTR] + _OutputDebugStringW.restype = None + _OutputDebugStringW(lpOutputString) + +OutputDebugString = GuessStringType(OutputDebugStringA, OutputDebugStringW) + +# BOOL WINAPI ReadProcessMemory( +# __in HANDLE hProcess, +# __in LPCVOID lpBaseAddress, +# __out LPVOID lpBuffer, +# __in SIZE_T nSize, +# __out SIZE_T* lpNumberOfBytesRead +# ); +def ReadProcessMemory(hProcess, lpBaseAddress, nSize): + _ReadProcessMemory = windll.kernel32.ReadProcessMemory + _ReadProcessMemory.argtypes = [HANDLE, LPVOID, LPVOID, SIZE_T, POINTER(SIZE_T)] + _ReadProcessMemory.restype = bool + + lpBuffer = ctypes.create_string_buffer(compat.b(''), nSize) + lpNumberOfBytesRead = SIZE_T(0) + success = _ReadProcessMemory(hProcess, lpBaseAddress, lpBuffer, 
nSize, byref(lpNumberOfBytesRead)) + if not success and GetLastError() != ERROR_PARTIAL_COPY: + raise ctypes.WinError() + return compat.b(lpBuffer.raw)[:lpNumberOfBytesRead.value] + +# BOOL WINAPI WriteProcessMemory( +# __in HANDLE hProcess, +# __in LPCVOID lpBaseAddress, +# __in LPVOID lpBuffer, +# __in SIZE_T nSize, +# __out SIZE_T* lpNumberOfBytesWritten +# ); +def WriteProcessMemory(hProcess, lpBaseAddress, lpBuffer): + _WriteProcessMemory = windll.kernel32.WriteProcessMemory + _WriteProcessMemory.argtypes = [HANDLE, LPVOID, LPVOID, SIZE_T, POINTER(SIZE_T)] + _WriteProcessMemory.restype = bool + + nSize = len(lpBuffer) + lpBuffer = ctypes.create_string_buffer(lpBuffer) + lpNumberOfBytesWritten = SIZE_T(0) + success = _WriteProcessMemory(hProcess, lpBaseAddress, lpBuffer, nSize, byref(lpNumberOfBytesWritten)) + if not success and GetLastError() != ERROR_PARTIAL_COPY: + raise ctypes.WinError() + return lpNumberOfBytesWritten.value + +# LPVOID WINAPI VirtualAllocEx( +# __in HANDLE hProcess, +# __in_opt LPVOID lpAddress, +# __in SIZE_T dwSize, +# __in DWORD flAllocationType, +# __in DWORD flProtect +# ); +def VirtualAllocEx(hProcess, lpAddress = 0, dwSize = 0x1000, flAllocationType = MEM_COMMIT | MEM_RESERVE, flProtect = PAGE_EXECUTE_READWRITE): + _VirtualAllocEx = windll.kernel32.VirtualAllocEx + _VirtualAllocEx.argtypes = [HANDLE, LPVOID, SIZE_T, DWORD, DWORD] + _VirtualAllocEx.restype = LPVOID + + lpAddress = _VirtualAllocEx(hProcess, lpAddress, dwSize, flAllocationType, flProtect) + if lpAddress == NULL: + raise ctypes.WinError() + return lpAddress + +# SIZE_T WINAPI VirtualQueryEx( +# __in HANDLE hProcess, +# __in_opt LPCVOID lpAddress, +# __out PMEMORY_BASIC_INFORMATION lpBuffer, +# __in SIZE_T dwLength +# ); +def VirtualQueryEx(hProcess, lpAddress): + _VirtualQueryEx = windll.kernel32.VirtualQueryEx + _VirtualQueryEx.argtypes = [HANDLE, LPVOID, PMEMORY_BASIC_INFORMATION, SIZE_T] + _VirtualQueryEx.restype = SIZE_T + + lpBuffer = MEMORY_BASIC_INFORMATION() + dwLength = sizeof(MEMORY_BASIC_INFORMATION) + success = _VirtualQueryEx(hProcess, lpAddress, byref(lpBuffer), dwLength) + if success == 0: + raise ctypes.WinError() + return MemoryBasicInformation(lpBuffer) + +# BOOL WINAPI VirtualProtectEx( +# __in HANDLE hProcess, +# __in LPVOID lpAddress, +# __in SIZE_T dwSize, +# __in DWORD flNewProtect, +# __out PDWORD lpflOldProtect +# ); +def VirtualProtectEx(hProcess, lpAddress, dwSize, flNewProtect = PAGE_EXECUTE_READWRITE): + _VirtualProtectEx = windll.kernel32.VirtualProtectEx + _VirtualProtectEx.argtypes = [HANDLE, LPVOID, SIZE_T, DWORD, PDWORD] + _VirtualProtectEx.restype = bool + _VirtualProtectEx.errcheck = RaiseIfZero + + flOldProtect = DWORD(0) + _VirtualProtectEx(hProcess, lpAddress, dwSize, flNewProtect, byref(flOldProtect)) + return flOldProtect.value + +# BOOL WINAPI VirtualFreeEx( +# __in HANDLE hProcess, +# __in LPVOID lpAddress, +# __in SIZE_T dwSize, +# __in DWORD dwFreeType +# ); +def VirtualFreeEx(hProcess, lpAddress, dwSize = 0, dwFreeType = MEM_RELEASE): + _VirtualFreeEx = windll.kernel32.VirtualFreeEx + _VirtualFreeEx.argtypes = [HANDLE, LPVOID, SIZE_T, DWORD] + _VirtualFreeEx.restype = bool + _VirtualFreeEx.errcheck = RaiseIfZero + _VirtualFreeEx(hProcess, lpAddress, dwSize, dwFreeType) + +# HANDLE WINAPI CreateRemoteThread( +# __in HANDLE hProcess, +# __in LPSECURITY_ATTRIBUTES lpThreadAttributes, +# __in SIZE_T dwStackSize, +# __in LPTHREAD_START_ROUTINE lpStartAddress, +# __in LPVOID lpParameter, +# __in DWORD dwCreationFlags, +# __out LPDWORD lpThreadId +# ); 
+def CreateRemoteThread(hProcess, lpThreadAttributes, dwStackSize, lpStartAddress, lpParameter, dwCreationFlags): + _CreateRemoteThread = windll.kernel32.CreateRemoteThread + _CreateRemoteThread.argtypes = [HANDLE, LPSECURITY_ATTRIBUTES, SIZE_T, LPVOID, LPVOID, DWORD, LPDWORD] + _CreateRemoteThread.restype = HANDLE + + if not lpThreadAttributes: + lpThreadAttributes = None + else: + lpThreadAttributes = byref(lpThreadAttributes) + dwThreadId = DWORD(0) + hThread = _CreateRemoteThread(hProcess, lpThreadAttributes, dwStackSize, lpStartAddress, lpParameter, dwCreationFlags, byref(dwThreadId)) + if not hThread: + raise ctypes.WinError() + return ThreadHandle(hThread), dwThreadId.value + +#------------------------------------------------------------------------------ +# Process API + +# BOOL WINAPI CreateProcess( +# __in_opt LPCTSTR lpApplicationName, +# __inout_opt LPTSTR lpCommandLine, +# __in_opt LPSECURITY_ATTRIBUTES lpProcessAttributes, +# __in_opt LPSECURITY_ATTRIBUTES lpThreadAttributes, +# __in BOOL bInheritHandles, +# __in DWORD dwCreationFlags, +# __in_opt LPVOID lpEnvironment, +# __in_opt LPCTSTR lpCurrentDirectory, +# __in LPSTARTUPINFO lpStartupInfo, +# __out LPPROCESS_INFORMATION lpProcessInformation +# ); +def CreateProcessA(lpApplicationName, lpCommandLine=None, lpProcessAttributes=None, lpThreadAttributes=None, bInheritHandles=False, dwCreationFlags=0, lpEnvironment=None, lpCurrentDirectory=None, lpStartupInfo=None): + _CreateProcessA = windll.kernel32.CreateProcessA + _CreateProcessA.argtypes = [LPSTR, LPSTR, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPSTR, LPVOID, LPPROCESS_INFORMATION] + _CreateProcessA.restype = bool + _CreateProcessA.errcheck = RaiseIfZero + + if not lpApplicationName: + lpApplicationName = None + if not lpCommandLine: + lpCommandLine = None + else: + lpCommandLine = ctypes.create_string_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine))) + if not lpEnvironment: + lpEnvironment = None + else: + lpEnvironment = ctypes.create_string_buffer(lpEnvironment) + if not lpCurrentDirectory: + lpCurrentDirectory = None + if not lpProcessAttributes: + lpProcessAttributes = None + else: + lpProcessAttributes = byref(lpProcessAttributes) + if not lpThreadAttributes: + lpThreadAttributes = None + else: + lpThreadAttributes = byref(lpThreadAttributes) + if not lpStartupInfo: + lpStartupInfo = STARTUPINFO() + lpStartupInfo.cb = sizeof(STARTUPINFO) + lpStartupInfo.lpReserved = 0 + lpStartupInfo.lpDesktop = 0 + lpStartupInfo.lpTitle = 0 + lpStartupInfo.dwFlags = 0 + lpStartupInfo.cbReserved2 = 0 + lpStartupInfo.lpReserved2 = 0 + lpProcessInformation = PROCESS_INFORMATION() + lpProcessInformation.hProcess = INVALID_HANDLE_VALUE + lpProcessInformation.hThread = INVALID_HANDLE_VALUE + lpProcessInformation.dwProcessId = 0 + lpProcessInformation.dwThreadId = 0 + _CreateProcessA(lpApplicationName, lpCommandLine, lpProcessAttributes, lpThreadAttributes, bool(bInheritHandles), dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation)) + return ProcessInformation(lpProcessInformation) + +def CreateProcessW(lpApplicationName, lpCommandLine=None, lpProcessAttributes=None, lpThreadAttributes=None, bInheritHandles=False, dwCreationFlags=0, lpEnvironment=None, lpCurrentDirectory=None, lpStartupInfo=None): + _CreateProcessW = windll.kernel32.CreateProcessW + _CreateProcessW.argtypes = [LPWSTR, LPWSTR, LPSECURITY_ATTRIBUTES, LPSECURITY_ATTRIBUTES, BOOL, DWORD, LPVOID, LPWSTR, LPVOID, LPPROCESS_INFORMATION] + 
_CreateProcessW.restype = bool + _CreateProcessW.errcheck = RaiseIfZero + + if not lpApplicationName: + lpApplicationName = None + if not lpCommandLine: + lpCommandLine = None + else: + lpCommandLine = ctypes.create_unicode_buffer(lpCommandLine, max(MAX_PATH, len(lpCommandLine))) + if not lpEnvironment: + lpEnvironment = None + else: + lpEnvironment = ctypes.create_unicode_buffer(lpEnvironment) + if not lpCurrentDirectory: + lpCurrentDirectory = None + if not lpProcessAttributes: + lpProcessAttributes = None + else: + lpProcessAttributes = byref(lpProcessAttributes) + if not lpThreadAttributes: + lpThreadAttributes = None + else: + lpThreadAttributes = byref(lpThreadAttributes) + if not lpStartupInfo: + lpStartupInfo = STARTUPINFO() + lpStartupInfo.cb = sizeof(STARTUPINFO) + lpStartupInfo.lpReserved = 0 + lpStartupInfo.lpDesktop = 0 + lpStartupInfo.lpTitle = 0 + lpStartupInfo.dwFlags = 0 + lpStartupInfo.cbReserved2 = 0 + lpStartupInfo.lpReserved2 = 0 + lpProcessInformation = PROCESS_INFORMATION() + lpProcessInformation.hProcess = INVALID_HANDLE_VALUE + lpProcessInformation.hThread = INVALID_HANDLE_VALUE + lpProcessInformation.dwProcessId = 0 + lpProcessInformation.dwThreadId = 0 + _CreateProcessW(lpApplicationName, lpCommandLine, lpProcessAttributes, lpThreadAttributes, bool(bInheritHandles), dwCreationFlags, lpEnvironment, lpCurrentDirectory, byref(lpStartupInfo), byref(lpProcessInformation)) + return ProcessInformation(lpProcessInformation) + +CreateProcess = GuessStringType(CreateProcessA, CreateProcessW) + +# BOOL WINAPI InitializeProcThreadAttributeList( +# __out_opt LPPROC_THREAD_ATTRIBUTE_LIST lpAttributeList, +# __in DWORD dwAttributeCount, +# __reserved DWORD dwFlags, +# __inout PSIZE_T lpSize +# ); +def InitializeProcThreadAttributeList(dwAttributeCount): + _InitializeProcThreadAttributeList = windll.kernel32.InitializeProcThreadAttributeList + _InitializeProcThreadAttributeList.argtypes = [LPPROC_THREAD_ATTRIBUTE_LIST, DWORD, DWORD, PSIZE_T] + _InitializeProcThreadAttributeList.restype = bool + + Size = SIZE_T(0) + _InitializeProcThreadAttributeList(None, dwAttributeCount, 0, byref(Size)) + RaiseIfZero(Size.value) + AttributeList = (BYTE * Size.value)() + success = _InitializeProcThreadAttributeList(byref(AttributeList), dwAttributeCount, 0, byref(Size)) + RaiseIfZero(success) + return AttributeList + +# BOOL WINAPI UpdateProcThreadAttribute( +# __inout LPPROC_THREAD_ATTRIBUTE_LIST lpAttributeList, +# __in DWORD dwFlags, +# __in DWORD_PTR Attribute, +# __in PVOID lpValue, +# __in SIZE_T cbSize, +# __out_opt PVOID lpPreviousValue, +# __in_opt PSIZE_T lpReturnSize +# ); +def UpdateProcThreadAttribute(lpAttributeList, Attribute, Value, cbSize = None): + _UpdateProcThreadAttribute = windll.kernel32.UpdateProcThreadAttribute + _UpdateProcThreadAttribute.argtypes = [LPPROC_THREAD_ATTRIBUTE_LIST, DWORD, DWORD_PTR, PVOID, SIZE_T, PVOID, PSIZE_T] + _UpdateProcThreadAttribute.restype = bool + _UpdateProcThreadAttribute.errcheck = RaiseIfZero + + if cbSize is None: + cbSize = sizeof(Value) + _UpdateProcThreadAttribute(byref(lpAttributeList), 0, Attribute, byref(Value), cbSize, None, None) + +# VOID WINAPI DeleteProcThreadAttributeList( +# __inout LPPROC_THREAD_ATTRIBUTE_LIST lpAttributeList +# ); +def DeleteProcThreadAttributeList(lpAttributeList): + _DeleteProcThreadAttributeList = windll.kernel32.DeleteProcThreadAttributeList + _DeleteProcThreadAttributeList.restype = None + _DeleteProcThreadAttributeList(byref(lpAttributeList)) + +# HANDLE WINAPI OpenProcess( +# __in DWORD 
dwDesiredAccess,
+#   __in BOOL bInheritHandle,
+#   __in DWORD dwProcessId
+# );
+def OpenProcess(dwDesiredAccess, bInheritHandle, dwProcessId):
+    _OpenProcess = windll.kernel32.OpenProcess
+    _OpenProcess.argtypes = [DWORD, BOOL, DWORD]
+    _OpenProcess.restype = HANDLE
+
+    hProcess = _OpenProcess(dwDesiredAccess, bool(bInheritHandle), dwProcessId)
+    if hProcess == NULL:
+        raise ctypes.WinError()
+    return ProcessHandle(hProcess, dwAccess = dwDesiredAccess)
+
+# HANDLE WINAPI OpenThread(
+#   __in DWORD dwDesiredAccess,
+#   __in BOOL bInheritHandle,
+#   __in DWORD dwThreadId
+# );
+def OpenThread(dwDesiredAccess, bInheritHandle, dwThreadId):
+    _OpenThread = windll.kernel32.OpenThread
+    _OpenThread.argtypes = [DWORD, BOOL, DWORD]
+    _OpenThread.restype = HANDLE
+
+    hThread = _OpenThread(dwDesiredAccess, bool(bInheritHandle), dwThreadId)
+    if hThread == NULL:
+        raise ctypes.WinError()
+    return ThreadHandle(hThread, dwAccess = dwDesiredAccess)
+
+# DWORD WINAPI SuspendThread(
+#   __in HANDLE hThread
+# );
+def SuspendThread(hThread):
+    _SuspendThread = windll.kernel32.SuspendThread
+    _SuspendThread.argtypes = [HANDLE]
+    _SuspendThread.restype = DWORD
+
+    previousCount = _SuspendThread(hThread)
+    if previousCount == DWORD(-1).value:
+        raise ctypes.WinError()
+    return previousCount
+
+# DWORD WINAPI ResumeThread(
+#   __in HANDLE hThread
+# );
+def ResumeThread(hThread):
+    _ResumeThread = windll.kernel32.ResumeThread
+    _ResumeThread.argtypes = [HANDLE]
+    _ResumeThread.restype = DWORD
+
+    previousCount = _ResumeThread(hThread)
+    if previousCount == DWORD(-1).value:
+        raise ctypes.WinError()
+    return previousCount
+
+# BOOL WINAPI TerminateThread(
+#   __inout HANDLE hThread,
+#   __in DWORD dwExitCode
+# );
+def TerminateThread(hThread, dwExitCode = 0):
+    _TerminateThread = windll.kernel32.TerminateThread
+    _TerminateThread.argtypes = [HANDLE, DWORD]
+    _TerminateThread.restype = bool
+    _TerminateThread.errcheck = RaiseIfZero
+    _TerminateThread(hThread, dwExitCode)
+
+# BOOL WINAPI TerminateProcess(
+#   __inout HANDLE hProcess,
+#   __in DWORD dwExitCode
+# );
+def TerminateProcess(hProcess, dwExitCode = 0):
+    _TerminateProcess = windll.kernel32.TerminateProcess
+    _TerminateProcess.argtypes = [HANDLE, DWORD]
+    _TerminateProcess.restype = bool
+    _TerminateProcess.errcheck = RaiseIfZero
+    _TerminateProcess(hProcess, dwExitCode)
+
+# DWORD WINAPI GetCurrentProcessId(void);
+def GetCurrentProcessId():
+    _GetCurrentProcessId = windll.kernel32.GetCurrentProcessId
+    _GetCurrentProcessId.argtypes = []
+    _GetCurrentProcessId.restype = DWORD
+    return _GetCurrentProcessId()
+
+# DWORD WINAPI GetCurrentThreadId(void);
+def GetCurrentThreadId():
+    _GetCurrentThreadId = windll.kernel32.GetCurrentThreadId
+    _GetCurrentThreadId.argtypes = []
+    _GetCurrentThreadId.restype = DWORD
+    return _GetCurrentThreadId()
+
+# DWORD WINAPI GetProcessId(
+#   __in HANDLE hProcess
+# );
+def GetProcessId(hProcess):
+    _GetProcessId = windll.kernel32.GetProcessId
+    _GetProcessId.argtypes = [HANDLE]
+    _GetProcessId.restype = DWORD
+    _GetProcessId.errcheck = RaiseIfZero
+    return _GetProcessId(hProcess)
+
+# DWORD WINAPI GetThreadId(
+#   __in HANDLE hThread
+# );
+def GetThreadId(hThread):
+    _GetThreadId = windll.kernel32.GetThreadId
+    _GetThreadId.argtypes = [HANDLE]
+    _GetThreadId.restype = DWORD
+
+    dwThreadId = _GetThreadId(hThread)
+    if dwThreadId == 0:
+        raise ctypes.WinError()
+    return dwThreadId
+
+# DWORD WINAPI GetProcessIdOfThread(
+#   __in HANDLE hThread
+# );
+def GetProcessIdOfThread(hThread):
+    _GetProcessIdOfThread =
windll.kernel32.GetProcessIdOfThread + _GetProcessIdOfThread.argtypes = [HANDLE] + _GetProcessIdOfThread.restype = DWORD + + dwProcessId = _GetProcessIdOfThread(hThread) + if dwProcessId == 0: + raise ctypes.WinError() + return dwProcessId + +# BOOL WINAPI GetExitCodeProcess( +# __in HANDLE hProcess, +# __out LPDWORD lpExitCode +# ); +def GetExitCodeProcess(hProcess): + _GetExitCodeProcess = windll.kernel32.GetExitCodeProcess + _GetExitCodeProcess.argtypes = [HANDLE] + _GetExitCodeProcess.restype = bool + _GetExitCodeProcess.errcheck = RaiseIfZero + + lpExitCode = DWORD(0) + _GetExitCodeProcess(hProcess, byref(lpExitCode)) + return lpExitCode.value + +# BOOL WINAPI GetExitCodeThread( +# __in HANDLE hThread, +# __out LPDWORD lpExitCode +# ); +def GetExitCodeThread(hThread): + _GetExitCodeThread = windll.kernel32.GetExitCodeThread + _GetExitCodeThread.argtypes = [HANDLE] + _GetExitCodeThread.restype = bool + _GetExitCodeThread.errcheck = RaiseIfZero + + lpExitCode = DWORD(0) + _GetExitCodeThread(hThread, byref(lpExitCode)) + return lpExitCode.value + +# DWORD WINAPI GetProcessVersion( +# __in DWORD ProcessId +# ); +def GetProcessVersion(ProcessId): + _GetProcessVersion = windll.kernel32.GetProcessVersion + _GetProcessVersion.argtypes = [DWORD] + _GetProcessVersion.restype = DWORD + + retval = _GetProcessVersion(ProcessId) + if retval == 0: + raise ctypes.WinError() + return retval + +# DWORD WINAPI GetPriorityClass( +# __in HANDLE hProcess +# ); +def GetPriorityClass(hProcess): + _GetPriorityClass = windll.kernel32.GetPriorityClass + _GetPriorityClass.argtypes = [HANDLE] + _GetPriorityClass.restype = DWORD + + retval = _GetPriorityClass(hProcess) + if retval == 0: + raise ctypes.WinError() + return retval + +# BOOL WINAPI SetPriorityClass( +# __in HANDLE hProcess, +# __in DWORD dwPriorityClass +# ); +def SetPriorityClass(hProcess, dwPriorityClass = NORMAL_PRIORITY_CLASS): + _SetPriorityClass = windll.kernel32.SetPriorityClass + _SetPriorityClass.argtypes = [HANDLE, DWORD] + _SetPriorityClass.restype = bool + _SetPriorityClass.errcheck = RaiseIfZero + _SetPriorityClass(hProcess, dwPriorityClass) + +# BOOL WINAPI GetProcessPriorityBoost( +# __in HANDLE hProcess, +# __out PBOOL pDisablePriorityBoost +# ); +def GetProcessPriorityBoost(hProcess): + _GetProcessPriorityBoost = windll.kernel32.GetProcessPriorityBoost + _GetProcessPriorityBoost.argtypes = [HANDLE, PBOOL] + _GetProcessPriorityBoost.restype = bool + _GetProcessPriorityBoost.errcheck = RaiseIfZero + + pDisablePriorityBoost = BOOL(False) + _GetProcessPriorityBoost(hProcess, byref(pDisablePriorityBoost)) + return bool(pDisablePriorityBoost.value) + +# BOOL WINAPI SetProcessPriorityBoost( +# __in HANDLE hProcess, +# __in BOOL DisablePriorityBoost +# ); +def SetProcessPriorityBoost(hProcess, DisablePriorityBoost): + _SetProcessPriorityBoost = windll.kernel32.SetProcessPriorityBoost + _SetProcessPriorityBoost.argtypes = [HANDLE, BOOL] + _SetProcessPriorityBoost.restype = bool + _SetProcessPriorityBoost.errcheck = RaiseIfZero + _SetProcessPriorityBoost(hProcess, bool(DisablePriorityBoost)) + +# BOOL WINAPI GetProcessAffinityMask( +# __in HANDLE hProcess, +# __out PDWORD_PTR lpProcessAffinityMask, +# __out PDWORD_PTR lpSystemAffinityMask +# ); +def GetProcessAffinityMask(hProcess): + _GetProcessAffinityMask = windll.kernel32.GetProcessAffinityMask + _GetProcessAffinityMask.argtypes = [HANDLE, PDWORD_PTR, PDWORD_PTR] + _GetProcessAffinityMask.restype = bool + _GetProcessAffinityMask.errcheck = RaiseIfZero + + lpProcessAffinityMask = 
DWORD_PTR(0) + lpSystemAffinityMask = DWORD_PTR(0) + _GetProcessAffinityMask(hProcess, byref(lpProcessAffinityMask), byref(lpSystemAffinityMask)) + return lpProcessAffinityMask.value, lpSystemAffinityMask.value + +# BOOL WINAPI SetProcessAffinityMask( +# __in HANDLE hProcess, +# __in DWORD_PTR dwProcessAffinityMask +# ); +def SetProcessAffinityMask(hProcess, dwProcessAffinityMask): + _SetProcessAffinityMask = windll.kernel32.SetProcessAffinityMask + _SetProcessAffinityMask.argtypes = [HANDLE, DWORD_PTR] + _SetProcessAffinityMask.restype = bool + _SetProcessAffinityMask.errcheck = RaiseIfZero + _SetProcessAffinityMask(hProcess, dwProcessAffinityMask) + +#------------------------------------------------------------------------------ +# Toolhelp32 API + +# HANDLE WINAPI CreateToolhelp32Snapshot( +# __in DWORD dwFlags, +# __in DWORD th32ProcessID +# ); +def CreateToolhelp32Snapshot(dwFlags = TH32CS_SNAPALL, th32ProcessID = 0): + _CreateToolhelp32Snapshot = windll.kernel32.CreateToolhelp32Snapshot + _CreateToolhelp32Snapshot.argtypes = [DWORD, DWORD] + _CreateToolhelp32Snapshot.restype = HANDLE + + hSnapshot = _CreateToolhelp32Snapshot(dwFlags, th32ProcessID) + if hSnapshot == INVALID_HANDLE_VALUE: + raise ctypes.WinError() + return SnapshotHandle(hSnapshot) + +# BOOL WINAPI Process32First( +# __in HANDLE hSnapshot, +# __inout LPPROCESSENTRY32 lppe +# ); +def Process32First(hSnapshot): + _Process32First = windll.kernel32.Process32First + _Process32First.argtypes = [HANDLE, LPPROCESSENTRY32] + _Process32First.restype = bool + + pe = PROCESSENTRY32() + pe.dwSize = sizeof(PROCESSENTRY32) + success = _Process32First(hSnapshot, byref(pe)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return pe + +# BOOL WINAPI Process32Next( +# __in HANDLE hSnapshot, +# __out LPPROCESSENTRY32 lppe +# ); +def Process32Next(hSnapshot, pe = None): + _Process32Next = windll.kernel32.Process32Next + _Process32Next.argtypes = [HANDLE, LPPROCESSENTRY32] + _Process32Next.restype = bool + + if pe is None: + pe = PROCESSENTRY32() + pe.dwSize = sizeof(PROCESSENTRY32) + success = _Process32Next(hSnapshot, byref(pe)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return pe + +# BOOL WINAPI Thread32First( +# __in HANDLE hSnapshot, +# __inout LPTHREADENTRY32 lpte +# ); +def Thread32First(hSnapshot): + _Thread32First = windll.kernel32.Thread32First + _Thread32First.argtypes = [HANDLE, LPTHREADENTRY32] + _Thread32First.restype = bool + + te = THREADENTRY32() + te.dwSize = sizeof(THREADENTRY32) + success = _Thread32First(hSnapshot, byref(te)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return te + +# BOOL WINAPI Thread32Next( +# __in HANDLE hSnapshot, +# __out LPTHREADENTRY32 lpte +# ); +def Thread32Next(hSnapshot, te = None): + _Thread32Next = windll.kernel32.Thread32Next + _Thread32Next.argtypes = [HANDLE, LPTHREADENTRY32] + _Thread32Next.restype = bool + + if te is None: + te = THREADENTRY32() + te.dwSize = sizeof(THREADENTRY32) + success = _Thread32Next(hSnapshot, byref(te)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return te + +# BOOL WINAPI Module32First( +# __in HANDLE hSnapshot, +# __inout LPMODULEENTRY32 lpme +# ); +def Module32First(hSnapshot): + _Module32First = windll.kernel32.Module32First + _Module32First.argtypes = [HANDLE, LPMODULEENTRY32] + _Module32First.restype = bool + + me = 
MODULEENTRY32() + me.dwSize = sizeof(MODULEENTRY32) + success = _Module32First(hSnapshot, byref(me)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return me + +# BOOL WINAPI Module32Next( +# __in HANDLE hSnapshot, +# __out LPMODULEENTRY32 lpme +# ); +def Module32Next(hSnapshot, me = None): + _Module32Next = windll.kernel32.Module32Next + _Module32Next.argtypes = [HANDLE, LPMODULEENTRY32] + _Module32Next.restype = bool + + if me is None: + me = MODULEENTRY32() + me.dwSize = sizeof(MODULEENTRY32) + success = _Module32Next(hSnapshot, byref(me)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return me + +# BOOL WINAPI Heap32First( +# __inout LPHEAPENTRY32 lphe, +# __in DWORD th32ProcessID, +# __in ULONG_PTR th32HeapID +# ); +def Heap32First(th32ProcessID, th32HeapID): + _Heap32First = windll.kernel32.Heap32First + _Heap32First.argtypes = [LPHEAPENTRY32, DWORD, ULONG_PTR] + _Heap32First.restype = bool + + he = HEAPENTRY32() + he.dwSize = sizeof(HEAPENTRY32) + success = _Heap32First(byref(he), th32ProcessID, th32HeapID) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return he + +# BOOL WINAPI Heap32Next( +# __out LPHEAPENTRY32 lphe +# ); +def Heap32Next(he): + _Heap32Next = windll.kernel32.Heap32Next + _Heap32Next.argtypes = [LPHEAPENTRY32] + _Heap32Next.restype = bool + + he.dwSize = sizeof(HEAPENTRY32) + success = _Heap32Next(byref(he)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return he + +# BOOL WINAPI Heap32ListFirst( +# __in HANDLE hSnapshot, +# __inout LPHEAPLIST32 lphl +# ); +def Heap32ListFirst(hSnapshot): + _Heap32ListFirst = windll.kernel32.Heap32ListFirst + _Heap32ListFirst.argtypes = [HANDLE, LPHEAPLIST32] + _Heap32ListFirst.restype = bool + + hl = HEAPLIST32() + hl.dwSize = sizeof(HEAPLIST32) + success = _Heap32ListFirst(hSnapshot, byref(hl)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return hl + +# BOOL WINAPI Heap32ListNext( +# __in HANDLE hSnapshot, +# __out LPHEAPLIST32 lphl +# ); +def Heap32ListNext(hSnapshot, hl = None): + _Heap32ListNext = windll.kernel32.Heap32ListNext + _Heap32ListNext.argtypes = [HANDLE, LPHEAPLIST32] + _Heap32ListNext.restype = bool + + if hl is None: + hl = HEAPLIST32() + hl.dwSize = sizeof(HEAPLIST32) + success = _Heap32ListNext(hSnapshot, byref(hl)) + if not success: + if GetLastError() == ERROR_NO_MORE_FILES: + return None + raise ctypes.WinError() + return hl + +# BOOL WINAPI Toolhelp32ReadProcessMemory( +# __in DWORD th32ProcessID, +# __in LPCVOID lpBaseAddress, +# __out LPVOID lpBuffer, +# __in SIZE_T cbRead, +# __out SIZE_T lpNumberOfBytesRead +# ); +def Toolhelp32ReadProcessMemory(th32ProcessID, lpBaseAddress, cbRead): + _Toolhelp32ReadProcessMemory = windll.kernel32.Toolhelp32ReadProcessMemory + _Toolhelp32ReadProcessMemory.argtypes = [DWORD, LPVOID, LPVOID, SIZE_T, POINTER(SIZE_T)] + _Toolhelp32ReadProcessMemory.restype = bool + + lpBuffer = ctypes.create_string_buffer('', cbRead) + lpNumberOfBytesRead = SIZE_T(0) + success = _Toolhelp32ReadProcessMemory(th32ProcessID, lpBaseAddress, lpBuffer, cbRead, byref(lpNumberOfBytesRead)) + if not success and GetLastError() != ERROR_PARTIAL_COPY: + raise ctypes.WinError() + return str(lpBuffer.raw)[:lpNumberOfBytesRead.value] + +#------------------------------------------------------------------------------ +# 
Miscellaneous system information + +# BOOL WINAPI GetProcessDEPPolicy( +# __in HANDLE hProcess, +# __out LPDWORD lpFlags, +# __out PBOOL lpPermanent +# ); +# Contribution by ivanlef0u (http://ivanlef0u.fr/) +# XP SP3 and > only +def GetProcessDEPPolicy(hProcess): + _GetProcessDEPPolicy = windll.kernel32.GetProcessDEPPolicy + _GetProcessDEPPolicy.argtypes = [HANDLE, LPDWORD, PBOOL] + _GetProcessDEPPolicy.restype = bool + _GetProcessDEPPolicy.errcheck = RaiseIfZero + + lpFlags = DWORD(0) + lpPermanent = BOOL(0) + _GetProcessDEPPolicy(hProcess, byref(lpFlags), byref(lpPermanent)) + return (lpFlags.value, lpPermanent.value) + +# DWORD WINAPI GetCurrentProcessorNumber(void); +def GetCurrentProcessorNumber(): + _GetCurrentProcessorNumber = windll.kernel32.GetCurrentProcessorNumber + _GetCurrentProcessorNumber.argtypes = [] + _GetCurrentProcessorNumber.restype = DWORD + _GetCurrentProcessorNumber.errcheck = RaiseIfZero + return _GetCurrentProcessorNumber() + +# VOID WINAPI FlushProcessWriteBuffers(void); +def FlushProcessWriteBuffers(): + _FlushProcessWriteBuffers = windll.kernel32.FlushProcessWriteBuffers + _FlushProcessWriteBuffers.argtypes = [] + _FlushProcessWriteBuffers.restype = None + _FlushProcessWriteBuffers() + +# BOOL WINAPI GetLogicalProcessorInformation( +# __out PSYSTEM_LOGICAL_PROCESSOR_INFORMATION Buffer, +# __inout PDWORD ReturnLength +# ); + +# TO DO http://msdn.microsoft.com/en-us/library/ms683194(VS.85).aspx + +# BOOL WINAPI GetProcessIoCounters( +# __in HANDLE hProcess, +# __out PIO_COUNTERS lpIoCounters +# ); + +# TO DO http://msdn.microsoft.com/en-us/library/ms683218(VS.85).aspx + +# DWORD WINAPI GetGuiResources( +# __in HANDLE hProcess, +# __in DWORD uiFlags +# ); +def GetGuiResources(hProcess, uiFlags = GR_GDIOBJECTS): + _GetGuiResources = windll.kernel32.GetGuiResources + _GetGuiResources.argtypes = [HANDLE, DWORD] + _GetGuiResources.restype = DWORD + + dwCount = _GetGuiResources(hProcess, uiFlags) + if dwCount == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return dwCount + +# BOOL WINAPI GetProcessHandleCount( +# __in HANDLE hProcess, +# __inout PDWORD pdwHandleCount +# ); +def GetProcessHandleCount(hProcess): + _GetProcessHandleCount = windll.kernel32.GetProcessHandleCount + _GetProcessHandleCount.argtypes = [HANDLE, PDWORD] + _GetProcessHandleCount.restype = DWORD + _GetProcessHandleCount.errcheck = RaiseIfZero + + pdwHandleCount = DWORD(0) + _GetProcessHandleCount(hProcess, byref(pdwHandleCount)) + return pdwHandleCount.value + +# BOOL WINAPI GetProcessTimes( +# __in HANDLE hProcess, +# __out LPFILETIME lpCreationTime, +# __out LPFILETIME lpExitTime, +# __out LPFILETIME lpKernelTime, +# __out LPFILETIME lpUserTime +# ); +def GetProcessTimes(hProcess = None): + _GetProcessTimes = windll.kernel32.GetProcessTimes + _GetProcessTimes.argtypes = [HANDLE, LPFILETIME, LPFILETIME, LPFILETIME, LPFILETIME] + _GetProcessTimes.restype = bool + _GetProcessTimes.errcheck = RaiseIfZero + + if hProcess is None: + hProcess = GetCurrentProcess() + + CreationTime = FILETIME() + ExitTime = FILETIME() + KernelTime = FILETIME() + UserTime = FILETIME() + + _GetProcessTimes(hProcess, byref(CreationTime), byref(ExitTime), byref(KernelTime), byref(UserTime)) + + return (CreationTime, ExitTime, KernelTime, UserTime) + +# BOOL WINAPI FileTimeToSystemTime( +# __in const FILETIME *lpFileTime, +# __out LPSYSTEMTIME lpSystemTime +# ); +def FileTimeToSystemTime(lpFileTime): + _FileTimeToSystemTime = windll.kernel32.FileTimeToSystemTime + 
_FileTimeToSystemTime.argtypes = [LPFILETIME, LPSYSTEMTIME] + _FileTimeToSystemTime.restype = bool + _FileTimeToSystemTime.errcheck = RaiseIfZero + + if isinstance(lpFileTime, FILETIME): + FileTime = lpFileTime + else: + FileTime = FILETIME() + FileTime.dwLowDateTime = lpFileTime & 0xFFFFFFFF + FileTime.dwHighDateTime = lpFileTime >> 32 + SystemTime = SYSTEMTIME() + _FileTimeToSystemTime(byref(FileTime), byref(SystemTime)) + return SystemTime + +# void WINAPI GetSystemTimeAsFileTime( +# __out LPFILETIME lpSystemTimeAsFileTime +# ); +def GetSystemTimeAsFileTime(): + _GetSystemTimeAsFileTime = windll.kernel32.GetSystemTimeAsFileTime + _GetSystemTimeAsFileTime.argtypes = [LPFILETIME] + _GetSystemTimeAsFileTime.restype = None + + FileTime = FILETIME() + _GetSystemTimeAsFileTime(byref(FileTime)) + return FileTime + +#------------------------------------------------------------------------------ +# Global ATOM API + +# ATOM GlobalAddAtom( +# __in LPCTSTR lpString +# ); +def GlobalAddAtomA(lpString): + _GlobalAddAtomA = windll.kernel32.GlobalAddAtomA + _GlobalAddAtomA.argtypes = [LPSTR] + _GlobalAddAtomA.restype = ATOM + _GlobalAddAtomA.errcheck = RaiseIfZero + return _GlobalAddAtomA(lpString) + +def GlobalAddAtomW(lpString): + _GlobalAddAtomW = windll.kernel32.GlobalAddAtomW + _GlobalAddAtomW.argtypes = [LPWSTR] + _GlobalAddAtomW.restype = ATOM + _GlobalAddAtomW.errcheck = RaiseIfZero + return _GlobalAddAtomW(lpString) + +GlobalAddAtom = GuessStringType(GlobalAddAtomA, GlobalAddAtomW) + +# ATOM GlobalFindAtom( +# __in LPCTSTR lpString +# ); +def GlobalFindAtomA(lpString): + _GlobalFindAtomA = windll.kernel32.GlobalFindAtomA + _GlobalFindAtomA.argtypes = [LPSTR] + _GlobalFindAtomA.restype = ATOM + _GlobalFindAtomA.errcheck = RaiseIfZero + return _GlobalFindAtomA(lpString) + +def GlobalFindAtomW(lpString): + _GlobalFindAtomW = windll.kernel32.GlobalFindAtomW + _GlobalFindAtomW.argtypes = [LPWSTR] + _GlobalFindAtomW.restype = ATOM + _GlobalFindAtomW.errcheck = RaiseIfZero + return _GlobalFindAtomW(lpString) + +GlobalFindAtom = GuessStringType(GlobalFindAtomA, GlobalFindAtomW) + +# UINT GlobalGetAtomName( +# __in ATOM nAtom, +# __out LPTSTR lpBuffer, +# __in int nSize +# ); +def GlobalGetAtomNameA(nAtom): + _GlobalGetAtomNameA = windll.kernel32.GlobalGetAtomNameA + _GlobalGetAtomNameA.argtypes = [ATOM, LPSTR, ctypes.c_int] + _GlobalGetAtomNameA.restype = UINT + _GlobalGetAtomNameA.errcheck = RaiseIfZero + + nSize = 64 + while 1: + lpBuffer = ctypes.create_string_buffer("", nSize) + nCopied = _GlobalGetAtomNameA(nAtom, lpBuffer, nSize) + if nCopied < nSize - 1: + break + nSize = nSize + 64 + return lpBuffer.value + +def GlobalGetAtomNameW(nAtom): + _GlobalGetAtomNameW = windll.kernel32.GlobalGetAtomNameW + _GlobalGetAtomNameW.argtypes = [ATOM, LPWSTR, ctypes.c_int] + _GlobalGetAtomNameW.restype = UINT + _GlobalGetAtomNameW.errcheck = RaiseIfZero + + nSize = 64 + while 1: + lpBuffer = ctypes.create_unicode_buffer(u"", nSize) + nCopied = _GlobalGetAtomNameW(nAtom, lpBuffer, nSize) + if nCopied < nSize - 1: + break + nSize = nSize + 64 + return lpBuffer.value + +GlobalGetAtomName = GuessStringType(GlobalGetAtomNameA, GlobalGetAtomNameW) + +# ATOM GlobalDeleteAtom( +# __in ATOM nAtom +# ); +def GlobalDeleteAtom(nAtom): + _GlobalDeleteAtom = windll.kernel32.GlobalDeleteAtom + _GlobalDeleteAtom.argtypes + _GlobalDeleteAtom.restype + SetLastError(ERROR_SUCCESS) + _GlobalDeleteAtom(nAtom) + error = GetLastError() + if error != ERROR_SUCCESS: + raise ctypes.WinError(error) + 
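+## Usage sketch (illustrative only): the atom helpers above register a string
+## in the global atom table and read it back, while GetProcessTimes and
+## FileTimeToSystemTime turn the current process's creation FILETIME into a
+## SYSTEMTIME. The atom name below is made up for the example.
+##
+##     atom = GlobalAddAtom("ExampleAtomName")
+##     name = GlobalGetAtomName(atom)      # reads the string back
+##     GlobalDeleteAtom(atom)
+##
+##     created, exited, kernel, user = GetProcessTimes()
+##     started = FileTimeToSystemTime(created)
+##     print(started.wYear, started.wMonth, started.wDay)
+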
+#------------------------------------------------------------------------------ +# Wow64 + +# DWORD WINAPI Wow64SuspendThread( +# _In_ HANDLE hThread +# ); +def Wow64SuspendThread(hThread): + _Wow64SuspendThread = windll.kernel32.Wow64SuspendThread + _Wow64SuspendThread.argtypes = [HANDLE] + _Wow64SuspendThread.restype = DWORD + + previousCount = _Wow64SuspendThread(hThread) + if previousCount == DWORD(-1).value: + raise ctypes.WinError() + return previousCount + +# BOOLEAN WINAPI Wow64EnableWow64FsRedirection( +# __in BOOLEAN Wow64FsEnableRedirection +# ); +def Wow64EnableWow64FsRedirection(Wow64FsEnableRedirection): + """ + This function may not work reliably when there are nested calls. Therefore, + this function has been replaced by the L{Wow64DisableWow64FsRedirection} + and L{Wow64RevertWow64FsRedirection} functions. + + @see: U{http://msdn.microsoft.com/en-us/library/windows/desktop/aa365744(v=vs.85).aspx} + """ + _Wow64EnableWow64FsRedirection = windll.kernel32.Wow64EnableWow64FsRedirection + _Wow64EnableWow64FsRedirection.argtypes = [BOOLEAN] + _Wow64EnableWow64FsRedirection.restype = BOOLEAN + _Wow64EnableWow64FsRedirection.errcheck = RaiseIfZero + +# BOOL WINAPI Wow64DisableWow64FsRedirection( +# __out PVOID *OldValue +# ); +def Wow64DisableWow64FsRedirection(): + _Wow64DisableWow64FsRedirection = windll.kernel32.Wow64DisableWow64FsRedirection + _Wow64DisableWow64FsRedirection.argtypes = [PPVOID] + _Wow64DisableWow64FsRedirection.restype = BOOL + _Wow64DisableWow64FsRedirection.errcheck = RaiseIfZero + + OldValue = PVOID(None) + _Wow64DisableWow64FsRedirection(byref(OldValue)) + return OldValue + +# BOOL WINAPI Wow64RevertWow64FsRedirection( +# __in PVOID OldValue +# ); +def Wow64RevertWow64FsRedirection(OldValue): + _Wow64RevertWow64FsRedirection = windll.kernel32.Wow64RevertWow64FsRedirection + _Wow64RevertWow64FsRedirection.argtypes = [PVOID] + _Wow64RevertWow64FsRedirection.restype = BOOL + _Wow64RevertWow64FsRedirection.errcheck = RaiseIfZero + _Wow64RevertWow64FsRedirection(OldValue) + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== + +#============================================================================== +# Mark functions that Psyco cannot compile. +# In your programs, don't use psyco.full(). +# Call psyco.bind() on your main function instead. + +try: + import psyco + psyco.cannotcompile(WaitForDebugEvent) + psyco.cannotcompile(WaitForSingleObject) + psyco.cannotcompile(WaitForSingleObjectEx) + psyco.cannotcompile(WaitForMultipleObjects) + psyco.cannotcompile(WaitForMultipleObjectsEx) +except ImportError: + pass +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py new file mode 100644 index 00000000..39037661 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/ntdll.py @@ -0,0 +1,539 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for ntdll.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +_all.add('peb_teb') +#============================================================================== + +from winappdbg.win32.peb_teb import * + +#--- Types -------------------------------------------------------------------- + +SYSDBG_COMMAND = DWORD +PROCESSINFOCLASS = DWORD +THREADINFOCLASS = DWORD +FILE_INFORMATION_CLASS = DWORD + +#--- Constants ---------------------------------------------------------------- + +# DEP flags for ProcessExecuteFlags +MEM_EXECUTE_OPTION_ENABLE = 1 +MEM_EXECUTE_OPTION_DISABLE = 2 +MEM_EXECUTE_OPTION_ATL7_THUNK_EMULATION = 4 +MEM_EXECUTE_OPTION_PERMANENT = 8 + +# SYSTEM_INFORMATION_CLASS +# http://www.informit.com/articles/article.aspx?p=22442&seqNum=4 +SystemBasicInformation = 1 # 0x002C +SystemProcessorInformation = 2 # 0x000C +SystemPerformanceInformation = 3 # 0x0138 +SystemTimeInformation = 4 # 0x0020 +SystemPathInformation = 5 # not implemented +SystemProcessInformation = 6 # 0x00F8 + per process +SystemCallInformation = 7 # 0x0018 + (n * 0x0004) +SystemConfigurationInformation = 8 # 0x0018 +SystemProcessorCounters = 9 # 0x0030 per cpu +SystemGlobalFlag = 10 # 0x0004 +SystemInfo10 = 11 # not implemented +SystemModuleInformation = 12 # 0x0004 + (n * 0x011C) +SystemLockInformation = 13 # 0x0004 + (n * 0x0024) +SystemInfo13 = 14 # not implemented +SystemPagedPoolInformation = 15 # checked build only +SystemNonPagedPoolInformation = 16 # checked build only +SystemHandleInformation = 17 # 0x0004 + (n * 0x0010) +SystemObjectInformation = 18 # 0x0038+ + (n * 0x0030+) +SystemPagefileInformation = 19 # 0x0018+ per page file +SystemInstemulInformation = 20 # 0x0088 +SystemInfo20 = 21 # invalid info class +SystemCacheInformation = 22 # 0x0024 +SystemPoolTagInformation = 23 # 0x0004 + (n * 0x001C) 
+SystemProcessorStatistics = 24 # 0x0000, or 0x0018 per cpu +SystemDpcInformation = 25 # 0x0014 +SystemMemoryUsageInformation1 = 26 # checked build only +SystemLoadImage = 27 # 0x0018, set mode only +SystemUnloadImage = 28 # 0x0004, set mode only +SystemTimeAdjustmentInformation = 29 # 0x000C, 0x0008 writeable +SystemMemoryUsageInformation2 = 30 # checked build only +SystemInfo30 = 31 # checked build only +SystemInfo31 = 32 # checked build only +SystemCrashDumpInformation = 33 # 0x0004 +SystemExceptionInformation = 34 # 0x0010 +SystemCrashDumpStateInformation = 35 # 0x0008 +SystemDebuggerInformation = 36 # 0x0002 +SystemThreadSwitchInformation = 37 # 0x0030 +SystemRegistryQuotaInformation = 38 # 0x000C +SystemLoadDriver = 39 # 0x0008, set mode only +SystemPrioritySeparationInformation = 40 # 0x0004, set mode only +SystemInfo40 = 41 # not implemented +SystemInfo41 = 42 # not implemented +SystemInfo42 = 43 # invalid info class +SystemInfo43 = 44 # invalid info class +SystemTimeZoneInformation = 45 # 0x00AC +SystemLookasideInformation = 46 # n * 0x0020 +# info classes specific to Windows 2000 +# WTS = Windows Terminal Server +SystemSetTimeSlipEvent = 47 # set mode only +SystemCreateSession = 48 # WTS, set mode only +SystemDeleteSession = 49 # WTS, set mode only +SystemInfo49 = 50 # invalid info class +SystemRangeStartInformation = 51 # 0x0004 +SystemVerifierInformation = 52 # 0x0068 +SystemAddVerifier = 53 # set mode only +SystemSessionProcessesInformation = 54 # WTS + +# NtQueryInformationProcess constants (from MSDN) +##ProcessBasicInformation = 0 +##ProcessDebugPort = 7 +##ProcessWow64Information = 26 +##ProcessImageFileName = 27 + +# PROCESS_INFORMATION_CLASS +# http://undocumented.ntinternals.net/UserMode/Undocumented%20Functions/NT%20Objects/Process/PROCESS_INFORMATION_CLASS.html +ProcessBasicInformation = 0 +ProcessQuotaLimits = 1 +ProcessIoCounters = 2 +ProcessVmCounters = 3 +ProcessTimes = 4 +ProcessBasePriority = 5 +ProcessRaisePriority = 6 +ProcessDebugPort = 7 +ProcessExceptionPort = 8 +ProcessAccessToken = 9 +ProcessLdtInformation = 10 +ProcessLdtSize = 11 +ProcessDefaultHardErrorMode = 12 +ProcessIoPortHandlers = 13 +ProcessPooledUsageAndLimits = 14 +ProcessWorkingSetWatch = 15 +ProcessUserModeIOPL = 16 +ProcessEnableAlignmentFaultFixup = 17 +ProcessPriorityClass = 18 +ProcessWx86Information = 19 +ProcessHandleCount = 20 +ProcessAffinityMask = 21 +ProcessPriorityBoost = 22 + +ProcessWow64Information = 26 +ProcessImageFileName = 27 + +# http://www.codeproject.com/KB/security/AntiReverseEngineering.aspx +ProcessDebugObjectHandle = 30 + +ProcessExecuteFlags = 34 + +# THREAD_INFORMATION_CLASS +ThreadBasicInformation = 0 +ThreadTimes = 1 +ThreadPriority = 2 +ThreadBasePriority = 3 +ThreadAffinityMask = 4 +ThreadImpersonationToken = 5 +ThreadDescriptorTableEntry = 6 +ThreadEnableAlignmentFaultFixup = 7 +ThreadEventPair = 8 +ThreadQuerySetWin32StartAddress = 9 +ThreadZeroTlsCell = 10 +ThreadPerformanceCount = 11 +ThreadAmILastThread = 12 +ThreadIdealProcessor = 13 +ThreadPriorityBoost = 14 +ThreadSetTlsArrayAddress = 15 +ThreadIsIoPending = 16 +ThreadHideFromDebugger = 17 + +# OBJECT_INFORMATION_CLASS +ObjectBasicInformation = 0 +ObjectNameInformation = 1 +ObjectTypeInformation = 2 +ObjectAllTypesInformation = 3 +ObjectHandleInformation = 4 + +# FILE_INFORMATION_CLASS +FileDirectoryInformation = 1 +FileFullDirectoryInformation = 2 +FileBothDirectoryInformation = 3 +FileBasicInformation = 4 +FileStandardInformation = 5 +FileInternalInformation = 6 +FileEaInformation = 7 
+FileAccessInformation = 8 +FileNameInformation = 9 +FileRenameInformation = 10 +FileLinkInformation = 11 +FileNamesInformation = 12 +FileDispositionInformation = 13 +FilePositionInformation = 14 +FileFullEaInformation = 15 +FileModeInformation = 16 +FileAlignmentInformation = 17 +FileAllInformation = 18 +FileAllocationInformation = 19 +FileEndOfFileInformation = 20 +FileAlternateNameInformation = 21 +FileStreamInformation = 22 +FilePipeInformation = 23 +FilePipeLocalInformation = 24 +FilePipeRemoteInformation = 25 +FileMailslotQueryInformation = 26 +FileMailslotSetInformation = 27 +FileCompressionInformation = 28 +FileCopyOnWriteInformation = 29 +FileCompletionInformation = 30 +FileMoveClusterInformation = 31 +FileQuotaInformation = 32 +FileReparsePointInformation = 33 +FileNetworkOpenInformation = 34 +FileObjectIdInformation = 35 +FileTrackingInformation = 36 +FileOleDirectoryInformation = 37 +FileContentIndexInformation = 38 +FileInheritContentIndexInformation = 37 +FileOleInformation = 39 +FileMaximumInformation = 40 + +# From http://www.nirsoft.net/kernel_struct/vista/EXCEPTION_DISPOSITION.html +# typedef enum _EXCEPTION_DISPOSITION +# { +# ExceptionContinueExecution = 0, +# ExceptionContinueSearch = 1, +# ExceptionNestedException = 2, +# ExceptionCollidedUnwind = 3 +# } EXCEPTION_DISPOSITION; +ExceptionContinueExecution = 0 +ExceptionContinueSearch = 1 +ExceptionNestedException = 2 +ExceptionCollidedUnwind = 3 + +#--- PROCESS_BASIC_INFORMATION structure -------------------------------------- + +# From MSDN: +# +# typedef struct _PROCESS_BASIC_INFORMATION { +# PVOID Reserved1; +# PPEB PebBaseAddress; +# PVOID Reserved2[2]; +# ULONG_PTR UniqueProcessId; +# PVOID Reserved3; +# } PROCESS_BASIC_INFORMATION; +##class PROCESS_BASIC_INFORMATION(Structure): +## _fields_ = [ +## ("Reserved1", PVOID), +## ("PebBaseAddress", PPEB), +## ("Reserved2", PVOID * 2), +## ("UniqueProcessId", ULONG_PTR), +## ("Reserved3", PVOID), +##] + +# From http://catch22.net/tuts/tips2 +# (Only valid for 32 bits) +# +# typedef struct +# { +# ULONG ExitStatus; +# PVOID PebBaseAddress; +# ULONG AffinityMask; +# ULONG BasePriority; +# ULONG_PTR UniqueProcessId; +# ULONG_PTR InheritedFromUniqueProcessId; +# } PROCESS_BASIC_INFORMATION; + +# My own definition follows: +class PROCESS_BASIC_INFORMATION(Structure): + _fields_ = [ + ("ExitStatus", SIZE_T), + ("PebBaseAddress", PVOID), # PPEB + ("AffinityMask", KAFFINITY), + ("BasePriority", SDWORD), + ("UniqueProcessId", ULONG_PTR), + ("InheritedFromUniqueProcessId", ULONG_PTR), +] + +#--- THREAD_BASIC_INFORMATION structure --------------------------------------- + +# From http://undocumented.ntinternals.net/UserMode/Structures/THREAD_BASIC_INFORMATION.html +# +# typedef struct _THREAD_BASIC_INFORMATION { +# NTSTATUS ExitStatus; +# PVOID TebBaseAddress; +# CLIENT_ID ClientId; +# KAFFINITY AffinityMask; +# KPRIORITY Priority; +# KPRIORITY BasePriority; +# } THREAD_BASIC_INFORMATION, *PTHREAD_BASIC_INFORMATION; +class THREAD_BASIC_INFORMATION(Structure): + _fields_ = [ + ("ExitStatus", NTSTATUS), + ("TebBaseAddress", PVOID), # PTEB + ("ClientId", CLIENT_ID), + ("AffinityMask", KAFFINITY), + ("Priority", SDWORD), + ("BasePriority", SDWORD), +] + +#--- FILE_NAME_INFORMATION structure ------------------------------------------ + +# typedef struct _FILE_NAME_INFORMATION { +# ULONG FileNameLength; +# WCHAR FileName[1]; +# } FILE_NAME_INFORMATION, *PFILE_NAME_INFORMATION; +class FILE_NAME_INFORMATION(Structure): + _fields_ = [ + ("FileNameLength", ULONG), + ("FileName", WCHAR * 1), 
+ ] + +#--- SYSDBG_MSR structure and constants --------------------------------------- + +SysDbgReadMsr = 16 +SysDbgWriteMsr = 17 + +class SYSDBG_MSR(Structure): + _fields_ = [ + ("Address", ULONG), + ("Data", ULONGLONG), +] + +#--- IO_STATUS_BLOCK structure ------------------------------------------------ + +# typedef struct _IO_STATUS_BLOCK { +# union { +# NTSTATUS Status; +# PVOID Pointer; +# }; +# ULONG_PTR Information; +# } IO_STATUS_BLOCK, *PIO_STATUS_BLOCK; +class IO_STATUS_BLOCK(Structure): + _fields_ = [ + ("Status", NTSTATUS), + ("Information", ULONG_PTR), + ] + def __get_Pointer(self): + return PVOID(self.Status) + def __set_Pointer(self, ptr): + self.Status = ptr.value + Pointer = property(__get_Pointer, __set_Pointer) + +PIO_STATUS_BLOCK = POINTER(IO_STATUS_BLOCK) + +#--- ntdll.dll ---------------------------------------------------------------- + +# ULONG WINAPI RtlNtStatusToDosError( +# __in NTSTATUS Status +# ); +def RtlNtStatusToDosError(Status): + _RtlNtStatusToDosError = windll.ntdll.RtlNtStatusToDosError + _RtlNtStatusToDosError.argtypes = [NTSTATUS] + _RtlNtStatusToDosError.restype = ULONG + return _RtlNtStatusToDosError(Status) + +# NTSYSAPI NTSTATUS NTAPI NtSystemDebugControl( +# IN SYSDBG_COMMAND Command, +# IN PVOID InputBuffer OPTIONAL, +# IN ULONG InputBufferLength, +# OUT PVOID OutputBuffer OPTIONAL, +# IN ULONG OutputBufferLength, +# OUT PULONG ReturnLength OPTIONAL +# ); +def NtSystemDebugControl(Command, InputBuffer = None, InputBufferLength = None, OutputBuffer = None, OutputBufferLength = None): + _NtSystemDebugControl = windll.ntdll.NtSystemDebugControl + _NtSystemDebugControl.argtypes = [SYSDBG_COMMAND, PVOID, ULONG, PVOID, ULONG, PULONG] + _NtSystemDebugControl.restype = NTSTATUS + + # Validate the input buffer + if InputBuffer is None: + if InputBufferLength is None: + InputBufferLength = 0 + else: + raise ValueError( + "Invalid call to NtSystemDebugControl: " + "input buffer length given but no input buffer!") + else: + if InputBufferLength is None: + InputBufferLength = sizeof(InputBuffer) + InputBuffer = byref(InputBuffer) + + # Validate the output buffer + if OutputBuffer is None: + if OutputBufferLength is None: + OutputBufferLength = 0 + else: + OutputBuffer = ctypes.create_string_buffer("", OutputBufferLength) + elif OutputBufferLength is None: + OutputBufferLength = sizeof(OutputBuffer) + + # Make the call (with an output buffer) + if OutputBuffer is not None: + ReturnLength = ULONG(0) + ntstatus = _NtSystemDebugControl(Command, InputBuffer, InputBufferLength, byref(OutputBuffer), OutputBufferLength, byref(ReturnLength)) + if ntstatus != 0: + raise ctypes.WinError( RtlNtStatusToDosError(ntstatus) ) + ReturnLength = ReturnLength.value + if ReturnLength != OutputBufferLength: + raise ctypes.WinError(ERROR_BAD_LENGTH) + return OutputBuffer, ReturnLength + + # Make the call (without an output buffer) + ntstatus = _NtSystemDebugControl(Command, InputBuffer, InputBufferLength, OutputBuffer, OutputBufferLength, None) + if ntstatus != 0: + raise ctypes.WinError( RtlNtStatusToDosError(ntstatus) ) + +ZwSystemDebugControl = NtSystemDebugControl + +# NTSTATUS WINAPI NtQueryInformationProcess( +# __in HANDLE ProcessHandle, +# __in PROCESSINFOCLASS ProcessInformationClass, +# __out PVOID ProcessInformation, +# __in ULONG ProcessInformationLength, +# __out_opt PULONG ReturnLength +# ); +def NtQueryInformationProcess(ProcessHandle, ProcessInformationClass, ProcessInformationLength = None): + _NtQueryInformationProcess = windll.ntdll.NtQueryInformationProcess + 
_NtQueryInformationProcess.argtypes = [HANDLE, PROCESSINFOCLASS, PVOID, ULONG, PULONG] + _NtQueryInformationProcess.restype = NTSTATUS + if ProcessInformationLength is not None: + ProcessInformation = ctypes.create_string_buffer("", ProcessInformationLength) + else: + if ProcessInformationClass == ProcessBasicInformation: + ProcessInformation = PROCESS_BASIC_INFORMATION() + ProcessInformationLength = sizeof(PROCESS_BASIC_INFORMATION) + elif ProcessInformationClass == ProcessImageFileName: + unicode_buffer = ctypes.create_unicode_buffer(u"", 0x1000) + ProcessInformation = UNICODE_STRING(0, 0x1000, addressof(unicode_buffer)) + ProcessInformationLength = sizeof(UNICODE_STRING) + elif ProcessInformationClass in (ProcessDebugPort, ProcessWow64Information, ProcessWx86Information, ProcessHandleCount, ProcessPriorityBoost): + ProcessInformation = DWORD() + ProcessInformationLength = sizeof(DWORD) + else: + raise Exception("Unknown ProcessInformationClass, use an explicit ProcessInformationLength value instead") + ReturnLength = ULONG(0) + ntstatus = _NtQueryInformationProcess(ProcessHandle, ProcessInformationClass, byref(ProcessInformation), ProcessInformationLength, byref(ReturnLength)) + if ntstatus != 0: + raise ctypes.WinError( RtlNtStatusToDosError(ntstatus) ) + if ProcessInformationClass == ProcessBasicInformation: + retval = ProcessInformation + elif ProcessInformationClass in (ProcessDebugPort, ProcessWow64Information, ProcessWx86Information, ProcessHandleCount, ProcessPriorityBoost): + retval = ProcessInformation.value + elif ProcessInformationClass == ProcessImageFileName: + vptr = ctypes.c_void_p(ProcessInformation.Buffer) + cptr = ctypes.cast( vptr, ctypes.c_wchar * ProcessInformation.Length ) + retval = cptr.contents.raw + else: + retval = ProcessInformation.raw[:ReturnLength.value] + return retval + +ZwQueryInformationProcess = NtQueryInformationProcess + +# NTSTATUS WINAPI NtQueryInformationThread( +# __in HANDLE ThreadHandle, +# __in THREADINFOCLASS ThreadInformationClass, +# __out PVOID ThreadInformation, +# __in ULONG ThreadInformationLength, +# __out_opt PULONG ReturnLength +# ); +def NtQueryInformationThread(ThreadHandle, ThreadInformationClass, ThreadInformationLength = None): + _NtQueryInformationThread = windll.ntdll.NtQueryInformationThread + _NtQueryInformationThread.argtypes = [HANDLE, THREADINFOCLASS, PVOID, ULONG, PULONG] + _NtQueryInformationThread.restype = NTSTATUS + if ThreadInformationLength is not None: + ThreadInformation = ctypes.create_string_buffer("", ThreadInformationLength) + else: + if ThreadInformationClass == ThreadBasicInformation: + ThreadInformation = THREAD_BASIC_INFORMATION() + elif ThreadInformationClass == ThreadHideFromDebugger: + ThreadInformation = BOOLEAN() + elif ThreadInformationClass == ThreadQuerySetWin32StartAddress: + ThreadInformation = PVOID() + elif ThreadInformationClass in (ThreadAmILastThread, ThreadPriorityBoost): + ThreadInformation = DWORD() + elif ThreadInformationClass == ThreadPerformanceCount: + ThreadInformation = LONGLONG() # LARGE_INTEGER + else: + raise Exception("Unknown ThreadInformationClass, use an explicit ThreadInformationLength value instead") + ThreadInformationLength = sizeof(ThreadInformation) + ReturnLength = ULONG(0) + ntstatus = _NtQueryInformationThread(ThreadHandle, ThreadInformationClass, byref(ThreadInformation), ThreadInformationLength, byref(ReturnLength)) + if ntstatus != 0: + raise ctypes.WinError( RtlNtStatusToDosError(ntstatus) ) + if ThreadInformationClass == ThreadBasicInformation: + retval = 
ThreadInformation + elif ThreadInformationClass == ThreadHideFromDebugger: + retval = bool(ThreadInformation.value) + elif ThreadInformationClass in (ThreadQuerySetWin32StartAddress, ThreadAmILastThread, ThreadPriorityBoost, ThreadPerformanceCount): + retval = ThreadInformation.value + else: + retval = ThreadInformation.raw[:ReturnLength.value] + return retval + +ZwQueryInformationThread = NtQueryInformationThread + +# NTSTATUS +# NtQueryInformationFile( +# IN HANDLE FileHandle, +# OUT PIO_STATUS_BLOCK IoStatusBlock, +# OUT PVOID FileInformation, +# IN ULONG Length, +# IN FILE_INFORMATION_CLASS FileInformationClass +# ); +def NtQueryInformationFile(FileHandle, FileInformationClass, FileInformation, Length): + _NtQueryInformationFile = windll.ntdll.NtQueryInformationFile + _NtQueryInformationFile.argtypes = [HANDLE, PIO_STATUS_BLOCK, PVOID, ULONG, DWORD] + _NtQueryInformationFile.restype = NTSTATUS + IoStatusBlock = IO_STATUS_BLOCK() + ntstatus = _NtQueryInformationFile(FileHandle, byref(IoStatusBlock), byref(FileInformation), Length, FileInformationClass) + if ntstatus != 0: + raise ctypes.WinError( RtlNtStatusToDosError(ntstatus) ) + return IoStatusBlock + +ZwQueryInformationFile = NtQueryInformationFile + +# DWORD STDCALL CsrGetProcessId (VOID); +def CsrGetProcessId(): + _CsrGetProcessId = windll.ntdll.CsrGetProcessId + _CsrGetProcessId.argtypes = [] + _CsrGetProcessId.restype = DWORD + return _CsrGetProcessId() + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py new file mode 100644 index 00000000..9d101c70 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/peb_teb.py @@ -0,0 +1,3435 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +PEB and TEB structures, constants and data types. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.version import os + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- PEB and TEB structures, constants and data types ------------------------- + +# From http://www.nirsoft.net/kernel_struct/vista/CLIENT_ID.html +# +# typedef struct _CLIENT_ID +# { +# PVOID UniqueProcess; +# PVOID UniqueThread; +# } CLIENT_ID, *PCLIENT_ID; +class CLIENT_ID(Structure): + _fields_ = [ + ("UniqueProcess", PVOID), + ("UniqueThread", PVOID), +] + +# From MSDN: +# +# typedef struct _LDR_DATA_TABLE_ENTRY { +# BYTE Reserved1[2]; +# LIST_ENTRY InMemoryOrderLinks; +# PVOID Reserved2[2]; +# PVOID DllBase; +# PVOID EntryPoint; +# PVOID Reserved3; +# UNICODE_STRING FullDllName; +# BYTE Reserved4[8]; +# PVOID Reserved5[3]; +# union { +# ULONG CheckSum; +# PVOID Reserved6; +# }; +# ULONG TimeDateStamp; +# } LDR_DATA_TABLE_ENTRY, *PLDR_DATA_TABLE_ENTRY; +##class LDR_DATA_TABLE_ENTRY(Structure): +## _fields_ = [ +## ("Reserved1", BYTE * 2), +## ("InMemoryOrderLinks", LIST_ENTRY), +## ("Reserved2", PVOID * 2), +## ("DllBase", PVOID), +## ("EntryPoint", PVOID), +## ("Reserved3", PVOID), +## ("FullDllName", UNICODE_STRING), +## ("Reserved4", BYTE * 8), +## ("Reserved5", PVOID * 3), +## ("CheckSum", ULONG), +## ("TimeDateStamp", ULONG), +##] + +# From MSDN: +# +# typedef struct _PEB_LDR_DATA { +# BYTE Reserved1[8]; +# PVOID Reserved2[3]; +# LIST_ENTRY InMemoryOrderModuleList; +# } PEB_LDR_DATA, +# *PPEB_LDR_DATA; +##class PEB_LDR_DATA(Structure): +## _fields_ = [ +## ("Reserved1", BYTE), +## ("Reserved2", PVOID), +## ("InMemoryOrderModuleList", LIST_ENTRY), +##] + +# From http://undocumented.ntinternals.net/UserMode/Structures/RTL_USER_PROCESS_PARAMETERS.html +# typedef struct _RTL_USER_PROCESS_PARAMETERS { +# ULONG MaximumLength; +# ULONG Length; +# ULONG Flags; +# ULONG DebugFlags; +# PVOID ConsoleHandle; +# ULONG ConsoleFlags; +# HANDLE StdInputHandle; +# HANDLE StdOutputHandle; +# HANDLE StdErrorHandle; +# UNICODE_STRING CurrentDirectoryPath; +# HANDLE CurrentDirectoryHandle; +# UNICODE_STRING DllPath; +# UNICODE_STRING ImagePathName; +# UNICODE_STRING CommandLine; +# PVOID Environment; +# ULONG StartingPositionLeft; +# ULONG StartingPositionTop; +# ULONG Width; +# ULONG Height; +# ULONG CharWidth; +# ULONG CharHeight; +# ULONG ConsoleTextAttributes; +# ULONG WindowFlags; +# ULONG ShowWindowFlags; +# UNICODE_STRING WindowTitle; +# UNICODE_STRING DesktopName; +# UNICODE_STRING ShellInfo; +# UNICODE_STRING RuntimeData; +# RTL_DRIVE_LETTER_CURDIR DLCurrentDirectory[0x20]; +# } RTL_USER_PROCESS_PARAMETERS, *PRTL_USER_PROCESS_PARAMETERS; + +# kd> dt _RTL_USER_PROCESS_PARAMETERS +# ntdll!_RTL_USER_PROCESS_PARAMETERS +# +0x000 
MaximumLength : Uint4B +# +0x004 Length : Uint4B +# +0x008 Flags : Uint4B +# +0x00c DebugFlags : Uint4B +# +0x010 ConsoleHandle : Ptr32 Void +# +0x014 ConsoleFlags : Uint4B +# +0x018 StandardInput : Ptr32 Void +# +0x01c StandardOutput : Ptr32 Void +# +0x020 StandardError : Ptr32 Void +# +0x024 CurrentDirectory : _CURDIR +# +0x030 DllPath : _UNICODE_STRING +# +0x038 ImagePathName : _UNICODE_STRING +# +0x040 CommandLine : _UNICODE_STRING +# +0x048 Environment : Ptr32 Void +# +0x04c StartingX : Uint4B +# +0x050 StartingY : Uint4B +# +0x054 CountX : Uint4B +# +0x058 CountY : Uint4B +# +0x05c CountCharsX : Uint4B +# +0x060 CountCharsY : Uint4B +# +0x064 FillAttribute : Uint4B +# +0x068 WindowFlags : Uint4B +# +0x06c ShowWindowFlags : Uint4B +# +0x070 WindowTitle : _UNICODE_STRING +# +0x078 DesktopInfo : _UNICODE_STRING +# +0x080 ShellInfo : _UNICODE_STRING +# +0x088 RuntimeData : _UNICODE_STRING +# +0x090 CurrentDirectores : [32] _RTL_DRIVE_LETTER_CURDIR +# +0x290 EnvironmentSize : Uint4B +##class RTL_USER_PROCESS_PARAMETERS(Structure): +## _fields_ = [ +## ("MaximumLength", ULONG), +## ("Length", ULONG), +## ("Flags", ULONG), +## ("DebugFlags", ULONG), +## ("ConsoleHandle", PVOID), +## ("ConsoleFlags", ULONG), +## ("StandardInput", HANDLE), +## ("StandardOutput", HANDLE), +## ("StandardError", HANDLE), +## ("CurrentDirectory", CURDIR), +## ("DllPath", UNICODE_STRING), +## ("ImagePathName", UNICODE_STRING), +## ("CommandLine", UNICODE_STRING), +## ("Environment", PVOID), +## ("StartingX", ULONG), +## ("StartingY", ULONG), +## ("CountX", ULONG), +## ("CountY", ULONG), +## ("CountCharsX", ULONG), +## ("CountCharsY", ULONG), +## ("FillAttribute", ULONG), +## ("WindowFlags", ULONG), +## ("ShowWindowFlags", ULONG), +## ("WindowTitle", UNICODE_STRING), +## ("DesktopInfo", UNICODE_STRING), +## ("ShellInfo", UNICODE_STRING), +## ("RuntimeData", UNICODE_STRING), +## ("CurrentDirectores", RTL_DRIVE_LETTER_CURDIR * 32), # typo here? +## +## # Windows 2008 and Vista +## ("EnvironmentSize", ULONG), +##] +## @property +## def CurrentDirectories(self): +## return self.CurrentDirectores + +# From MSDN: +# +# typedef struct _RTL_USER_PROCESS_PARAMETERS { +# BYTE Reserved1[16]; +# PVOID Reserved2[10]; +# UNICODE_STRING ImagePathName; +# UNICODE_STRING CommandLine; +# } RTL_USER_PROCESS_PARAMETERS, +# *PRTL_USER_PROCESS_PARAMETERS; +class RTL_USER_PROCESS_PARAMETERS(Structure): + _fields_ = [ + ("Reserved1", BYTE * 16), + ("Reserved2", PVOID * 10), + ("ImagePathName", UNICODE_STRING), + ("CommandLine", UNICODE_STRING), + ("Environment", PVOID), # undocumented! + # + # XXX TODO + # This structure should be defined with all undocumented fields for + # each version of Windows, just like it's being done for PEB and TEB. 
+ # +] + +PPS_POST_PROCESS_INIT_ROUTINE = PVOID + +#from MSDN: +# +# typedef struct _PEB { +# BYTE Reserved1[2]; +# BYTE BeingDebugged; +# BYTE Reserved2[21]; +# PPEB_LDR_DATA LoaderData; +# PRTL_USER_PROCESS_PARAMETERS ProcessParameters; +# BYTE Reserved3[520]; +# PPS_POST_PROCESS_INIT_ROUTINE PostProcessInitRoutine; +# BYTE Reserved4[136]; +# ULONG SessionId; +# } PEB; +##class PEB(Structure): +## _fields_ = [ +## ("Reserved1", BYTE * 2), +## ("BeingDebugged", BYTE), +## ("Reserved2", BYTE * 21), +## ("LoaderData", PVOID, # PPEB_LDR_DATA +## ("ProcessParameters", PVOID, # PRTL_USER_PROCESS_PARAMETERS +## ("Reserved3", BYTE * 520), +## ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), +## ("Reserved4", BYTE), +## ("SessionId", ULONG), +##] + +# from MSDN: +# +# typedef struct _TEB { +# BYTE Reserved1[1952]; +# PVOID Reserved2[412]; +# PVOID TlsSlots[64]; +# BYTE Reserved3[8]; +# PVOID Reserved4[26]; +# PVOID ReservedForOle; +# PVOID Reserved5[4]; +# PVOID TlsExpansionSlots; +# } TEB, +# *PTEB; +##class TEB(Structure): +## _fields_ = [ +## ("Reserved1", PVOID * 1952), +## ("Reserved2", PVOID * 412), +## ("TlsSlots", PVOID * 64), +## ("Reserved3", BYTE * 8), +## ("Reserved4", PVOID * 26), +## ("ReservedForOle", PVOID), +## ("Reserved5", PVOID * 4), +## ("TlsExpansionSlots", PVOID), +##] + +# from http://undocumented.ntinternals.net/UserMode/Structures/LDR_MODULE.html +# +# typedef struct _LDR_MODULE { +# LIST_ENTRY InLoadOrderModuleList; +# LIST_ENTRY InMemoryOrderModuleList; +# LIST_ENTRY InInitializationOrderModuleList; +# PVOID BaseAddress; +# PVOID EntryPoint; +# ULONG SizeOfImage; +# UNICODE_STRING FullDllName; +# UNICODE_STRING BaseDllName; +# ULONG Flags; +# SHORT LoadCount; +# SHORT TlsIndex; +# LIST_ENTRY HashTableEntry; +# ULONG TimeDateStamp; +# } LDR_MODULE, *PLDR_MODULE; +class LDR_MODULE(Structure): + _fields_ = [ + ("InLoadOrderModuleList", LIST_ENTRY), + ("InMemoryOrderModuleList", LIST_ENTRY), + ("InInitializationOrderModuleList", LIST_ENTRY), + ("BaseAddress", PVOID), + ("EntryPoint", PVOID), + ("SizeOfImage", ULONG), + ("FullDllName", UNICODE_STRING), + ("BaseDllName", UNICODE_STRING), + ("Flags", ULONG), + ("LoadCount", SHORT), + ("TlsIndex", SHORT), + ("HashTableEntry", LIST_ENTRY), + ("TimeDateStamp", ULONG), +] + +# from http://undocumented.ntinternals.net/UserMode/Structures/PEB_LDR_DATA.html +# +# typedef struct _PEB_LDR_DATA { +# ULONG Length; +# BOOLEAN Initialized; +# PVOID SsHandle; +# LIST_ENTRY InLoadOrderModuleList; +# LIST_ENTRY InMemoryOrderModuleList; +# LIST_ENTRY InInitializationOrderModuleList; +# } PEB_LDR_DATA, *PPEB_LDR_DATA; +class PEB_LDR_DATA(Structure): + _fields_ = [ + ("Length", ULONG), + ("Initialized", BOOLEAN), + ("SsHandle", PVOID), + ("InLoadOrderModuleList", LIST_ENTRY), + ("InMemoryOrderModuleList", LIST_ENTRY), + ("InInitializationOrderModuleList", LIST_ENTRY), +] + +# From http://undocumented.ntinternals.net/UserMode/Undocumented%20Functions/NT%20Objects/Process/PEB_FREE_BLOCK.html +# +# typedef struct _PEB_FREE_BLOCK { +# PEB_FREE_BLOCK *Next; +# ULONG Size; +# } PEB_FREE_BLOCK, *PPEB_FREE_BLOCK; +class PEB_FREE_BLOCK(Structure): + pass + +##PPEB_FREE_BLOCK = POINTER(PEB_FREE_BLOCK) +PPEB_FREE_BLOCK = PVOID + +PEB_FREE_BLOCK._fields_ = [ + ("Next", PPEB_FREE_BLOCK), + ("Size", ULONG), +] + +# From http://undocumented.ntinternals.net/UserMode/Structures/RTL_DRIVE_LETTER_CURDIR.html +# +# typedef struct _RTL_DRIVE_LETTER_CURDIR { +# USHORT Flags; +# USHORT Length; +# ULONG TimeStamp; +# UNICODE_STRING DosPath; +# } 
RTL_DRIVE_LETTER_CURDIR, *PRTL_DRIVE_LETTER_CURDIR; +class RTL_DRIVE_LETTER_CURDIR(Structure): + _fields_ = [ + ("Flags", USHORT), + ("Length", USHORT), + ("TimeStamp", ULONG), + ("DosPath", UNICODE_STRING), +] + +# From http://www.nirsoft.net/kernel_struct/vista/CURDIR.html +# +# typedef struct _CURDIR +# { +# UNICODE_STRING DosPath; +# PVOID Handle; +# } CURDIR, *PCURDIR; +class CURDIR(Structure): + _fields_ = [ + ("DosPath", UNICODE_STRING), + ("Handle", PVOID), +] + +# From http://www.nirsoft.net/kernel_struct/vista/RTL_CRITICAL_SECTION_DEBUG.html +# +# typedef struct _RTL_CRITICAL_SECTION_DEBUG +# { +# WORD Type; +# WORD CreatorBackTraceIndex; +# PRTL_CRITICAL_SECTION CriticalSection; +# LIST_ENTRY ProcessLocksList; +# ULONG EntryCount; +# ULONG ContentionCount; +# ULONG Flags; +# WORD CreatorBackTraceIndexHigh; +# WORD SpareUSHORT; +# } RTL_CRITICAL_SECTION_DEBUG, *PRTL_CRITICAL_SECTION_DEBUG; +# +# From http://www.nirsoft.net/kernel_struct/vista/RTL_CRITICAL_SECTION.html +# +# typedef struct _RTL_CRITICAL_SECTION +# { +# PRTL_CRITICAL_SECTION_DEBUG DebugInfo; +# LONG LockCount; +# LONG RecursionCount; +# PVOID OwningThread; +# PVOID LockSemaphore; +# ULONG SpinCount; +# } RTL_CRITICAL_SECTION, *PRTL_CRITICAL_SECTION; +# +class RTL_CRITICAL_SECTION(Structure): + _fields_ = [ + ("DebugInfo", PVOID), # PRTL_CRITICAL_SECTION_DEBUG + ("LockCount", LONG), + ("RecursionCount", LONG), + ("OwningThread", PVOID), + ("LockSemaphore", PVOID), + ("SpinCount", ULONG), +] +class RTL_CRITICAL_SECTION_DEBUG(Structure): + _fields_ = [ + ("Type", WORD), + ("CreatorBackTraceIndex", WORD), + ("CriticalSection", PVOID), # PRTL_CRITICAL_SECTION + ("ProcessLocksList", LIST_ENTRY), + ("EntryCount", ULONG), + ("ContentionCount", ULONG), + ("Flags", ULONG), + ("CreatorBackTraceIndexHigh", WORD), + ("SpareUSHORT", WORD), +] +PRTL_CRITICAL_SECTION = POINTER(RTL_CRITICAL_SECTION) +PRTL_CRITICAL_SECTION_DEBUG = POINTER(RTL_CRITICAL_SECTION_DEBUG) + +PPEB_LDR_DATA = POINTER(PEB_LDR_DATA) +PRTL_USER_PROCESS_PARAMETERS = POINTER(RTL_USER_PROCESS_PARAMETERS) + +PPEBLOCKROUTINE = PVOID + +# BitField +ImageUsesLargePages = 1 << 0 +IsProtectedProcess = 1 << 1 +IsLegacyProcess = 1 << 2 +IsImageDynamicallyRelocated = 1 << 3 +SkipPatchingUser32Forwarders = 1 << 4 + +# CrossProcessFlags +ProcessInJob = 1 << 0 +ProcessInitializing = 1 << 1 +ProcessUsingVEH = 1 << 2 +ProcessUsingVCH = 1 << 3 +ProcessUsingFTH = 1 << 4 + +# TracingFlags +HeapTracingEnabled = 1 << 0 +CritSecTracingEnabled = 1 << 1 + +# NtGlobalFlags +FLG_VALID_BITS = 0x003FFFFF # not a flag +FLG_STOP_ON_EXCEPTION = 0x00000001 +FLG_SHOW_LDR_SNAPS = 0x00000002 +FLG_DEBUG_INITIAL_COMMAND = 0x00000004 +FLG_STOP_ON_HUNG_GUI = 0x00000008 +FLG_HEAP_ENABLE_TAIL_CHECK = 0x00000010 +FLG_HEAP_ENABLE_FREE_CHECK = 0x00000020 +FLG_HEAP_VALIDATE_PARAMETERS = 0x00000040 +FLG_HEAP_VALIDATE_ALL = 0x00000080 +FLG_POOL_ENABLE_TAIL_CHECK = 0x00000100 +FLG_POOL_ENABLE_FREE_CHECK = 0x00000200 +FLG_POOL_ENABLE_TAGGING = 0x00000400 +FLG_HEAP_ENABLE_TAGGING = 0x00000800 +FLG_USER_STACK_TRACE_DB = 0x00001000 +FLG_KERNEL_STACK_TRACE_DB = 0x00002000 +FLG_MAINTAIN_OBJECT_TYPELIST = 0x00004000 +FLG_HEAP_ENABLE_TAG_BY_DLL = 0x00008000 +FLG_IGNORE_DEBUG_PRIV = 0x00010000 +FLG_ENABLE_CSRDEBUG = 0x00020000 +FLG_ENABLE_KDEBUG_SYMBOL_LOAD = 0x00040000 +FLG_DISABLE_PAGE_KERNEL_STACKS = 0x00080000 +FLG_HEAP_ENABLE_CALL_TRACING = 0x00100000 +FLG_HEAP_DISABLE_COALESCING = 0x00200000 +FLG_ENABLE_CLOSE_EXCEPTION = 0x00400000 +FLG_ENABLE_EXCEPTION_LOGGING = 0x00800000 +FLG_ENABLE_HANDLE_TYPE_TAGGING = 
0x01000000 +FLG_HEAP_PAGE_ALLOCS = 0x02000000 +FLG_DEBUG_WINLOGON = 0x04000000 +FLG_ENABLE_DBGPRINT_BUFFERING = 0x08000000 +FLG_EARLY_CRITICAL_SECTION_EVT = 0x10000000 +FLG_DISABLE_DLL_VERIFICATION = 0x80000000 + +class _PEB_NT(Structure): + _pack_ = 4 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), + ("FastPebLockRoutine", PVOID), # PPEBLOCKROUTINE + ("FastPebUnlockRoutine", PVOID), # PPEBLOCKROUTINE + ("EnvironmentUpdateCount", ULONG), + ("KernelCallbackTable", PVOID), # Ptr32 Ptr32 Void + ("EventLogSection", PVOID), + ("EventLog", PVOID), + ("FreeList", PVOID), # PPEB_FREE_BLOCK + ("TlsExpansionCounter", ULONG), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", ULONG * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("ReadOnlySharedMemoryHeap", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", ULONG), + ("NtGlobalFlag", ULONG), + ("Spare2", BYTE * 4), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", ULONG), + ("HeapSegmentCommit", ULONG), + ("HeapDeCommitTotalFreeThreshold", ULONG), + ("HeapDeCommitFreeBlockThreshold", ULONG), + ("NumberOfHeaps", ULONG), + ("MaximumNumberOfHeaps", ULONG), + ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", PVOID), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", ULONG), + ("OSMinorVersion", ULONG), + ("OSBuildNumber", ULONG), + ("OSPlatformId", ULONG), + ("ImageSubSystem", ULONG), + ("ImageSubSystemMajorVersion", ULONG), + ("ImageSubSystemMinorVersion", ULONG), + ("ImageProcessAffinityMask", ULONG), + ("GdiHandleBuffer", ULONG * 34), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", ULONG), + ("TlsExpansionBitmapBits", BYTE * 128), + ("SessionId", ULONG), + ] + +# not really, but "dt _PEB" in w2k isn't working for me :( +_PEB_2000 = _PEB_NT + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 SpareBool : UChar +# +0x004 Mutant : Ptr32 Void +# +0x008 ImageBaseAddress : Ptr32 Void +# +0x00c Ldr : Ptr32 _PEB_LDR_DATA +# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS +# +0x014 SubSystemData : Ptr32 Void +# +0x018 ProcessHeap : Ptr32 Void +# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x020 FastPebLockRoutine : Ptr32 Void +# +0x024 FastPebUnlockRoutine : Ptr32 Void +# +0x028 EnvironmentUpdateCount : Uint4B +# +0x02c KernelCallbackTable : Ptr32 Void +# +0x030 SystemReserved : [1] Uint4B +# +0x034 AtlThunkSListPtr32 : Uint4B +# +0x038 FreeList : Ptr32 _PEB_FREE_BLOCK +# +0x03c TlsExpansionCounter : Uint4B +# +0x040 TlsBitmap : Ptr32 Void +# +0x044 TlsBitmapBits : [2] Uint4B +# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void +# +0x050 ReadOnlySharedMemoryHeap : Ptr32 Void +# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void +# +0x058 AnsiCodePageData : Ptr32 Void +# +0x05c OemCodePageData : Ptr32 Void +# +0x060 UnicodeCaseTableData : Ptr32 Void +# +0x064 NumberOfProcessors : Uint4B +# +0x068 NtGlobalFlag : Uint4B +# +0x070 CriticalSectionTimeout 
: _LARGE_INTEGER +# +0x078 HeapSegmentReserve : Uint4B +# +0x07c HeapSegmentCommit : Uint4B +# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B +# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B +# +0x088 NumberOfHeaps : Uint4B +# +0x08c MaximumNumberOfHeaps : Uint4B +# +0x090 ProcessHeaps : Ptr32 Ptr32 Void +# +0x094 GdiSharedHandleTable : Ptr32 Void +# +0x098 ProcessStarterHelper : Ptr32 Void +# +0x09c GdiDCAttributeList : Uint4B +# +0x0a0 LoaderLock : Ptr32 Void +# +0x0a4 OSMajorVersion : Uint4B +# +0x0a8 OSMinorVersion : Uint4B +# +0x0ac OSBuildNumber : Uint2B +# +0x0ae OSCSDVersion : Uint2B +# +0x0b0 OSPlatformId : Uint4B +# +0x0b4 ImageSubsystem : Uint4B +# +0x0b8 ImageSubsystemMajorVersion : Uint4B +# +0x0bc ImageSubsystemMinorVersion : Uint4B +# +0x0c0 ImageProcessAffinityMask : Uint4B +# +0x0c4 GdiHandleBuffer : [34] Uint4B +# +0x14c PostProcessInitRoutine : Ptr32 void +# +0x150 TlsExpansionBitmap : Ptr32 Void +# +0x154 TlsExpansionBitmapBits : [32] Uint4B +# +0x1d4 SessionId : Uint4B +# +0x1d8 AppCompatFlags : _ULARGE_INTEGER +# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x1e8 pShimData : Ptr32 Void +# +0x1ec AppCompatInfo : Ptr32 Void +# +0x1f0 CSDVersion : _UNICODE_STRING +# +0x1f8 ActivationContextData : Ptr32 Void +# +0x1fc ProcessAssemblyStorageMap : Ptr32 Void +# +0x200 SystemDefaultActivationContextData : Ptr32 Void +# +0x204 SystemAssemblyStorageMap : Ptr32 Void +# +0x208 MinimumStackCommit : Uint4B +class _PEB_XP(Structure): + _pack_ = 8 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("SpareBool", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), + ("FastPebLockRoutine", PVOID), + ("FastPebUnlockRoutine", PVOID), + ("EnvironmentUpdateCount", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("AtlThunkSListPtr32", DWORD), + ("FreeList", PVOID), # PPEB_FREE_BLOCK + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("ReadOnlySharedMemoryHeap", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", DWORD), + ("HeapSegmentCommit", DWORD), + ("HeapDeCommitTotalFreeThreshold", DWORD), + ("HeapDeCommitFreeBlockThreshold", DWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ImageProcessAffinityMask", DWORD), + ("GdiHandleBuffer", DWORD * 34), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + 
("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", DWORD), + ] + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 BitField : UChar +# +0x003 ImageUsesLargePages : Pos 0, 1 Bit +# +0x003 SpareBits : Pos 1, 7 Bits +# +0x008 Mutant : Ptr64 Void +# +0x010 ImageBaseAddress : Ptr64 Void +# +0x018 Ldr : Ptr64 _PEB_LDR_DATA +# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS +# +0x028 SubSystemData : Ptr64 Void +# +0x030 ProcessHeap : Ptr64 Void +# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION +# +0x040 AtlThunkSListPtr : Ptr64 Void +# +0x048 SparePtr2 : Ptr64 Void +# +0x050 EnvironmentUpdateCount : Uint4B +# +0x058 KernelCallbackTable : Ptr64 Void +# +0x060 SystemReserved : [1] Uint4B +# +0x064 SpareUlong : Uint4B +# +0x068 FreeList : Ptr64 _PEB_FREE_BLOCK +# +0x070 TlsExpansionCounter : Uint4B +# +0x078 TlsBitmap : Ptr64 Void +# +0x080 TlsBitmapBits : [2] Uint4B +# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void +# +0x090 ReadOnlySharedMemoryHeap : Ptr64 Void +# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void +# +0x0a0 AnsiCodePageData : Ptr64 Void +# +0x0a8 OemCodePageData : Ptr64 Void +# +0x0b0 UnicodeCaseTableData : Ptr64 Void +# +0x0b8 NumberOfProcessors : Uint4B +# +0x0bc NtGlobalFlag : Uint4B +# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER +# +0x0c8 HeapSegmentReserve : Uint8B +# +0x0d0 HeapSegmentCommit : Uint8B +# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B +# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B +# +0x0e8 NumberOfHeaps : Uint4B +# +0x0ec MaximumNumberOfHeaps : Uint4B +# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void +# +0x0f8 GdiSharedHandleTable : Ptr64 Void +# +0x100 ProcessStarterHelper : Ptr64 Void +# +0x108 GdiDCAttributeList : Uint4B +# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION +# +0x118 OSMajorVersion : Uint4B +# +0x11c OSMinorVersion : Uint4B +# +0x120 OSBuildNumber : Uint2B +# +0x122 OSCSDVersion : Uint2B +# +0x124 OSPlatformId : Uint4B +# +0x128 ImageSubsystem : Uint4B +# +0x12c ImageSubsystemMajorVersion : Uint4B +# +0x130 ImageSubsystemMinorVersion : Uint4B +# +0x138 ImageProcessAffinityMask : Uint8B +# +0x140 GdiHandleBuffer : [60] Uint4B +# +0x230 PostProcessInitRoutine : Ptr64 void +# +0x238 TlsExpansionBitmap : Ptr64 Void +# +0x240 TlsExpansionBitmapBits : [32] Uint4B +# +0x2c0 SessionId : Uint4B +# +0x2c8 AppCompatFlags : _ULARGE_INTEGER +# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x2d8 pShimData : Ptr64 Void +# +0x2e0 AppCompatInfo : Ptr64 Void +# +0x2e8 CSDVersion : _UNICODE_STRING +# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA +# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP +# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA +# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP +# +0x318 MinimumStackCommit : Uint8B +# +0x320 FlsCallback : Ptr64 Ptr64 Void +# +0x328 FlsListHead : _LIST_ENTRY +# +0x338 FlsBitmap : Ptr64 Void +# +0x340 FlsBitmapBits : [4] Uint4B +# +0x350 FlsHighIndex : Uint4B +class _PEB_XP_64(Structure): + _pack_ = 8 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", 
UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION + ("AtlThunkSListPtr", PVOID), + ("SparePtr2", PVOID), + ("EnvironmentUpdateCount", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("SpareUlong", DWORD), + ("FreeList", PVOID), # PPEB_FREE_BLOCK + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("ReadOnlySharedMemoryHeap", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr64 Ptr64 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", QWORD), + ("HeapSegmentCommit", QWORD), + ("HeapDeCommitTotalFreeThreshold", QWORD), + ("HeapDeCommitFreeBlockThreshold", QWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr64 Ptr64 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ImageProcessAffinityMask", QWORD), + ("GdiHandleBuffer", DWORD * 60), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + ("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", QWORD), + ("FlsCallback", PVOID), # Ptr64 Ptr64 Void + ("FlsListHead", LIST_ENTRY), + ("FlsBitmap", PVOID), + ("FlsBitmapBits", DWORD * 4), + ("FlsHighIndex", DWORD), + ] + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 BitField : UChar +# +0x003 ImageUsesLargePages : Pos 0, 1 Bit +# +0x003 SpareBits : Pos 1, 7 Bits +# +0x004 Mutant : Ptr32 Void +# +0x008 ImageBaseAddress : Ptr32 Void +# +0x00c Ldr : Ptr32 _PEB_LDR_DATA +# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS +# +0x014 SubSystemData : Ptr32 Void +# +0x018 ProcessHeap : Ptr32 Void +# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x020 AtlThunkSListPtr : Ptr32 Void +# +0x024 SparePtr2 : Ptr32 Void +# +0x028 EnvironmentUpdateCount : Uint4B +# +0x02c KernelCallbackTable : Ptr32 Void +# +0x030 SystemReserved : [1] Uint4B +# +0x034 SpareUlong : Uint4B +# +0x038 FreeList : Ptr32 _PEB_FREE_BLOCK +# +0x03c TlsExpansionCounter : Uint4B +# +0x040 TlsBitmap : Ptr32 Void +# +0x044 TlsBitmapBits : [2] Uint4B +# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void +# +0x050 ReadOnlySharedMemoryHeap : Ptr32 Void +# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void +# +0x058 AnsiCodePageData : Ptr32 
Void +# +0x05c OemCodePageData : Ptr32 Void +# +0x060 UnicodeCaseTableData : Ptr32 Void +# +0x064 NumberOfProcessors : Uint4B +# +0x068 NtGlobalFlag : Uint4B +# +0x070 CriticalSectionTimeout : _LARGE_INTEGER +# +0x078 HeapSegmentReserve : Uint4B +# +0x07c HeapSegmentCommit : Uint4B +# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B +# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B +# +0x088 NumberOfHeaps : Uint4B +# +0x08c MaximumNumberOfHeaps : Uint4B +# +0x090 ProcessHeaps : Ptr32 Ptr32 Void +# +0x094 GdiSharedHandleTable : Ptr32 Void +# +0x098 ProcessStarterHelper : Ptr32 Void +# +0x09c GdiDCAttributeList : Uint4B +# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x0a4 OSMajorVersion : Uint4B +# +0x0a8 OSMinorVersion : Uint4B +# +0x0ac OSBuildNumber : Uint2B +# +0x0ae OSCSDVersion : Uint2B +# +0x0b0 OSPlatformId : Uint4B +# +0x0b4 ImageSubsystem : Uint4B +# +0x0b8 ImageSubsystemMajorVersion : Uint4B +# +0x0bc ImageSubsystemMinorVersion : Uint4B +# +0x0c0 ImageProcessAffinityMask : Uint4B +# +0x0c4 GdiHandleBuffer : [34] Uint4B +# +0x14c PostProcessInitRoutine : Ptr32 void +# +0x150 TlsExpansionBitmap : Ptr32 Void +# +0x154 TlsExpansionBitmapBits : [32] Uint4B +# +0x1d4 SessionId : Uint4B +# +0x1d8 AppCompatFlags : _ULARGE_INTEGER +# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x1e8 pShimData : Ptr32 Void +# +0x1ec AppCompatInfo : Ptr32 Void +# +0x1f0 CSDVersion : _UNICODE_STRING +# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x208 MinimumStackCommit : Uint4B +# +0x20c FlsCallback : Ptr32 Ptr32 Void +# +0x210 FlsListHead : _LIST_ENTRY +# +0x218 FlsBitmap : Ptr32 Void +# +0x21c FlsBitmapBits : [4] Uint4B +# +0x22c FlsHighIndex : Uint4B +class _PEB_2003(Structure): + _pack_ = 8 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION + ("AtlThunkSListPtr", PVOID), + ("SparePtr2", PVOID), + ("EnvironmentUpdateCount", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("SpareUlong", DWORD), + ("FreeList", PVOID), # PPEB_FREE_BLOCK + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("ReadOnlySharedMemoryHeap", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", DWORD), + ("HeapSegmentCommit", DWORD), + ("HeapDeCommitTotalFreeThreshold", DWORD), + ("HeapDeCommitFreeBlockThreshold", DWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", 
DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ImageProcessAffinityMask", DWORD), + ("GdiHandleBuffer", DWORD * 34), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + ("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", QWORD), + ("FlsCallback", PVOID), # Ptr32 Ptr32 Void + ("FlsListHead", LIST_ENTRY), + ("FlsBitmap", PVOID), + ("FlsBitmapBits", DWORD * 4), + ("FlsHighIndex", DWORD), + ] + +_PEB_2003_64 = _PEB_XP_64 +_PEB_2003_R2 = _PEB_2003 +_PEB_2003_R2_64 = _PEB_2003_64 + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 BitField : UChar +# +0x003 ImageUsesLargePages : Pos 0, 1 Bit +# +0x003 IsProtectedProcess : Pos 1, 1 Bit +# +0x003 IsLegacyProcess : Pos 2, 1 Bit +# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit +# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit +# +0x003 SpareBits : Pos 5, 3 Bits +# +0x004 Mutant : Ptr32 Void +# +0x008 ImageBaseAddress : Ptr32 Void +# +0x00c Ldr : Ptr32 _PEB_LDR_DATA +# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS +# +0x014 SubSystemData : Ptr32 Void +# +0x018 ProcessHeap : Ptr32 Void +# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x020 AtlThunkSListPtr : Ptr32 Void +# +0x024 IFEOKey : Ptr32 Void +# +0x028 CrossProcessFlags : Uint4B +# +0x028 ProcessInJob : Pos 0, 1 Bit +# +0x028 ProcessInitializing : Pos 1, 1 Bit +# +0x028 ProcessUsingVEH : Pos 2, 1 Bit +# +0x028 ProcessUsingVCH : Pos 3, 1 Bit +# +0x028 ReservedBits0 : Pos 4, 28 Bits +# +0x02c KernelCallbackTable : Ptr32 Void +# +0x02c UserSharedInfoPtr : Ptr32 Void +# +0x030 SystemReserved : [1] Uint4B +# +0x034 SpareUlong : Uint4B +# +0x038 SparePebPtr0 : Uint4B +# +0x03c TlsExpansionCounter : Uint4B +# +0x040 TlsBitmap : Ptr32 Void +# +0x044 TlsBitmapBits : [2] Uint4B +# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void +# +0x050 HotpatchInformation : Ptr32 Void +# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void +# +0x058 AnsiCodePageData : Ptr32 Void +# +0x05c OemCodePageData : Ptr32 Void +# +0x060 UnicodeCaseTableData : Ptr32 Void +# +0x064 NumberOfProcessors : Uint4B +# +0x068 NtGlobalFlag : Uint4B +# +0x070 CriticalSectionTimeout : _LARGE_INTEGER +# +0x078 HeapSegmentReserve : Uint4B +# +0x07c HeapSegmentCommit : Uint4B +# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B +# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B +# +0x088 NumberOfHeaps : Uint4B +# +0x08c MaximumNumberOfHeaps : Uint4B +# +0x090 ProcessHeaps : Ptr32 Ptr32 Void +# +0x094 GdiSharedHandleTable : Ptr32 Void +# +0x098 ProcessStarterHelper : Ptr32 Void +# +0x09c GdiDCAttributeList : Uint4B +# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x0a4 OSMajorVersion : Uint4B +# +0x0a8 OSMinorVersion : Uint4B +# +0x0ac OSBuildNumber : Uint2B +# +0x0ae OSCSDVersion : Uint2B +# +0x0b0 OSPlatformId : Uint4B +# +0x0b4 ImageSubsystem : Uint4B +# +0x0b8 ImageSubsystemMajorVersion : Uint4B +# +0x0bc ImageSubsystemMinorVersion : Uint4B 
+# +0x0c0 ActiveProcessAffinityMask : Uint4B +# +0x0c4 GdiHandleBuffer : [34] Uint4B +# +0x14c PostProcessInitRoutine : Ptr32 void +# +0x150 TlsExpansionBitmap : Ptr32 Void +# +0x154 TlsExpansionBitmapBits : [32] Uint4B +# +0x1d4 SessionId : Uint4B +# +0x1d8 AppCompatFlags : _ULARGE_INTEGER +# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x1e8 pShimData : Ptr32 Void +# +0x1ec AppCompatInfo : Ptr32 Void +# +0x1f0 CSDVersion : _UNICODE_STRING +# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x208 MinimumStackCommit : Uint4B +# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO +# +0x210 FlsListHead : _LIST_ENTRY +# +0x218 FlsBitmap : Ptr32 Void +# +0x21c FlsBitmapBits : [4] Uint4B +# +0x22c FlsHighIndex : Uint4B +# +0x230 WerRegistrationData : Ptr32 Void +# +0x234 WerShipAssertPtr : Ptr32 Void +class _PEB_2008(Structure): + _pack_ = 8 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION + ("AtlThunkSListPtr", PVOID), + ("IFEOKey", PVOID), + ("CrossProcessFlags", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("SpareUlong", DWORD), + ("SparePebPtr0", PVOID), + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("HotpatchInformation", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", DWORD), + ("HeapSegmentCommit", DWORD), + ("HeapDeCommitTotalFreeThreshold", DWORD), + ("HeapDeCommitFreeBlockThreshold", DWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ActiveProcessAffinityMask", DWORD), + ("GdiHandleBuffer", DWORD * 34), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + ("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", DWORD), + ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO + ("FlsListHead", 
LIST_ENTRY), + ("FlsBitmap", PVOID), + ("FlsBitmapBits", DWORD * 4), + ("FlsHighIndex", DWORD), + ("WerRegistrationData", PVOID), + ("WerShipAssertPtr", PVOID), + ] + def __get_UserSharedInfoPtr(self): + return self.KernelCallbackTable + def __set_UserSharedInfoPtr(self, value): + self.KernelCallbackTable = value + UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr) + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 BitField : UChar +# +0x003 ImageUsesLargePages : Pos 0, 1 Bit +# +0x003 IsProtectedProcess : Pos 1, 1 Bit +# +0x003 IsLegacyProcess : Pos 2, 1 Bit +# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit +# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit +# +0x003 SpareBits : Pos 5, 3 Bits +# +0x008 Mutant : Ptr64 Void +# +0x010 ImageBaseAddress : Ptr64 Void +# +0x018 Ldr : Ptr64 _PEB_LDR_DATA +# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS +# +0x028 SubSystemData : Ptr64 Void +# +0x030 ProcessHeap : Ptr64 Void +# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION +# +0x040 AtlThunkSListPtr : Ptr64 Void +# +0x048 IFEOKey : Ptr64 Void +# +0x050 CrossProcessFlags : Uint4B +# +0x050 ProcessInJob : Pos 0, 1 Bit +# +0x050 ProcessInitializing : Pos 1, 1 Bit +# +0x050 ProcessUsingVEH : Pos 2, 1 Bit +# +0x050 ProcessUsingVCH : Pos 3, 1 Bit +# +0x050 ReservedBits0 : Pos 4, 28 Bits +# +0x058 KernelCallbackTable : Ptr64 Void +# +0x058 UserSharedInfoPtr : Ptr64 Void +# +0x060 SystemReserved : [1] Uint4B +# +0x064 SpareUlong : Uint4B +# +0x068 SparePebPtr0 : Uint8B +# +0x070 TlsExpansionCounter : Uint4B +# +0x078 TlsBitmap : Ptr64 Void +# +0x080 TlsBitmapBits : [2] Uint4B +# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void +# +0x090 HotpatchInformation : Ptr64 Void +# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void +# +0x0a0 AnsiCodePageData : Ptr64 Void +# +0x0a8 OemCodePageData : Ptr64 Void +# +0x0b0 UnicodeCaseTableData : Ptr64 Void +# +0x0b8 NumberOfProcessors : Uint4B +# +0x0bc NtGlobalFlag : Uint4B +# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER +# +0x0c8 HeapSegmentReserve : Uint8B +# +0x0d0 HeapSegmentCommit : Uint8B +# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B +# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B +# +0x0e8 NumberOfHeaps : Uint4B +# +0x0ec MaximumNumberOfHeaps : Uint4B +# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void +# +0x0f8 GdiSharedHandleTable : Ptr64 Void +# +0x100 ProcessStarterHelper : Ptr64 Void +# +0x108 GdiDCAttributeList : Uint4B +# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION +# +0x118 OSMajorVersion : Uint4B +# +0x11c OSMinorVersion : Uint4B +# +0x120 OSBuildNumber : Uint2B +# +0x122 OSCSDVersion : Uint2B +# +0x124 OSPlatformId : Uint4B +# +0x128 ImageSubsystem : Uint4B +# +0x12c ImageSubsystemMajorVersion : Uint4B +# +0x130 ImageSubsystemMinorVersion : Uint4B +# +0x138 ActiveProcessAffinityMask : Uint8B +# +0x140 GdiHandleBuffer : [60] Uint4B +# +0x230 PostProcessInitRoutine : Ptr64 void +# +0x238 TlsExpansionBitmap : Ptr64 Void +# +0x240 TlsExpansionBitmapBits : [32] Uint4B +# +0x2c0 SessionId : Uint4B +# +0x2c8 AppCompatFlags : _ULARGE_INTEGER +# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x2d8 pShimData : Ptr64 Void +# +0x2e0 AppCompatInfo : Ptr64 Void +# +0x2e8 CSDVersion : _UNICODE_STRING +# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA +# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP +# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA +# +0x310 SystemAssemblyStorageMap : 
Ptr64 _ASSEMBLY_STORAGE_MAP +# +0x318 MinimumStackCommit : Uint8B +# +0x320 FlsCallback : Ptr64 _FLS_CALLBACK_INFO +# +0x328 FlsListHead : _LIST_ENTRY +# +0x338 FlsBitmap : Ptr64 Void +# +0x340 FlsBitmapBits : [4] Uint4B +# +0x350 FlsHighIndex : Uint4B +# +0x358 WerRegistrationData : Ptr64 Void +# +0x360 WerShipAssertPtr : Ptr64 Void +class _PEB_2008_64(Structure): + _pack_ = 8 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION + ("AtlThunkSListPtr", PVOID), + ("IFEOKey", PVOID), + ("CrossProcessFlags", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("SpareUlong", DWORD), + ("SparePebPtr0", PVOID), + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("HotpatchInformation", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr64 Ptr64 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", QWORD), + ("HeapSegmentCommit", QWORD), + ("HeapDeCommitTotalFreeThreshold", QWORD), + ("HeapDeCommitFreeBlockThreshold", QWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr64 Ptr64 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ActiveProcessAffinityMask", QWORD), + ("GdiHandleBuffer", DWORD * 60), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + ("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", QWORD), + ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO + ("FlsListHead", LIST_ENTRY), + ("FlsBitmap", PVOID), + ("FlsBitmapBits", DWORD * 4), + ("FlsHighIndex", DWORD), + ("WerRegistrationData", PVOID), + ("WerShipAssertPtr", PVOID), + ] + def __get_UserSharedInfoPtr(self): + return self.KernelCallbackTable + def __set_UserSharedInfoPtr(self, value): + self.KernelCallbackTable = value + UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr) + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 BitField : UChar +# +0x003 ImageUsesLargePages : Pos 0, 1 Bit +# +0x003 IsProtectedProcess : Pos 1, 1 Bit +# +0x003 IsLegacyProcess : Pos 2, 1 Bit +# +0x003 
IsImageDynamicallyRelocated : Pos 3, 1 Bit +# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit +# +0x003 SpareBits : Pos 5, 3 Bits +# +0x004 Mutant : Ptr32 Void +# +0x008 ImageBaseAddress : Ptr32 Void +# +0x00c Ldr : Ptr32 _PEB_LDR_DATA +# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS +# +0x014 SubSystemData : Ptr32 Void +# +0x018 ProcessHeap : Ptr32 Void +# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x020 AtlThunkSListPtr : Ptr32 Void +# +0x024 IFEOKey : Ptr32 Void +# +0x028 CrossProcessFlags : Uint4B +# +0x028 ProcessInJob : Pos 0, 1 Bit +# +0x028 ProcessInitializing : Pos 1, 1 Bit +# +0x028 ProcessUsingVEH : Pos 2, 1 Bit +# +0x028 ProcessUsingVCH : Pos 3, 1 Bit +# +0x028 ProcessUsingFTH : Pos 4, 1 Bit +# +0x028 ReservedBits0 : Pos 5, 27 Bits +# +0x02c KernelCallbackTable : Ptr32 Void +# +0x02c UserSharedInfoPtr : Ptr32 Void +# +0x030 SystemReserved : [1] Uint4B +# +0x034 AtlThunkSListPtr32 : Uint4B +# +0x038 ApiSetMap : Ptr32 Void +# +0x03c TlsExpansionCounter : Uint4B +# +0x040 TlsBitmap : Ptr32 Void +# +0x044 TlsBitmapBits : [2] Uint4B +# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void +# +0x050 HotpatchInformation : Ptr32 Void +# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void +# +0x058 AnsiCodePageData : Ptr32 Void +# +0x05c OemCodePageData : Ptr32 Void +# +0x060 UnicodeCaseTableData : Ptr32 Void +# +0x064 NumberOfProcessors : Uint4B +# +0x068 NtGlobalFlag : Uint4B +# +0x070 CriticalSectionTimeout : _LARGE_INTEGER +# +0x078 HeapSegmentReserve : Uint4B +# +0x07c HeapSegmentCommit : Uint4B +# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B +# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B +# +0x088 NumberOfHeaps : Uint4B +# +0x08c MaximumNumberOfHeaps : Uint4B +# +0x090 ProcessHeaps : Ptr32 Ptr32 Void +# +0x094 GdiSharedHandleTable : Ptr32 Void +# +0x098 ProcessStarterHelper : Ptr32 Void +# +0x09c GdiDCAttributeList : Uint4B +# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x0a4 OSMajorVersion : Uint4B +# +0x0a8 OSMinorVersion : Uint4B +# +0x0ac OSBuildNumber : Uint2B +# +0x0ae OSCSDVersion : Uint2B +# +0x0b0 OSPlatformId : Uint4B +# +0x0b4 ImageSubsystem : Uint4B +# +0x0b8 ImageSubsystemMajorVersion : Uint4B +# +0x0bc ImageSubsystemMinorVersion : Uint4B +# +0x0c0 ActiveProcessAffinityMask : Uint4B +# +0x0c4 GdiHandleBuffer : [34] Uint4B +# +0x14c PostProcessInitRoutine : Ptr32 void +# +0x150 TlsExpansionBitmap : Ptr32 Void +# +0x154 TlsExpansionBitmapBits : [32] Uint4B +# +0x1d4 SessionId : Uint4B +# +0x1d8 AppCompatFlags : _ULARGE_INTEGER +# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x1e8 pShimData : Ptr32 Void +# +0x1ec AppCompatInfo : Ptr32 Void +# +0x1f0 CSDVersion : _UNICODE_STRING +# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x208 MinimumStackCommit : Uint4B +# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO +# +0x210 FlsListHead : _LIST_ENTRY +# +0x218 FlsBitmap : Ptr32 Void +# +0x21c FlsBitmapBits : [4] Uint4B +# +0x22c FlsHighIndex : Uint4B +# +0x230 WerRegistrationData : Ptr32 Void +# +0x234 WerShipAssertPtr : Ptr32 Void +# +0x238 pContextData : Ptr32 Void +# +0x23c pImageHeaderHash : Ptr32 Void +# +0x240 TracingFlags : Uint4B +# +0x240 HeapTracingEnabled : Pos 0, 1 Bit +# +0x240 CritSecTracingEnabled : Pos 1, 1 Bit +# +0x240 SpareTracingBits : Pos 2, 30 Bits +class _PEB_2008_R2(Structure): + _pack_ = 8 + 
_fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION + ("AtlThunkSListPtr", PVOID), + ("IFEOKey", PVOID), + ("CrossProcessFlags", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("AtlThunkSListPtr32", PVOID), + ("ApiSetMap", PVOID), + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("HotpatchInformation", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", DWORD), + ("HeapSegmentCommit", DWORD), + ("HeapDeCommitTotalFreeThreshold", DWORD), + ("HeapDeCommitFreeBlockThreshold", DWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ActiveProcessAffinityMask", DWORD), + ("GdiHandleBuffer", DWORD * 34), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + ("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", DWORD), + ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO + ("FlsListHead", LIST_ENTRY), + ("FlsBitmap", PVOID), + ("FlsBitmapBits", DWORD * 4), + ("FlsHighIndex", DWORD), + ("WerRegistrationData", PVOID), + ("WerShipAssertPtr", PVOID), + ("pContextData", PVOID), + ("pImageHeaderHash", PVOID), + ("TracingFlags", DWORD), + ] + def __get_UserSharedInfoPtr(self): + return self.KernelCallbackTable + def __set_UserSharedInfoPtr(self, value): + self.KernelCallbackTable = value + UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr) + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 BitField : UChar +# +0x003 ImageUsesLargePages : Pos 0, 1 Bit +# +0x003 IsProtectedProcess : Pos 1, 1 Bit +# +0x003 IsLegacyProcess : Pos 2, 1 Bit +# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit +# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit +# +0x003 SpareBits : Pos 5, 3 Bits +# +0x008 Mutant : Ptr64 Void +# +0x010 ImageBaseAddress : Ptr64 Void +# +0x018 Ldr : Ptr64 _PEB_LDR_DATA +# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS 
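# Note on the UserSharedInfoPtr properties defined on the structures above:
# in the native PEB, KernelCallbackTable and UserSharedInfoPtr occupy the
# same offset (a union in the WinDbg layout), so these Python classes expose
# the alias as a property over KernelCallbackTable instead of declaring a
# second ctypes field.  A hedged illustration, assuming the _PEB_2008_R2
# class defined above:

def _demo_user_shared_info_alias():
    peb = _PEB_2008_R2()
    peb.KernelCallbackTable = 0x7FFE0000   # write through one name...
    return peb.UserSharedInfoPtr           # ...read the same storage back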
+# +0x028 SubSystemData : Ptr64 Void +# +0x030 ProcessHeap : Ptr64 Void +# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION +# +0x040 AtlThunkSListPtr : Ptr64 Void +# +0x048 IFEOKey : Ptr64 Void +# +0x050 CrossProcessFlags : Uint4B +# +0x050 ProcessInJob : Pos 0, 1 Bit +# +0x050 ProcessInitializing : Pos 1, 1 Bit +# +0x050 ProcessUsingVEH : Pos 2, 1 Bit +# +0x050 ProcessUsingVCH : Pos 3, 1 Bit +# +0x050 ProcessUsingFTH : Pos 4, 1 Bit +# +0x050 ReservedBits0 : Pos 5, 27 Bits +# +0x058 KernelCallbackTable : Ptr64 Void +# +0x058 UserSharedInfoPtr : Ptr64 Void +# +0x060 SystemReserved : [1] Uint4B +# +0x064 AtlThunkSListPtr32 : Uint4B +# +0x068 ApiSetMap : Ptr64 Void +# +0x070 TlsExpansionCounter : Uint4B +# +0x078 TlsBitmap : Ptr64 Void +# +0x080 TlsBitmapBits : [2] Uint4B +# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void +# +0x090 HotpatchInformation : Ptr64 Void +# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void +# +0x0a0 AnsiCodePageData : Ptr64 Void +# +0x0a8 OemCodePageData : Ptr64 Void +# +0x0b0 UnicodeCaseTableData : Ptr64 Void +# +0x0b8 NumberOfProcessors : Uint4B +# +0x0bc NtGlobalFlag : Uint4B +# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER +# +0x0c8 HeapSegmentReserve : Uint8B +# +0x0d0 HeapSegmentCommit : Uint8B +# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B +# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B +# +0x0e8 NumberOfHeaps : Uint4B +# +0x0ec MaximumNumberOfHeaps : Uint4B +# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void +# +0x0f8 GdiSharedHandleTable : Ptr64 Void +# +0x100 ProcessStarterHelper : Ptr64 Void +# +0x108 GdiDCAttributeList : Uint4B +# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION +# +0x118 OSMajorVersion : Uint4B +# +0x11c OSMinorVersion : Uint4B +# +0x120 OSBuildNumber : Uint2B +# +0x122 OSCSDVersion : Uint2B +# +0x124 OSPlatformId : Uint4B +# +0x128 ImageSubsystem : Uint4B +# +0x12c ImageSubsystemMajorVersion : Uint4B +# +0x130 ImageSubsystemMinorVersion : Uint4B +# +0x138 ActiveProcessAffinityMask : Uint8B +# +0x140 GdiHandleBuffer : [60] Uint4B +# +0x230 PostProcessInitRoutine : Ptr64 void +# +0x238 TlsExpansionBitmap : Ptr64 Void +# +0x240 TlsExpansionBitmapBits : [32] Uint4B +# +0x2c0 SessionId : Uint4B +# +0x2c8 AppCompatFlags : _ULARGE_INTEGER +# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x2d8 pShimData : Ptr64 Void +# +0x2e0 AppCompatInfo : Ptr64 Void +# +0x2e8 CSDVersion : _UNICODE_STRING +# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA +# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP +# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA +# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP +# +0x318 MinimumStackCommit : Uint8B +# +0x320 FlsCallback : Ptr64 _FLS_CALLBACK_INFO +# +0x328 FlsListHead : _LIST_ENTRY +# +0x338 FlsBitmap : Ptr64 Void +# +0x340 FlsBitmapBits : [4] Uint4B +# +0x350 FlsHighIndex : Uint4B +# +0x358 WerRegistrationData : Ptr64 Void +# +0x360 WerShipAssertPtr : Ptr64 Void +# +0x368 pContextData : Ptr64 Void +# +0x370 pImageHeaderHash : Ptr64 Void +# +0x378 TracingFlags : Uint4B +# +0x378 HeapTracingEnabled : Pos 0, 1 Bit +# +0x378 CritSecTracingEnabled : Pos 1, 1 Bit +# +0x378 SpareTracingBits : Pos 2, 30 Bits +class _PEB_2008_R2_64(Structure): + _pack_ = 8 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + 
("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION + ("AtlThunkSListPtr", PVOID), + ("IFEOKey", PVOID), + ("CrossProcessFlags", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("AtlThunkSListPtr32", DWORD), + ("ApiSetMap", PVOID), + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("HotpatchInformation", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", QWORD), + ("HeapSegmentCommit", QWORD), + ("HeapDeCommitTotalFreeThreshold", QWORD), + ("HeapDeCommitFreeBlockThreshold", QWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr64 Ptr64 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ActiveProcessAffinityMask", QWORD), + ("GdiHandleBuffer", DWORD * 60), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + ("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", QWORD), + ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO + ("FlsListHead", LIST_ENTRY), + ("FlsBitmap", PVOID), + ("FlsBitmapBits", DWORD * 4), + ("FlsHighIndex", DWORD), + ("WerRegistrationData", PVOID), + ("WerShipAssertPtr", PVOID), + ("pContextData", PVOID), + ("pImageHeaderHash", PVOID), + ("TracingFlags", DWORD), + ] + def __get_UserSharedInfoPtr(self): + return self.KernelCallbackTable + def __set_UserSharedInfoPtr(self, value): + self.KernelCallbackTable = value + UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr) + +_PEB_Vista = _PEB_2008 +_PEB_Vista_64 = _PEB_2008_64 +_PEB_W7 = _PEB_2008_R2 +_PEB_W7_64 = _PEB_2008_R2_64 + +# +0x000 InheritedAddressSpace : UChar +# +0x001 ReadImageFileExecOptions : UChar +# +0x002 BeingDebugged : UChar +# +0x003 BitField : UChar +# +0x003 ImageUsesLargePages : Pos 0, 1 Bit +# +0x003 IsProtectedProcess : Pos 1, 1 Bit +# +0x003 IsLegacyProcess : Pos 2, 1 Bit +# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit +# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit +# +0x003 SpareBits : Pos 5, 3 Bits +# +0x004 Mutant : Ptr32 Void +# +0x008 ImageBaseAddress : Ptr32 Void +# +0x00c Ldr : Ptr32 _PEB_LDR_DATA +# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS +# +0x014 SubSystemData : Ptr32 Void +# +0x018 ProcessHeap : Ptr32 Void +# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x020 AtlThunkSListPtr : Ptr32 Void +# +0x024 IFEOKey : 
Ptr32 Void +# +0x028 CrossProcessFlags : Uint4B +# +0x028 ProcessInJob : Pos 0, 1 Bit +# +0x028 ProcessInitializing : Pos 1, 1 Bit +# +0x028 ProcessUsingVEH : Pos 2, 1 Bit +# +0x028 ProcessUsingVCH : Pos 3, 1 Bit +# +0x028 ProcessUsingFTH : Pos 4, 1 Bit +# +0x028 ReservedBits0 : Pos 5, 27 Bits +# +0x02c KernelCallbackTable : Ptr32 Void +# +0x02c UserSharedInfoPtr : Ptr32 Void +# +0x030 SystemReserved : [1] Uint4B +# +0x034 TracingFlags : Uint4B +# +0x034 HeapTracingEnabled : Pos 0, 1 Bit +# +0x034 CritSecTracingEnabled : Pos 1, 1 Bit +# +0x034 SpareTracingBits : Pos 2, 30 Bits +# +0x038 ApiSetMap : Ptr32 Void +# +0x03c TlsExpansionCounter : Uint4B +# +0x040 TlsBitmap : Ptr32 Void +# +0x044 TlsBitmapBits : [2] Uint4B +# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void +# +0x050 HotpatchInformation : Ptr32 Void +# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void +# +0x058 AnsiCodePageData : Ptr32 Void +# +0x05c OemCodePageData : Ptr32 Void +# +0x060 UnicodeCaseTableData : Ptr32 Void +# +0x064 NumberOfProcessors : Uint4B +# +0x068 NtGlobalFlag : Uint4B +# +0x070 CriticalSectionTimeout : _LARGE_INTEGER +# +0x078 HeapSegmentReserve : Uint4B +# +0x07c HeapSegmentCommit : Uint4B +# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B +# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B +# +0x088 NumberOfHeaps : Uint4B +# +0x08c MaximumNumberOfHeaps : Uint4B +# +0x090 ProcessHeaps : Ptr32 Ptr32 Void +# +0x094 GdiSharedHandleTable : Ptr32 Void +# +0x098 ProcessStarterHelper : Ptr32 Void +# +0x09c GdiDCAttributeList : Uint4B +# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION +# +0x0a4 OSMajorVersion : Uint4B +# +0x0a8 OSMinorVersion : Uint4B +# +0x0ac OSBuildNumber : Uint2B +# +0x0ae OSCSDVersion : Uint2B +# +0x0b0 OSPlatformId : Uint4B +# +0x0b4 ImageSubsystem : Uint4B +# +0x0b8 ImageSubsystemMajorVersion : Uint4B +# +0x0bc ImageSubsystemMinorVersion : Uint4B +# +0x0c0 ActiveProcessAffinityMask : Uint4B +# +0x0c4 GdiHandleBuffer : [34] Uint4B +# +0x14c PostProcessInitRoutine : Ptr32 void +# +0x150 TlsExpansionBitmap : Ptr32 Void +# +0x154 TlsExpansionBitmapBits : [32] Uint4B +# +0x1d4 SessionId : Uint4B +# +0x1d8 AppCompatFlags : _ULARGE_INTEGER +# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER +# +0x1e8 pShimData : Ptr32 Void +# +0x1ec AppCompatInfo : Ptr32 Void +# +0x1f0 CSDVersion : _UNICODE_STRING +# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA +# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP +# +0x208 MinimumStackCommit : Uint4B +# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO +# +0x210 FlsListHead : _LIST_ENTRY +# +0x218 FlsBitmap : Ptr32 Void +# +0x21c FlsBitmapBits : [4] Uint4B +# +0x22c FlsHighIndex : Uint4B +# +0x230 WerRegistrationData : Ptr32 Void +# +0x234 WerShipAssertPtr : Ptr32 Void +# +0x238 pContextData : Ptr32 Void +# +0x23c pImageHeaderHash : Ptr32 Void +class _PEB_W7_Beta(Structure): + """ + This definition of the PEB structure is only valid for the beta versions + of Windows 7. For the final version of Windows 7 use L{_PEB_W7} instead. + This structure is not chosen automatically. 
+ """ + _pack_ = 8 + _fields_ = [ + ("InheritedAddressSpace", BOOLEAN), + ("ReadImageFileExecOptions", UCHAR), + ("BeingDebugged", BOOLEAN), + ("BitField", UCHAR), + ("Mutant", HANDLE), + ("ImageBaseAddress", PVOID), + ("Ldr", PVOID), # PPEB_LDR_DATA + ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS + ("SubSystemData", PVOID), + ("ProcessHeap", PVOID), + ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION + ("AtlThunkSListPtr", PVOID), + ("IFEOKey", PVOID), + ("CrossProcessFlags", DWORD), + ("KernelCallbackTable", PVOID), + ("SystemReserved", DWORD), + ("TracingFlags", DWORD), + ("ApiSetMap", PVOID), + ("TlsExpansionCounter", DWORD), + ("TlsBitmap", PVOID), + ("TlsBitmapBits", DWORD * 2), + ("ReadOnlySharedMemoryBase", PVOID), + ("HotpatchInformation", PVOID), + ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void + ("AnsiCodePageData", PVOID), + ("OemCodePageData", PVOID), + ("UnicodeCaseTableData", PVOID), + ("NumberOfProcessors", DWORD), + ("NtGlobalFlag", DWORD), + ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER + ("HeapSegmentReserve", DWORD), + ("HeapSegmentCommit", DWORD), + ("HeapDeCommitTotalFreeThreshold", DWORD), + ("HeapDeCommitFreeBlockThreshold", DWORD), + ("NumberOfHeaps", DWORD), + ("MaximumNumberOfHeaps", DWORD), + ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void + ("GdiSharedHandleTable", PVOID), + ("ProcessStarterHelper", PVOID), + ("GdiDCAttributeList", DWORD), + ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION + ("OSMajorVersion", DWORD), + ("OSMinorVersion", DWORD), + ("OSBuildNumber", WORD), + ("OSCSDVersion", WORD), + ("OSPlatformId", DWORD), + ("ImageSubsystem", DWORD), + ("ImageSubsystemMajorVersion", DWORD), + ("ImageSubsystemMinorVersion", DWORD), + ("ActiveProcessAffinityMask", DWORD), + ("GdiHandleBuffer", DWORD * 34), + ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE), + ("TlsExpansionBitmap", PVOID), + ("TlsExpansionBitmapBits", DWORD * 32), + ("SessionId", DWORD), + ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER + ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER + ("pShimData", PVOID), + ("AppCompatInfo", PVOID), + ("CSDVersion", UNICODE_STRING), + ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA + ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP + ("MinimumStackCommit", DWORD), + ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO + ("FlsListHead", LIST_ENTRY), + ("FlsBitmap", PVOID), + ("FlsBitmapBits", DWORD * 4), + ("FlsHighIndex", DWORD), + ("WerRegistrationData", PVOID), + ("WerShipAssertPtr", PVOID), + ("pContextData", PVOID), + ("pImageHeaderHash", PVOID), + ] + def __get_UserSharedInfoPtr(self): + return self.KernelCallbackTable + def __set_UserSharedInfoPtr(self, value): + self.KernelCallbackTable = value + UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr) + +# Use the correct PEB structure definition. +# Defaults to the latest Windows version. 
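# The PEB class below resolves to whichever of the structures above matches
# the detected OS, so it can be mapped directly over the PEB of the current
# process.  A minimal, hedged sketch of how that base address is usually
# obtained on Windows; the helper names are illustrative, while
# NtQueryInformationProcess / ProcessBasicInformation are documented APIs:

import ctypes

class _PROCESS_BASIC_INFORMATION(ctypes.Structure):
    # Layout as documented for NtQueryInformationProcess(ProcessBasicInformation).
    _fields_ = [
        ("Reserved1",       ctypes.c_void_p),
        ("PebBaseAddress",  ctypes.c_void_p),
        ("Reserved2",       ctypes.c_void_p * 2),
        ("UniqueProcessId", ctypes.c_void_p),
        ("Reserved3",       ctypes.c_void_p),
    ]

def _current_peb_address():
    """Return the PEB base address of the current process."""
    ntdll = ctypes.WinDLL("ntdll")
    pbi = _PROCESS_BASIC_INFORMATION()
    ret_len = ctypes.c_ulong(0)
    status = ntdll.NtQueryInformationProcess(
        ctypes.c_void_p(-1),            # GetCurrentProcess() pseudo-handle
        0,                              # ProcessBasicInformation
        ctypes.byref(pbi),
        ctypes.sizeof(pbi),
        ctypes.byref(ret_len))
    if status != 0:
        raise OSError("NtQueryInformationProcess failed: 0x%08X" % (status & 0xFFFFFFFF))
    return pbi.PebBaseAddress

# e.g.: PEB.from_address(_current_peb_address()).BeingDebugged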
+class PEB(Structure): + _pack_ = 8 + if os == 'Windows NT': + _pack_ = _PEB_NT._pack_ + _fields_ = _PEB_NT._fields_ + elif os == 'Windows 2000': + _pack_ = _PEB_2000._pack_ + _fields_ = _PEB_2000._fields_ + elif os == 'Windows XP': + _fields_ = _PEB_XP._fields_ + elif os == 'Windows XP (64 bits)': + _fields_ = _PEB_XP_64._fields_ + elif os == 'Windows 2003': + _fields_ = _PEB_2003._fields_ + elif os == 'Windows 2003 (64 bits)': + _fields_ = _PEB_2003_64._fields_ + elif os == 'Windows 2003 R2': + _fields_ = _PEB_2003_R2._fields_ + elif os == 'Windows 2003 R2 (64 bits)': + _fields_ = _PEB_2003_R2_64._fields_ + elif os == 'Windows 2008': + _fields_ = _PEB_2008._fields_ + elif os == 'Windows 2008 (64 bits)': + _fields_ = _PEB_2008_64._fields_ + elif os == 'Windows 2008 R2': + _fields_ = _PEB_2008_R2._fields_ + elif os == 'Windows 2008 R2 (64 bits)': + _fields_ = _PEB_2008_R2_64._fields_ + elif os == 'Windows Vista': + _fields_ = _PEB_Vista._fields_ + elif os == 'Windows Vista (64 bits)': + _fields_ = _PEB_Vista_64._fields_ + elif os == 'Windows 7': + _fields_ = _PEB_W7._fields_ + elif os == 'Windows 7 (64 bits)': + _fields_ = _PEB_W7_64._fields_ + elif sizeof(SIZE_T) == sizeof(DWORD): + _fields_ = _PEB_W7._fields_ + else: + _fields_ = _PEB_W7_64._fields_ +PPEB = POINTER(PEB) + +# PEB structure for WOW64 processes. +class PEB_32(Structure): + _pack_ = 8 + if os == 'Windows NT': + _pack_ = _PEB_NT._pack_ + _fields_ = _PEB_NT._fields_ + elif os == 'Windows 2000': + _pack_ = _PEB_2000._pack_ + _fields_ = _PEB_2000._fields_ + elif os.startswith('Windows XP'): + _fields_ = _PEB_XP._fields_ + elif os.startswith('Windows 2003 R2'): + _fields_ = _PEB_2003_R2._fields_ + elif os.startswith('Windows 2003'): + _fields_ = _PEB_2003._fields_ + elif os.startswith('Windows 2008 R2'): + _fields_ = _PEB_2008_R2._fields_ + elif os.startswith('Windows 2008'): + _fields_ = _PEB_2008._fields_ + elif os.startswith('Windows Vista'): + _fields_ = _PEB_Vista._fields_ + else: #if os.startswith('Windows 7'): + _fields_ = _PEB_W7._fields_ + +# from https://vmexplorer.svn.codeplex.com/svn/VMExplorer/src/Win32/Threads.cs +# +# [StructLayout (LayoutKind.Sequential, Size = 0x0C)] +# public struct Wx86ThreadState +# { +# public IntPtr CallBx86Eip; // Ptr32 to Uint4B +# public IntPtr DeallocationCpu; // Ptr32 to Void +# public Byte UseKnownWx86Dll; // UChar +# public Byte OleStubInvoked; // Char +# }; +class Wx86ThreadState(Structure): + _fields_ = [ + ("CallBx86Eip", PVOID), + ("DeallocationCpu", PVOID), + ("UseKnownWx86Dll", UCHAR), + ("OleStubInvoked", CHAR), +] + +# ntdll!_RTL_ACTIVATION_CONTEXT_STACK_FRAME +# +0x000 Previous : Ptr64 _RTL_ACTIVATION_CONTEXT_STACK_FRAME +# +0x008 ActivationContext : Ptr64 _ACTIVATION_CONTEXT +# +0x010 Flags : Uint4B +class RTL_ACTIVATION_CONTEXT_STACK_FRAME(Structure): + _fields_ = [ + ("Previous", PVOID), + ("ActivationContext", PVOID), + ("Flags", DWORD), +] + +# ntdll!_ACTIVATION_CONTEXT_STACK +# +0x000 ActiveFrame : Ptr64 _RTL_ACTIVATION_CONTEXT_STACK_FRAME +# +0x008 FrameListCache : _LIST_ENTRY +# +0x018 Flags : Uint4B +# +0x01c NextCookieSequenceNumber : Uint4B +# +0x020 StackId : Uint4B +class ACTIVATION_CONTEXT_STACK(Structure): + _fields_ = [ + ("ActiveFrame", PVOID), + ("FrameListCache", LIST_ENTRY), + ("Flags", DWORD), + ("NextCookieSequenceNumber", DWORD), + ("StackId", DWORD), +] + +# typedef struct _PROCESSOR_NUMBER { +# WORD Group; +# BYTE Number; +# BYTE Reserved; +# }PROCESSOR_NUMBER, *PPROCESSOR_NUMBER; +class PROCESSOR_NUMBER(Structure): + _fields_ = [ + ("Group", WORD), + 
("Number", BYTE), + ("Reserved", BYTE), +] + +# from http://www.nirsoft.net/kernel_struct/vista/NT_TIB.html +# +# typedef struct _NT_TIB +# { +# PEXCEPTION_REGISTRATION_RECORD ExceptionList; +# PVOID StackBase; +# PVOID StackLimit; +# PVOID SubSystemTib; +# union +# { +# PVOID FiberData; +# ULONG Version; +# }; +# PVOID ArbitraryUserPointer; +# PNT_TIB Self; +# } NT_TIB, *PNT_TIB; +class _NT_TIB_UNION(Union): + _fields_ = [ + ("FiberData", PVOID), + ("Version", ULONG), + ] +class NT_TIB(Structure): + _fields_ = [ + ("ExceptionList", PVOID), # PEXCEPTION_REGISTRATION_RECORD + ("StackBase", PVOID), + ("StackLimit", PVOID), + ("SubSystemTib", PVOID), + ("u", _NT_TIB_UNION), + ("ArbitraryUserPointer", PVOID), + ("Self", PVOID), # PNTTIB + ] + + def __get_FiberData(self): + return self.u.FiberData + def __set_FiberData(self, value): + self.u.FiberData = value + FiberData = property(__get_FiberData, __set_FiberData) + + def __get_Version(self): + return self.u.Version + def __set_Version(self, value): + self.u.Version = value + Version = property(__get_Version, __set_Version) + +PNTTIB = POINTER(NT_TIB) + +# From http://www.nirsoft.net/kernel_struct/vista/EXCEPTION_REGISTRATION_RECORD.html +# +# typedef struct _EXCEPTION_REGISTRATION_RECORD +# { +# PEXCEPTION_REGISTRATION_RECORD Next; +# PEXCEPTION_DISPOSITION Handler; +# } EXCEPTION_REGISTRATION_RECORD, *PEXCEPTION_REGISTRATION_RECORD; +class EXCEPTION_REGISTRATION_RECORD(Structure): + pass + +EXCEPTION_DISPOSITION = DWORD +##PEXCEPTION_DISPOSITION = POINTER(EXCEPTION_DISPOSITION) +##PEXCEPTION_REGISTRATION_RECORD = POINTER(EXCEPTION_REGISTRATION_RECORD) +PEXCEPTION_DISPOSITION = PVOID +PEXCEPTION_REGISTRATION_RECORD = PVOID + +EXCEPTION_REGISTRATION_RECORD._fields_ = [ + ("Next", PEXCEPTION_REGISTRATION_RECORD), + ("Handler", PEXCEPTION_DISPOSITION), +] + +##PPEB = POINTER(PEB) +PPEB = PVOID + +# From http://www.nirsoft.net/kernel_struct/vista/GDI_TEB_BATCH.html +# +# typedef struct _GDI_TEB_BATCH +# { +# ULONG Offset; +# ULONG HDC; +# ULONG Buffer[310]; +# } GDI_TEB_BATCH, *PGDI_TEB_BATCH; +class GDI_TEB_BATCH(Structure): + _fields_ = [ + ("Offset", ULONG), + ("HDC", ULONG), + ("Buffer", ULONG * 310), +] + +# ntdll!_TEB_ACTIVE_FRAME_CONTEXT +# +0x000 Flags : Uint4B +# +0x008 FrameName : Ptr64 Char +class TEB_ACTIVE_FRAME_CONTEXT(Structure): + _fields_ = [ + ("Flags", DWORD), + ("FrameName", LPVOID), # LPCHAR +] +PTEB_ACTIVE_FRAME_CONTEXT = POINTER(TEB_ACTIVE_FRAME_CONTEXT) + +# ntdll!_TEB_ACTIVE_FRAME +# +0x000 Flags : Uint4B +# +0x008 Previous : Ptr64 _TEB_ACTIVE_FRAME +# +0x010 Context : Ptr64 _TEB_ACTIVE_FRAME_CONTEXT +class TEB_ACTIVE_FRAME(Structure): + _fields_ = [ + ("Flags", DWORD), + ("Previous", LPVOID), # PTEB_ACTIVE_FRAME + ("Context", LPVOID), # PTEB_ACTIVE_FRAME_CONTEXT +] +PTEB_ACTIVE_FRAME = POINTER(TEB_ACTIVE_FRAME) + +# SameTebFlags +DbgSafeThunkCall = 1 << 0 +DbgInDebugPrint = 1 << 1 +DbgHasFiberData = 1 << 2 +DbgSkipThreadAttach = 1 << 3 +DbgWerInShipAssertCode = 1 << 4 +DbgRanProcessInit = 1 << 5 +DbgClonedThread = 1 << 6 +DbgSuppressDebugMsg = 1 << 7 +RtlDisableUserStackWalk = 1 << 8 +RtlExceptionAttached = 1 << 9 +RtlInitialThread = 1 << 10 + +# XXX This is quite wrong :P +class _TEB_NT(Structure): + _pack_ = 4 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", HANDLE), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PPEB), + ("LastErrorValue", ULONG), + ("CountOfOwnedCriticalSections", ULONG), + ("CsrClientThread", PVOID), + 
("Win32ThreadInfo", PVOID), + ("User32Reserved", ULONG * 26), + ("UserReserved", ULONG * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", ULONG), + ("FpSoftwareStatusRegister", ULONG), + ("SystemReserved1", PVOID * 54), + ("Spare1", PVOID), + ("ExceptionCode", ULONG), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes1", ULONG * 36), + ("TxFsContext", ULONG), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", PVOID), + ("GdiClientPID", ULONG), + ("GdiClientTID", ULONG), + ("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", PVOID * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", ULONG * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + ("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorDisabled", ULONG), + ("Instrumentation", PVOID * 9), + ("ActivityId", GUID), + ("SubProcessTag", PVOID), + ("EtwLocalData", PVOID), + ("EtwTraceData", PVOID), + ("WinSockData", PVOID), + ("GdiBatchCount", ULONG), + ("SpareBool0", BOOLEAN), + ("SpareBool1", BOOLEAN), + ("SpareBool2", BOOLEAN), + ("IdealProcessor", UCHAR), + ("GuaranteedStackBytes", ULONG), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", ULONG), + ("StackCommit", PVOID), + ("StackCommitMax", PVOID), + ("StackReserved", PVOID), +] + +# not really, but "dt _TEB" in w2k isn't working for me :( +_TEB_2000 = _TEB_NT + +# +0x000 NtTib : _NT_TIB +# +0x01c EnvironmentPointer : Ptr32 Void +# +0x020 ClientId : _CLIENT_ID +# +0x028 ActiveRpcHandle : Ptr32 Void +# +0x02c ThreadLocalStoragePointer : Ptr32 Void +# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB +# +0x034 LastErrorValue : Uint4B +# +0x038 CountOfOwnedCriticalSections : Uint4B +# +0x03c CsrClientThread : Ptr32 Void +# +0x040 Win32ThreadInfo : Ptr32 Void +# +0x044 User32Reserved : [26] Uint4B +# +0x0ac UserReserved : [5] Uint4B +# +0x0c0 WOW32Reserved : Ptr32 Void +# +0x0c4 CurrentLocale : Uint4B +# +0x0c8 FpSoftwareStatusRegister : Uint4B +# +0x0cc SystemReserved1 : [54] Ptr32 Void +# +0x1a4 ExceptionCode : Int4B +# +0x1a8 ActivationContextStack : _ACTIVATION_CONTEXT_STACK +# +0x1bc SpareBytes1 : [24] UChar +# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH +# +0x6b4 RealClientId : _CLIENT_ID +# +0x6bc GdiCachedProcessHandle : Ptr32 Void +# +0x6c0 GdiClientPID : Uint4B +# +0x6c4 GdiClientTID : Uint4B +# +0x6c8 GdiThreadLocalInfo : Ptr32 Void +# +0x6cc Win32ClientInfo : [62] Uint4B +# +0x7c4 glDispatchTable : [233] Ptr32 Void +# +0xb68 glReserved1 : [29] Uint4B +# +0xbdc glReserved2 : Ptr32 Void +# +0xbe0 glSectionInfo : Ptr32 Void +# +0xbe4 glSection : Ptr32 Void +# +0xbe8 glTable : Ptr32 Void +# +0xbec glCurrentRC : Ptr32 Void +# +0xbf0 glContext : Ptr32 Void +# +0xbf4 LastStatusValue : Uint4B +# +0xbf8 StaticUnicodeString : _UNICODE_STRING +# +0xc00 StaticUnicodeBuffer : [261] Uint2B +# +0xe0c DeallocationStack : Ptr32 Void +# +0xe10 TlsSlots : [64] Ptr32 Void +# +0xf10 TlsLinks : _LIST_ENTRY +# +0xf18 Vdm : Ptr32 Void +# +0xf1c ReservedForNtRpc : Ptr32 Void +# +0xf20 DbgSsReserved : [2] Ptr32 Void +# +0xf28 HardErrorsAreDisabled : Uint4B +# +0xf2c Instrumentation : [16] Ptr32 Void +# +0xf6c 
WinSockData : Ptr32 Void +# +0xf70 GdiBatchCount : Uint4B +# +0xf74 InDbgPrint : UChar +# +0xf75 FreeStackOnTermination : UChar +# +0xf76 HasFiberData : UChar +# +0xf77 IdealProcessor : UChar +# +0xf78 Spare3 : Uint4B +# +0xf7c ReservedForPerf : Ptr32 Void +# +0xf80 ReservedForOle : Ptr32 Void +# +0xf84 WaitingOnLoaderLock : Uint4B +# +0xf88 Wx86Thread : _Wx86ThreadState +# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void +# +0xf98 ImpersonationLocale : Uint4B +# +0xf9c IsImpersonating : Uint4B +# +0xfa0 NlsCache : Ptr32 Void +# +0xfa4 pShimData : Ptr32 Void +# +0xfa8 HeapVirtualAffinity : Uint4B +# +0xfac CurrentTransactionHandle : Ptr32 Void +# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME +# +0xfb4 SafeThunkCall : UChar +# +0xfb5 BooleanSpare : [3] UChar +class _TEB_XP(Structure): + _pack_ = 8 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", HANDLE), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PVOID), # PPEB + ("LastErrorValue", DWORD), + ("CountOfOwnedCriticalSections", DWORD), + ("CsrClientThread", PVOID), + ("Win32ThreadInfo", PVOID), + ("User32Reserved", DWORD * 26), + ("UserReserved", DWORD * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", DWORD), + ("FpSoftwareStatusRegister", DWORD), + ("SystemReserved1", PVOID * 54), + ("ExceptionCode", SDWORD), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes1", UCHAR * 24), + ("TxFsContext", DWORD), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", HANDLE), + ("GdiClientPID", DWORD), + ("GdiClientTID", DWORD), + ("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", DWORD * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", DWORD * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + ("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorsAreDisabled", DWORD), + ("Instrumentation", PVOID * 16), + ("WinSockData", PVOID), + ("GdiBatchCount", DWORD), + ("InDbgPrint", BOOLEAN), + ("FreeStackOnTermination", BOOLEAN), + ("HasFiberData", BOOLEAN), + ("IdealProcessor", UCHAR), + ("Spare3", DWORD), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", DWORD), + ("Wx86Thread", Wx86ThreadState), + ("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void + ("ImpersonationLocale", DWORD), + ("IsImpersonating", BOOL), + ("NlsCache", PVOID), + ("pShimData", PVOID), + ("HeapVirtualAffinity", DWORD), + ("CurrentTransactionHandle", HANDLE), + ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME + ("SafeThunkCall", BOOLEAN), + ("BooleanSpare", BOOLEAN * 3), +] + +# +0x000 NtTib : _NT_TIB +# +0x038 EnvironmentPointer : Ptr64 Void +# +0x040 ClientId : _CLIENT_ID +# +0x050 ActiveRpcHandle : Ptr64 Void +# +0x058 ThreadLocalStoragePointer : Ptr64 Void +# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB +# +0x068 LastErrorValue : Uint4B +# +0x06c CountOfOwnedCriticalSections : Uint4B +# +0x070 CsrClientThread : Ptr64 Void +# +0x078 Win32ThreadInfo : Ptr64 Void +# +0x080 User32Reserved : [26] Uint4B +# +0x0e8 UserReserved : [5] Uint4B +# +0x100 WOW32Reserved : Ptr64 Void +# +0x108 CurrentLocale : Uint4B +# 
+0x10c FpSoftwareStatusRegister : Uint4B +# +0x110 SystemReserved1 : [54] Ptr64 Void +# +0x2c0 ExceptionCode : Int4B +# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK +# +0x2d0 SpareBytes1 : [28] UChar +# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH +# +0x7d8 RealClientId : _CLIENT_ID +# +0x7e8 GdiCachedProcessHandle : Ptr64 Void +# +0x7f0 GdiClientPID : Uint4B +# +0x7f4 GdiClientTID : Uint4B +# +0x7f8 GdiThreadLocalInfo : Ptr64 Void +# +0x800 Win32ClientInfo : [62] Uint8B +# +0x9f0 glDispatchTable : [233] Ptr64 Void +# +0x1138 glReserved1 : [29] Uint8B +# +0x1220 glReserved2 : Ptr64 Void +# +0x1228 glSectionInfo : Ptr64 Void +# +0x1230 glSection : Ptr64 Void +# +0x1238 glTable : Ptr64 Void +# +0x1240 glCurrentRC : Ptr64 Void +# +0x1248 glContext : Ptr64 Void +# +0x1250 LastStatusValue : Uint4B +# +0x1258 StaticUnicodeString : _UNICODE_STRING +# +0x1268 StaticUnicodeBuffer : [261] Uint2B +# +0x1478 DeallocationStack : Ptr64 Void +# +0x1480 TlsSlots : [64] Ptr64 Void +# +0x1680 TlsLinks : _LIST_ENTRY +# +0x1690 Vdm : Ptr64 Void +# +0x1698 ReservedForNtRpc : Ptr64 Void +# +0x16a0 DbgSsReserved : [2] Ptr64 Void +# +0x16b0 HardErrorMode : Uint4B +# +0x16b8 Instrumentation : [14] Ptr64 Void +# +0x1728 SubProcessTag : Ptr64 Void +# +0x1730 EtwTraceData : Ptr64 Void +# +0x1738 WinSockData : Ptr64 Void +# +0x1740 GdiBatchCount : Uint4B +# +0x1744 InDbgPrint : UChar +# +0x1745 FreeStackOnTermination : UChar +# +0x1746 HasFiberData : UChar +# +0x1747 IdealProcessor : UChar +# +0x1748 GuaranteedStackBytes : Uint4B +# +0x1750 ReservedForPerf : Ptr64 Void +# +0x1758 ReservedForOle : Ptr64 Void +# +0x1760 WaitingOnLoaderLock : Uint4B +# +0x1768 SparePointer1 : Uint8B +# +0x1770 SoftPatchPtr1 : Uint8B +# +0x1778 SoftPatchPtr2 : Uint8B +# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void +# +0x1788 DeallocationBStore : Ptr64 Void +# +0x1790 BStoreLimit : Ptr64 Void +# +0x1798 ImpersonationLocale : Uint4B +# +0x179c IsImpersonating : Uint4B +# +0x17a0 NlsCache : Ptr64 Void +# +0x17a8 pShimData : Ptr64 Void +# +0x17b0 HeapVirtualAffinity : Uint4B +# +0x17b8 CurrentTransactionHandle : Ptr64 Void +# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME +# +0x17c8 FlsData : Ptr64 Void +# +0x17d0 SafeThunkCall : UChar +# +0x17d1 BooleanSpare : [3] UChar +class _TEB_XP_64(Structure): + _pack_ = 8 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", PVOID), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PVOID), # PPEB + ("LastErrorValue", DWORD), + ("CountOfOwnedCriticalSections", DWORD), + ("CsrClientThread", PVOID), + ("Win32ThreadInfo", PVOID), + ("User32Reserved", DWORD * 26), + ("UserReserved", DWORD * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", DWORD), + ("FpSoftwareStatusRegister", DWORD), + ("SystemReserved1", PVOID * 54), + ("ExceptionCode", SDWORD), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes1", UCHAR * 28), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", HANDLE), + ("GdiClientPID", DWORD), + ("GdiClientTID", DWORD), + ("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", QWORD * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", QWORD * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + 
("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorMode", DWORD), + ("Instrumentation", PVOID * 14), + ("SubProcessTag", PVOID), + ("EtwTraceData", PVOID), + ("WinSockData", PVOID), + ("GdiBatchCount", DWORD), + ("InDbgPrint", BOOLEAN), + ("FreeStackOnTermination", BOOLEAN), + ("HasFiberData", BOOLEAN), + ("IdealProcessor", UCHAR), + ("GuaranteedStackBytes", DWORD), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", DWORD), + ("SparePointer1", PVOID), + ("SoftPatchPtr1", PVOID), + ("SoftPatchPtr2", PVOID), + ("TlsExpansionSlots", PVOID), # Ptr64 Ptr64 Void + ("DeallocationBStore", PVOID), + ("BStoreLimit", PVOID), + ("ImpersonationLocale", DWORD), + ("IsImpersonating", BOOL), + ("NlsCache", PVOID), + ("pShimData", PVOID), + ("HeapVirtualAffinity", DWORD), + ("CurrentTransactionHandle", HANDLE), + ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME + ("FlsData", PVOID), + ("SafeThunkCall", BOOLEAN), + ("BooleanSpare", BOOLEAN * 3), +] + +# +0x000 NtTib : _NT_TIB +# +0x01c EnvironmentPointer : Ptr32 Void +# +0x020 ClientId : _CLIENT_ID +# +0x028 ActiveRpcHandle : Ptr32 Void +# +0x02c ThreadLocalStoragePointer : Ptr32 Void +# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB +# +0x034 LastErrorValue : Uint4B +# +0x038 CountOfOwnedCriticalSections : Uint4B +# +0x03c CsrClientThread : Ptr32 Void +# +0x040 Win32ThreadInfo : Ptr32 Void +# +0x044 User32Reserved : [26] Uint4B +# +0x0ac UserReserved : [5] Uint4B +# +0x0c0 WOW32Reserved : Ptr32 Void +# +0x0c4 CurrentLocale : Uint4B +# +0x0c8 FpSoftwareStatusRegister : Uint4B +# +0x0cc SystemReserved1 : [54] Ptr32 Void +# +0x1a4 ExceptionCode : Int4B +# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK +# +0x1ac SpareBytes1 : [40] UChar +# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH +# +0x6b4 RealClientId : _CLIENT_ID +# +0x6bc GdiCachedProcessHandle : Ptr32 Void +# +0x6c0 GdiClientPID : Uint4B +# +0x6c4 GdiClientTID : Uint4B +# +0x6c8 GdiThreadLocalInfo : Ptr32 Void +# +0x6cc Win32ClientInfo : [62] Uint4B +# +0x7c4 glDispatchTable : [233] Ptr32 Void +# +0xb68 glReserved1 : [29] Uint4B +# +0xbdc glReserved2 : Ptr32 Void +# +0xbe0 glSectionInfo : Ptr32 Void +# +0xbe4 glSection : Ptr32 Void +# +0xbe8 glTable : Ptr32 Void +# +0xbec glCurrentRC : Ptr32 Void +# +0xbf0 glContext : Ptr32 Void +# +0xbf4 LastStatusValue : Uint4B +# +0xbf8 StaticUnicodeString : _UNICODE_STRING +# +0xc00 StaticUnicodeBuffer : [261] Uint2B +# +0xe0c DeallocationStack : Ptr32 Void +# +0xe10 TlsSlots : [64] Ptr32 Void +# +0xf10 TlsLinks : _LIST_ENTRY +# +0xf18 Vdm : Ptr32 Void +# +0xf1c ReservedForNtRpc : Ptr32 Void +# +0xf20 DbgSsReserved : [2] Ptr32 Void +# +0xf28 HardErrorMode : Uint4B +# +0xf2c Instrumentation : [14] Ptr32 Void +# +0xf64 SubProcessTag : Ptr32 Void +# +0xf68 EtwTraceData : Ptr32 Void +# +0xf6c WinSockData : Ptr32 Void +# +0xf70 GdiBatchCount : Uint4B +# +0xf74 InDbgPrint : UChar +# +0xf75 FreeStackOnTermination : UChar +# +0xf76 HasFiberData : UChar +# +0xf77 IdealProcessor : UChar +# +0xf78 GuaranteedStackBytes : Uint4B +# +0xf7c ReservedForPerf : Ptr32 Void +# +0xf80 ReservedForOle : Ptr32 Void +# +0xf84 WaitingOnLoaderLock : Uint4B +# +0xf88 SparePointer1 : Uint4B +# +0xf8c SoftPatchPtr1 : Uint4B +# +0xf90 SoftPatchPtr2 : Uint4B +# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void +# +0xf98 ImpersonationLocale : Uint4B +# +0xf9c IsImpersonating : Uint4B +# +0xfa0 
NlsCache : Ptr32 Void +# +0xfa4 pShimData : Ptr32 Void +# +0xfa8 HeapVirtualAffinity : Uint4B +# +0xfac CurrentTransactionHandle : Ptr32 Void +# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME +# +0xfb4 FlsData : Ptr32 Void +# +0xfb8 SafeThunkCall : UChar +# +0xfb9 BooleanSpare : [3] UChar +class _TEB_2003(Structure): + _pack_ = 8 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", HANDLE), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PVOID), # PPEB + ("LastErrorValue", DWORD), + ("CountOfOwnedCriticalSections", DWORD), + ("CsrClientThread", PVOID), + ("Win32ThreadInfo", PVOID), + ("User32Reserved", DWORD * 26), + ("UserReserved", DWORD * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", DWORD), + ("FpSoftwareStatusRegister", DWORD), + ("SystemReserved1", PVOID * 54), + ("ExceptionCode", SDWORD), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes1", UCHAR * 40), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", HANDLE), + ("GdiClientPID", DWORD), + ("GdiClientTID", DWORD), + ("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", DWORD * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", DWORD * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + ("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorMode", DWORD), + ("Instrumentation", PVOID * 14), + ("SubProcessTag", PVOID), + ("EtwTraceData", PVOID), + ("WinSockData", PVOID), + ("GdiBatchCount", DWORD), + ("InDbgPrint", BOOLEAN), + ("FreeStackOnTermination", BOOLEAN), + ("HasFiberData", BOOLEAN), + ("IdealProcessor", UCHAR), + ("GuaranteedStackBytes", DWORD), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", DWORD), + ("SparePointer1", PVOID), + ("SoftPatchPtr1", PVOID), + ("SoftPatchPtr2", PVOID), + ("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void + ("ImpersonationLocale", DWORD), + ("IsImpersonating", BOOL), + ("NlsCache", PVOID), + ("pShimData", PVOID), + ("HeapVirtualAffinity", DWORD), + ("CurrentTransactionHandle", HANDLE), + ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME + ("FlsData", PVOID), + ("SafeThunkCall", BOOLEAN), + ("BooleanSpare", BOOLEAN * 3), +] + +_TEB_2003_64 = _TEB_XP_64 +_TEB_2003_R2 = _TEB_2003 +_TEB_2003_R2_64 = _TEB_2003_64 + +# +0x000 NtTib : _NT_TIB +# +0x01c EnvironmentPointer : Ptr32 Void +# +0x020 ClientId : _CLIENT_ID +# +0x028 ActiveRpcHandle : Ptr32 Void +# +0x02c ThreadLocalStoragePointer : Ptr32 Void +# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB +# +0x034 LastErrorValue : Uint4B +# +0x038 CountOfOwnedCriticalSections : Uint4B +# +0x03c CsrClientThread : Ptr32 Void +# +0x040 Win32ThreadInfo : Ptr32 Void +# +0x044 User32Reserved : [26] Uint4B +# +0x0ac UserReserved : [5] Uint4B +# +0x0c0 WOW32Reserved : Ptr32 Void +# +0x0c4 CurrentLocale : Uint4B +# +0x0c8 FpSoftwareStatusRegister : Uint4B +# +0x0cc SystemReserved1 : [54] Ptr32 Void +# +0x1a4 ExceptionCode : Int4B +# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK +# +0x1ac SpareBytes1 : [36] UChar +# +0x1d0 TxFsContext : Uint4B +# +0x1d4 GdiTebBatch 
: _GDI_TEB_BATCH +# +0x6b4 RealClientId : _CLIENT_ID +# +0x6bc GdiCachedProcessHandle : Ptr32 Void +# +0x6c0 GdiClientPID : Uint4B +# +0x6c4 GdiClientTID : Uint4B +# +0x6c8 GdiThreadLocalInfo : Ptr32 Void +# +0x6cc Win32ClientInfo : [62] Uint4B +# +0x7c4 glDispatchTable : [233] Ptr32 Void +# +0xb68 glReserved1 : [29] Uint4B +# +0xbdc glReserved2 : Ptr32 Void +# +0xbe0 glSectionInfo : Ptr32 Void +# +0xbe4 glSection : Ptr32 Void +# +0xbe8 glTable : Ptr32 Void +# +0xbec glCurrentRC : Ptr32 Void +# +0xbf0 glContext : Ptr32 Void +# +0xbf4 LastStatusValue : Uint4B +# +0xbf8 StaticUnicodeString : _UNICODE_STRING +# +0xc00 StaticUnicodeBuffer : [261] Wchar +# +0xe0c DeallocationStack : Ptr32 Void +# +0xe10 TlsSlots : [64] Ptr32 Void +# +0xf10 TlsLinks : _LIST_ENTRY +# +0xf18 Vdm : Ptr32 Void +# +0xf1c ReservedForNtRpc : Ptr32 Void +# +0xf20 DbgSsReserved : [2] Ptr32 Void +# +0xf28 HardErrorMode : Uint4B +# +0xf2c Instrumentation : [9] Ptr32 Void +# +0xf50 ActivityId : _GUID +# +0xf60 SubProcessTag : Ptr32 Void +# +0xf64 EtwLocalData : Ptr32 Void +# +0xf68 EtwTraceData : Ptr32 Void +# +0xf6c WinSockData : Ptr32 Void +# +0xf70 GdiBatchCount : Uint4B +# +0xf74 SpareBool0 : UChar +# +0xf75 SpareBool1 : UChar +# +0xf76 SpareBool2 : UChar +# +0xf77 IdealProcessor : UChar +# +0xf78 GuaranteedStackBytes : Uint4B +# +0xf7c ReservedForPerf : Ptr32 Void +# +0xf80 ReservedForOle : Ptr32 Void +# +0xf84 WaitingOnLoaderLock : Uint4B +# +0xf88 SavedPriorityState : Ptr32 Void +# +0xf8c SoftPatchPtr1 : Uint4B +# +0xf90 ThreadPoolData : Ptr32 Void +# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void +# +0xf98 ImpersonationLocale : Uint4B +# +0xf9c IsImpersonating : Uint4B +# +0xfa0 NlsCache : Ptr32 Void +# +0xfa4 pShimData : Ptr32 Void +# +0xfa8 HeapVirtualAffinity : Uint4B +# +0xfac CurrentTransactionHandle : Ptr32 Void +# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME +# +0xfb4 FlsData : Ptr32 Void +# +0xfb8 PreferredLanguages : Ptr32 Void +# +0xfbc UserPrefLanguages : Ptr32 Void +# +0xfc0 MergedPrefLanguages : Ptr32 Void +# +0xfc4 MuiImpersonation : Uint4B +# +0xfc8 CrossTebFlags : Uint2B +# +0xfc8 SpareCrossTebBits : Pos 0, 16 Bits +# +0xfca SameTebFlags : Uint2B +# +0xfca DbgSafeThunkCall : Pos 0, 1 Bit +# +0xfca DbgInDebugPrint : Pos 1, 1 Bit +# +0xfca DbgHasFiberData : Pos 2, 1 Bit +# +0xfca DbgSkipThreadAttach : Pos 3, 1 Bit +# +0xfca DbgWerInShipAssertCode : Pos 4, 1 Bit +# +0xfca DbgRanProcessInit : Pos 5, 1 Bit +# +0xfca DbgClonedThread : Pos 6, 1 Bit +# +0xfca DbgSuppressDebugMsg : Pos 7, 1 Bit +# +0xfca RtlDisableUserStackWalk : Pos 8, 1 Bit +# +0xfca RtlExceptionAttached : Pos 9, 1 Bit +# +0xfca SpareSameTebBits : Pos 10, 6 Bits +# +0xfcc TxnScopeEnterCallback : Ptr32 Void +# +0xfd0 TxnScopeExitCallback : Ptr32 Void +# +0xfd4 TxnScopeContext : Ptr32 Void +# +0xfd8 LockCount : Uint4B +# +0xfdc ProcessRundown : Uint4B +# +0xfe0 LastSwitchTime : Uint8B +# +0xfe8 TotalSwitchOutTime : Uint8B +# +0xff0 WaitReasonBitMap : _LARGE_INTEGER +class _TEB_2008(Structure): + _pack_ = 8 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", HANDLE), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PVOID), # PPEB + ("LastErrorValue", DWORD), + ("CountOfOwnedCriticalSections", DWORD), + ("CsrClientThread", PVOID), + ("Win32ThreadInfo", PVOID), + ("User32Reserved", DWORD * 26), + ("UserReserved", DWORD * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", DWORD), + ("FpSoftwareStatusRegister", DWORD), + 
("SystemReserved1", PVOID * 54), + ("ExceptionCode", SDWORD), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes1", UCHAR * 36), + ("TxFsContext", DWORD), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", HANDLE), + ("GdiClientPID", DWORD), + ("GdiClientTID", DWORD), + ("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", DWORD * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", DWORD * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + ("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorMode", DWORD), + ("Instrumentation", PVOID * 9), + ("ActivityId", GUID), + ("SubProcessTag", PVOID), + ("EtwLocalData", PVOID), + ("EtwTraceData", PVOID), + ("WinSockData", PVOID), + ("GdiBatchCount", DWORD), + ("SpareBool0", BOOLEAN), + ("SpareBool1", BOOLEAN), + ("SpareBool2", BOOLEAN), + ("IdealProcessor", UCHAR), + ("GuaranteedStackBytes", DWORD), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", DWORD), + ("SavedPriorityState", PVOID), + ("SoftPatchPtr1", PVOID), + ("ThreadPoolData", PVOID), + ("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void + ("ImpersonationLocale", DWORD), + ("IsImpersonating", BOOL), + ("NlsCache", PVOID), + ("pShimData", PVOID), + ("HeapVirtualAffinity", DWORD), + ("CurrentTransactionHandle", HANDLE), + ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME + ("FlsData", PVOID), + ("PreferredLanguages", PVOID), + ("UserPrefLanguages", PVOID), + ("MergedPrefLanguages", PVOID), + ("MuiImpersonation", BOOL), + ("CrossTebFlags", WORD), + ("SameTebFlags", WORD), + ("TxnScopeEnterCallback", PVOID), + ("TxnScopeExitCallback", PVOID), + ("TxnScopeContext", PVOID), + ("LockCount", DWORD), + ("ProcessRundown", DWORD), + ("LastSwitchTime", QWORD), + ("TotalSwitchOutTime", QWORD), + ("WaitReasonBitMap", LONGLONG), # LARGE_INTEGER +] + +# +0x000 NtTib : _NT_TIB +# +0x038 EnvironmentPointer : Ptr64 Void +# +0x040 ClientId : _CLIENT_ID +# +0x050 ActiveRpcHandle : Ptr64 Void +# +0x058 ThreadLocalStoragePointer : Ptr64 Void +# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB +# +0x068 LastErrorValue : Uint4B +# +0x06c CountOfOwnedCriticalSections : Uint4B +# +0x070 CsrClientThread : Ptr64 Void +# +0x078 Win32ThreadInfo : Ptr64 Void +# +0x080 User32Reserved : [26] Uint4B +# +0x0e8 UserReserved : [5] Uint4B +# +0x100 WOW32Reserved : Ptr64 Void +# +0x108 CurrentLocale : Uint4B +# +0x10c FpSoftwareStatusRegister : Uint4B +# +0x110 SystemReserved1 : [54] Ptr64 Void +# +0x2c0 ExceptionCode : Int4B +# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK +# +0x2d0 SpareBytes1 : [24] UChar +# +0x2e8 TxFsContext : Uint4B +# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH +# +0x7d8 RealClientId : _CLIENT_ID +# +0x7e8 GdiCachedProcessHandle : Ptr64 Void +# +0x7f0 GdiClientPID : Uint4B +# +0x7f4 GdiClientTID : Uint4B +# +0x7f8 GdiThreadLocalInfo : Ptr64 Void +# +0x800 Win32ClientInfo : [62] Uint8B +# +0x9f0 glDispatchTable : [233] Ptr64 Void +# +0x1138 glReserved1 : [29] Uint8B +# +0x1220 glReserved2 : Ptr64 Void +# +0x1228 glSectionInfo : Ptr64 Void +# +0x1230 glSection : Ptr64 Void +# +0x1238 glTable : Ptr64 Void +# +0x1240 glCurrentRC : Ptr64 Void +# 
+0x1248 glContext : Ptr64 Void +# +0x1250 LastStatusValue : Uint4B +# +0x1258 StaticUnicodeString : _UNICODE_STRING +# +0x1268 StaticUnicodeBuffer : [261] Wchar +# +0x1478 DeallocationStack : Ptr64 Void +# +0x1480 TlsSlots : [64] Ptr64 Void +# +0x1680 TlsLinks : _LIST_ENTRY +# +0x1690 Vdm : Ptr64 Void +# +0x1698 ReservedForNtRpc : Ptr64 Void +# +0x16a0 DbgSsReserved : [2] Ptr64 Void +# +0x16b0 HardErrorMode : Uint4B +# +0x16b8 Instrumentation : [11] Ptr64 Void +# +0x1710 ActivityId : _GUID +# +0x1720 SubProcessTag : Ptr64 Void +# +0x1728 EtwLocalData : Ptr64 Void +# +0x1730 EtwTraceData : Ptr64 Void +# +0x1738 WinSockData : Ptr64 Void +# +0x1740 GdiBatchCount : Uint4B +# +0x1744 SpareBool0 : UChar +# +0x1745 SpareBool1 : UChar +# +0x1746 SpareBool2 : UChar +# +0x1747 IdealProcessor : UChar +# +0x1748 GuaranteedStackBytes : Uint4B +# +0x1750 ReservedForPerf : Ptr64 Void +# +0x1758 ReservedForOle : Ptr64 Void +# +0x1760 WaitingOnLoaderLock : Uint4B +# +0x1768 SavedPriorityState : Ptr64 Void +# +0x1770 SoftPatchPtr1 : Uint8B +# +0x1778 ThreadPoolData : Ptr64 Void +# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void +# +0x1788 DeallocationBStore : Ptr64 Void +# +0x1790 BStoreLimit : Ptr64 Void +# +0x1798 ImpersonationLocale : Uint4B +# +0x179c IsImpersonating : Uint4B +# +0x17a0 NlsCache : Ptr64 Void +# +0x17a8 pShimData : Ptr64 Void +# +0x17b0 HeapVirtualAffinity : Uint4B +# +0x17b8 CurrentTransactionHandle : Ptr64 Void +# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME +# +0x17c8 FlsData : Ptr64 Void +# +0x17d0 PreferredLanguages : Ptr64 Void +# +0x17d8 UserPrefLanguages : Ptr64 Void +# +0x17e0 MergedPrefLanguages : Ptr64 Void +# +0x17e8 MuiImpersonation : Uint4B +# +0x17ec CrossTebFlags : Uint2B +# +0x17ec SpareCrossTebBits : Pos 0, 16 Bits +# +0x17ee SameTebFlags : Uint2B +# +0x17ee DbgSafeThunkCall : Pos 0, 1 Bit +# +0x17ee DbgInDebugPrint : Pos 1, 1 Bit +# +0x17ee DbgHasFiberData : Pos 2, 1 Bit +# +0x17ee DbgSkipThreadAttach : Pos 3, 1 Bit +# +0x17ee DbgWerInShipAssertCode : Pos 4, 1 Bit +# +0x17ee DbgRanProcessInit : Pos 5, 1 Bit +# +0x17ee DbgClonedThread : Pos 6, 1 Bit +# +0x17ee DbgSuppressDebugMsg : Pos 7, 1 Bit +# +0x17ee RtlDisableUserStackWalk : Pos 8, 1 Bit +# +0x17ee RtlExceptionAttached : Pos 9, 1 Bit +# +0x17ee SpareSameTebBits : Pos 10, 6 Bits +# +0x17f0 TxnScopeEnterCallback : Ptr64 Void +# +0x17f8 TxnScopeExitCallback : Ptr64 Void +# +0x1800 TxnScopeContext : Ptr64 Void +# +0x1808 LockCount : Uint4B +# +0x180c ProcessRundown : Uint4B +# +0x1810 LastSwitchTime : Uint8B +# +0x1818 TotalSwitchOutTime : Uint8B +# +0x1820 WaitReasonBitMap : _LARGE_INTEGER +class _TEB_2008_64(Structure): + _pack_ = 8 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", HANDLE), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PVOID), # PPEB + ("LastErrorValue", DWORD), + ("CountOfOwnedCriticalSections", DWORD), + ("CsrClientThread", PVOID), + ("Win32ThreadInfo", PVOID), + ("User32Reserved", DWORD * 26), + ("UserReserved", DWORD * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", DWORD), + ("FpSoftwareStatusRegister", DWORD), + ("SystemReserved1", PVOID * 54), + ("ExceptionCode", SDWORD), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes1", UCHAR * 24), + ("TxFsContext", DWORD), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", HANDLE), + ("GdiClientPID", DWORD), + ("GdiClientTID", DWORD), + 
("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", QWORD * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", QWORD * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + ("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorMode", DWORD), + ("Instrumentation", PVOID * 11), + ("ActivityId", GUID), + ("SubProcessTag", PVOID), + ("EtwLocalData", PVOID), + ("EtwTraceData", PVOID), + ("WinSockData", PVOID), + ("GdiBatchCount", DWORD), + ("SpareBool0", BOOLEAN), + ("SpareBool1", BOOLEAN), + ("SpareBool2", BOOLEAN), + ("IdealProcessor", UCHAR), + ("GuaranteedStackBytes", DWORD), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", DWORD), + ("SavedPriorityState", PVOID), + ("SoftPatchPtr1", PVOID), + ("ThreadPoolData", PVOID), + ("TlsExpansionSlots", PVOID), # Ptr64 Ptr64 Void + ("DeallocationBStore", PVOID), + ("BStoreLimit", PVOID), + ("ImpersonationLocale", DWORD), + ("IsImpersonating", BOOL), + ("NlsCache", PVOID), + ("pShimData", PVOID), + ("HeapVirtualAffinity", DWORD), + ("CurrentTransactionHandle", HANDLE), + ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME + ("FlsData", PVOID), + ("PreferredLanguages", PVOID), + ("UserPrefLanguages", PVOID), + ("MergedPrefLanguages", PVOID), + ("MuiImpersonation", BOOL), + ("CrossTebFlags", WORD), + ("SameTebFlags", WORD), + ("TxnScopeEnterCallback", PVOID), + ("TxnScopeExitCallback", PVOID), + ("TxnScopeContext", PVOID), + ("LockCount", DWORD), + ("ProcessRundown", DWORD), + ("LastSwitchTime", QWORD), + ("TotalSwitchOutTime", QWORD), + ("WaitReasonBitMap", LONGLONG), # LARGE_INTEGER +] + +# +0x000 NtTib : _NT_TIB +# +0x01c EnvironmentPointer : Ptr32 Void +# +0x020 ClientId : _CLIENT_ID +# +0x028 ActiveRpcHandle : Ptr32 Void +# +0x02c ThreadLocalStoragePointer : Ptr32 Void +# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB +# +0x034 LastErrorValue : Uint4B +# +0x038 CountOfOwnedCriticalSections : Uint4B +# +0x03c CsrClientThread : Ptr32 Void +# +0x040 Win32ThreadInfo : Ptr32 Void +# +0x044 User32Reserved : [26] Uint4B +# +0x0ac UserReserved : [5] Uint4B +# +0x0c0 WOW32Reserved : Ptr32 Void +# +0x0c4 CurrentLocale : Uint4B +# +0x0c8 FpSoftwareStatusRegister : Uint4B +# +0x0cc SystemReserved1 : [54] Ptr32 Void +# +0x1a4 ExceptionCode : Int4B +# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK +# +0x1ac SpareBytes : [36] UChar +# +0x1d0 TxFsContext : Uint4B +# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH +# +0x6b4 RealClientId : _CLIENT_ID +# +0x6bc GdiCachedProcessHandle : Ptr32 Void +# +0x6c0 GdiClientPID : Uint4B +# +0x6c4 GdiClientTID : Uint4B +# +0x6c8 GdiThreadLocalInfo : Ptr32 Void +# +0x6cc Win32ClientInfo : [62] Uint4B +# +0x7c4 glDispatchTable : [233] Ptr32 Void +# +0xb68 glReserved1 : [29] Uint4B +# +0xbdc glReserved2 : Ptr32 Void +# +0xbe0 glSectionInfo : Ptr32 Void +# +0xbe4 glSection : Ptr32 Void +# +0xbe8 glTable : Ptr32 Void +# +0xbec glCurrentRC : Ptr32 Void +# +0xbf0 glContext : Ptr32 Void +# +0xbf4 LastStatusValue : Uint4B +# +0xbf8 StaticUnicodeString : _UNICODE_STRING +# +0xc00 StaticUnicodeBuffer : [261] Wchar +# +0xe0c DeallocationStack : Ptr32 Void +# +0xe10 TlsSlots : [64] Ptr32 Void +# +0xf10 TlsLinks : _LIST_ENTRY +# +0xf18 Vdm : Ptr32 Void +# 
+0xf1c ReservedForNtRpc : Ptr32 Void +# +0xf20 DbgSsReserved : [2] Ptr32 Void +# +0xf28 HardErrorMode : Uint4B +# +0xf2c Instrumentation : [9] Ptr32 Void +# +0xf50 ActivityId : _GUID +# +0xf60 SubProcessTag : Ptr32 Void +# +0xf64 EtwLocalData : Ptr32 Void +# +0xf68 EtwTraceData : Ptr32 Void +# +0xf6c WinSockData : Ptr32 Void +# +0xf70 GdiBatchCount : Uint4B +# +0xf74 CurrentIdealProcessor : _PROCESSOR_NUMBER +# +0xf74 IdealProcessorValue : Uint4B +# +0xf74 ReservedPad0 : UChar +# +0xf75 ReservedPad1 : UChar +# +0xf76 ReservedPad2 : UChar +# +0xf77 IdealProcessor : UChar +# +0xf78 GuaranteedStackBytes : Uint4B +# +0xf7c ReservedForPerf : Ptr32 Void +# +0xf80 ReservedForOle : Ptr32 Void +# +0xf84 WaitingOnLoaderLock : Uint4B +# +0xf88 SavedPriorityState : Ptr32 Void +# +0xf8c SoftPatchPtr1 : Uint4B +# +0xf90 ThreadPoolData : Ptr32 Void +# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void +# +0xf98 MuiGeneration : Uint4B +# +0xf9c IsImpersonating : Uint4B +# +0xfa0 NlsCache : Ptr32 Void +# +0xfa4 pShimData : Ptr32 Void +# +0xfa8 HeapVirtualAffinity : Uint4B +# +0xfac CurrentTransactionHandle : Ptr32 Void +# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME +# +0xfb4 FlsData : Ptr32 Void +# +0xfb8 PreferredLanguages : Ptr32 Void +# +0xfbc UserPrefLanguages : Ptr32 Void +# +0xfc0 MergedPrefLanguages : Ptr32 Void +# +0xfc4 MuiImpersonation : Uint4B +# +0xfc8 CrossTebFlags : Uint2B +# +0xfc8 SpareCrossTebBits : Pos 0, 16 Bits +# +0xfca SameTebFlags : Uint2B +# +0xfca SafeThunkCall : Pos 0, 1 Bit +# +0xfca InDebugPrint : Pos 1, 1 Bit +# +0xfca HasFiberData : Pos 2, 1 Bit +# +0xfca SkipThreadAttach : Pos 3, 1 Bit +# +0xfca WerInShipAssertCode : Pos 4, 1 Bit +# +0xfca RanProcessInit : Pos 5, 1 Bit +# +0xfca ClonedThread : Pos 6, 1 Bit +# +0xfca SuppressDebugMsg : Pos 7, 1 Bit +# +0xfca DisableUserStackWalk : Pos 8, 1 Bit +# +0xfca RtlExceptionAttached : Pos 9, 1 Bit +# +0xfca InitialThread : Pos 10, 1 Bit +# +0xfca SpareSameTebBits : Pos 11, 5 Bits +# +0xfcc TxnScopeEnterCallback : Ptr32 Void +# +0xfd0 TxnScopeExitCallback : Ptr32 Void +# +0xfd4 TxnScopeContext : Ptr32 Void +# +0xfd8 LockCount : Uint4B +# +0xfdc SpareUlong0 : Uint4B +# +0xfe0 ResourceRetValue : Ptr32 Void +class _TEB_2008_R2(Structure): + _pack_ = 8 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", HANDLE), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PVOID), # PPEB + ("LastErrorValue", DWORD), + ("CountOfOwnedCriticalSections", DWORD), + ("CsrClientThread", PVOID), + ("Win32ThreadInfo", PVOID), + ("User32Reserved", DWORD * 26), + ("UserReserved", DWORD * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", DWORD), + ("FpSoftwareStatusRegister", DWORD), + ("SystemReserved1", PVOID * 54), + ("ExceptionCode", SDWORD), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes", UCHAR * 36), + ("TxFsContext", DWORD), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", HANDLE), + ("GdiClientPID", DWORD), + ("GdiClientTID", DWORD), + ("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", DWORD * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", DWORD * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + ("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", 
PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorMode", DWORD), + ("Instrumentation", PVOID * 9), + ("ActivityId", GUID), + ("SubProcessTag", PVOID), + ("EtwLocalData", PVOID), + ("EtwTraceData", PVOID), + ("WinSockData", PVOID), + ("GdiBatchCount", DWORD), + ("CurrentIdealProcessor", PROCESSOR_NUMBER), + ("IdealProcessorValue", DWORD), + ("ReservedPad0", UCHAR), + ("ReservedPad1", UCHAR), + ("ReservedPad2", UCHAR), + ("IdealProcessor", UCHAR), + ("GuaranteedStackBytes", DWORD), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", DWORD), + ("SavedPriorityState", PVOID), + ("SoftPatchPtr1", PVOID), + ("ThreadPoolData", PVOID), + ("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void + ("MuiGeneration", DWORD), + ("IsImpersonating", BOOL), + ("NlsCache", PVOID), + ("pShimData", PVOID), + ("HeapVirtualAffinity", DWORD), + ("CurrentTransactionHandle", HANDLE), + ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME + ("FlsData", PVOID), + ("PreferredLanguages", PVOID), + ("UserPrefLanguages", PVOID), + ("MergedPrefLanguages", PVOID), + ("MuiImpersonation", BOOL), + ("CrossTebFlags", WORD), + ("SameTebFlags", WORD), + ("TxnScopeEnterCallback", PVOID), + ("TxnScopeExitCallback", PVOID), + ("TxnScopeContext", PVOID), + ("LockCount", DWORD), + ("SpareUlong0", ULONG), + ("ResourceRetValue", PVOID), +] + +# +0x000 NtTib : _NT_TIB +# +0x038 EnvironmentPointer : Ptr64 Void +# +0x040 ClientId : _CLIENT_ID +# +0x050 ActiveRpcHandle : Ptr64 Void +# +0x058 ThreadLocalStoragePointer : Ptr64 Void +# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB +# +0x068 LastErrorValue : Uint4B +# +0x06c CountOfOwnedCriticalSections : Uint4B +# +0x070 CsrClientThread : Ptr64 Void +# +0x078 Win32ThreadInfo : Ptr64 Void +# +0x080 User32Reserved : [26] Uint4B +# +0x0e8 UserReserved : [5] Uint4B +# +0x100 WOW32Reserved : Ptr64 Void +# +0x108 CurrentLocale : Uint4B +# +0x10c FpSoftwareStatusRegister : Uint4B +# +0x110 SystemReserved1 : [54] Ptr64 Void +# +0x2c0 ExceptionCode : Int4B +# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK +# +0x2d0 SpareBytes : [24] UChar +# +0x2e8 TxFsContext : Uint4B +# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH +# +0x7d8 RealClientId : _CLIENT_ID +# +0x7e8 GdiCachedProcessHandle : Ptr64 Void +# +0x7f0 GdiClientPID : Uint4B +# +0x7f4 GdiClientTID : Uint4B +# +0x7f8 GdiThreadLocalInfo : Ptr64 Void +# +0x800 Win32ClientInfo : [62] Uint8B +# +0x9f0 glDispatchTable : [233] Ptr64 Void +# +0x1138 glReserved1 : [29] Uint8B +# +0x1220 glReserved2 : Ptr64 Void +# +0x1228 glSectionInfo : Ptr64 Void +# +0x1230 glSection : Ptr64 Void +# +0x1238 glTable : Ptr64 Void +# +0x1240 glCurrentRC : Ptr64 Void +# +0x1248 glContext : Ptr64 Void +# +0x1250 LastStatusValue : Uint4B +# +0x1258 StaticUnicodeString : _UNICODE_STRING +# +0x1268 StaticUnicodeBuffer : [261] Wchar +# +0x1478 DeallocationStack : Ptr64 Void +# +0x1480 TlsSlots : [64] Ptr64 Void +# +0x1680 TlsLinks : _LIST_ENTRY +# +0x1690 Vdm : Ptr64 Void +# +0x1698 ReservedForNtRpc : Ptr64 Void +# +0x16a0 DbgSsReserved : [2] Ptr64 Void +# +0x16b0 HardErrorMode : Uint4B +# +0x16b8 Instrumentation : [11] Ptr64 Void +# +0x1710 ActivityId : _GUID +# +0x1720 SubProcessTag : Ptr64 Void +# +0x1728 EtwLocalData : Ptr64 Void +# +0x1730 EtwTraceData : Ptr64 Void +# +0x1738 WinSockData : Ptr64 Void +# +0x1740 GdiBatchCount : Uint4B +# +0x1744 CurrentIdealProcessor : _PROCESSOR_NUMBER +# +0x1744 IdealProcessorValue : Uint4B +# +0x1744 
ReservedPad0 : UChar +# +0x1745 ReservedPad1 : UChar +# +0x1746 ReservedPad2 : UChar +# +0x1747 IdealProcessor : UChar +# +0x1748 GuaranteedStackBytes : Uint4B +# +0x1750 ReservedForPerf : Ptr64 Void +# +0x1758 ReservedForOle : Ptr64 Void +# +0x1760 WaitingOnLoaderLock : Uint4B +# +0x1768 SavedPriorityState : Ptr64 Void +# +0x1770 SoftPatchPtr1 : Uint8B +# +0x1778 ThreadPoolData : Ptr64 Void +# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void +# +0x1788 DeallocationBStore : Ptr64 Void +# +0x1790 BStoreLimit : Ptr64 Void +# +0x1798 MuiGeneration : Uint4B +# +0x179c IsImpersonating : Uint4B +# +0x17a0 NlsCache : Ptr64 Void +# +0x17a8 pShimData : Ptr64 Void +# +0x17b0 HeapVirtualAffinity : Uint4B +# +0x17b8 CurrentTransactionHandle : Ptr64 Void +# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME +# +0x17c8 FlsData : Ptr64 Void +# +0x17d0 PreferredLanguages : Ptr64 Void +# +0x17d8 UserPrefLanguages : Ptr64 Void +# +0x17e0 MergedPrefLanguages : Ptr64 Void +# +0x17e8 MuiImpersonation : Uint4B +# +0x17ec CrossTebFlags : Uint2B +# +0x17ec SpareCrossTebBits : Pos 0, 16 Bits +# +0x17ee SameTebFlags : Uint2B +# +0x17ee SafeThunkCall : Pos 0, 1 Bit +# +0x17ee InDebugPrint : Pos 1, 1 Bit +# +0x17ee HasFiberData : Pos 2, 1 Bit +# +0x17ee SkipThreadAttach : Pos 3, 1 Bit +# +0x17ee WerInShipAssertCode : Pos 4, 1 Bit +# +0x17ee RanProcessInit : Pos 5, 1 Bit +# +0x17ee ClonedThread : Pos 6, 1 Bit +# +0x17ee SuppressDebugMsg : Pos 7, 1 Bit +# +0x17ee DisableUserStackWalk : Pos 8, 1 Bit +# +0x17ee RtlExceptionAttached : Pos 9, 1 Bit +# +0x17ee InitialThread : Pos 10, 1 Bit +# +0x17ee SpareSameTebBits : Pos 11, 5 Bits +# +0x17f0 TxnScopeEnterCallback : Ptr64 Void +# +0x17f8 TxnScopeExitCallback : Ptr64 Void +# +0x1800 TxnScopeContext : Ptr64 Void +# +0x1808 LockCount : Uint4B +# +0x180c SpareUlong0 : Uint4B +# +0x1810 ResourceRetValue : Ptr64 Void +class _TEB_2008_R2_64(Structure): + _pack_ = 8 + _fields_ = [ + ("NtTib", NT_TIB), + ("EnvironmentPointer", PVOID), + ("ClientId", CLIENT_ID), + ("ActiveRpcHandle", HANDLE), + ("ThreadLocalStoragePointer", PVOID), + ("ProcessEnvironmentBlock", PVOID), # PPEB + ("LastErrorValue", DWORD), + ("CountOfOwnedCriticalSections", DWORD), + ("CsrClientThread", PVOID), + ("Win32ThreadInfo", PVOID), + ("User32Reserved", DWORD * 26), + ("UserReserved", DWORD * 5), + ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode + ("CurrentLocale", DWORD), + ("FpSoftwareStatusRegister", DWORD), + ("SystemReserved1", PVOID * 54), + ("ExceptionCode", SDWORD), + ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK + ("SpareBytes", UCHAR * 24), + ("TxFsContext", DWORD), + ("GdiTebBatch", GDI_TEB_BATCH), + ("RealClientId", CLIENT_ID), + ("GdiCachedProcessHandle", HANDLE), + ("GdiClientPID", DWORD), + ("GdiClientTID", DWORD), + ("GdiThreadLocalInfo", PVOID), + ("Win32ClientInfo", DWORD * 62), + ("glDispatchTable", PVOID * 233), + ("glReserved1", QWORD * 29), + ("glReserved2", PVOID), + ("glSectionInfo", PVOID), + ("glSection", PVOID), + ("glTable", PVOID), + ("glCurrentRC", PVOID), + ("glContext", PVOID), + ("LastStatusValue", NTSTATUS), + ("StaticUnicodeString", UNICODE_STRING), + ("StaticUnicodeBuffer", WCHAR * 261), + ("DeallocationStack", PVOID), + ("TlsSlots", PVOID * 64), + ("TlsLinks", LIST_ENTRY), + ("Vdm", PVOID), + ("ReservedForNtRpc", PVOID), + ("DbgSsReserved", PVOID * 2), + ("HardErrorMode", DWORD), + ("Instrumentation", PVOID * 11), + ("ActivityId", GUID), + ("SubProcessTag", PVOID), + ("EtwLocalData", PVOID), + ("EtwTraceData", PVOID), + ("WinSockData", 
PVOID), + ("GdiBatchCount", DWORD), + ("CurrentIdealProcessor", PROCESSOR_NUMBER), + ("IdealProcessorValue", DWORD), + ("ReservedPad0", UCHAR), + ("ReservedPad1", UCHAR), + ("ReservedPad2", UCHAR), + ("IdealProcessor", UCHAR), + ("GuaranteedStackBytes", DWORD), + ("ReservedForPerf", PVOID), + ("ReservedForOle", PVOID), + ("WaitingOnLoaderLock", DWORD), + ("SavedPriorityState", PVOID), + ("SoftPatchPtr1", PVOID), + ("ThreadPoolData", PVOID), + ("TlsExpansionSlots", PVOID), # Ptr64 Ptr64 Void + ("DeallocationBStore", PVOID), + ("BStoreLimit", PVOID), + ("MuiGeneration", DWORD), + ("IsImpersonating", BOOL), + ("NlsCache", PVOID), + ("pShimData", PVOID), + ("HeapVirtualAffinity", DWORD), + ("CurrentTransactionHandle", HANDLE), + ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME + ("FlsData", PVOID), + ("PreferredLanguages", PVOID), + ("UserPrefLanguages", PVOID), + ("MergedPrefLanguages", PVOID), + ("MuiImpersonation", BOOL), + ("CrossTebFlags", WORD), + ("SameTebFlags", WORD), + ("TxnScopeEnterCallback", PVOID), + ("TxnScopeExitCallback", PVOID), + ("TxnScopeContext", PVOID), + ("LockCount", DWORD), + ("SpareUlong0", ULONG), + ("ResourceRetValue", PVOID), +] + +_TEB_Vista = _TEB_2008 +_TEB_Vista_64 = _TEB_2008_64 +_TEB_W7 = _TEB_2008_R2 +_TEB_W7_64 = _TEB_2008_R2_64 + +# Use the correct TEB structure definition. +# Defaults to the latest Windows version. +class TEB(Structure): + _pack_ = 8 + if os == 'Windows NT': + _pack_ = _TEB_NT._pack_ + _fields_ = _TEB_NT._fields_ + elif os == 'Windows 2000': + _pack_ = _TEB_2000._pack_ + _fields_ = _TEB_2000._fields_ + elif os == 'Windows XP': + _fields_ = _TEB_XP._fields_ + elif os == 'Windows XP (64 bits)': + _fields_ = _TEB_XP_64._fields_ + elif os == 'Windows 2003': + _fields_ = _TEB_2003._fields_ + elif os == 'Windows 2003 (64 bits)': + _fields_ = _TEB_2003_64._fields_ + elif os == 'Windows 2008': + _fields_ = _TEB_2008._fields_ + elif os == 'Windows 2008 (64 bits)': + _fields_ = _TEB_2008_64._fields_ + elif os == 'Windows 2003 R2': + _fields_ = _TEB_2003_R2._fields_ + elif os == 'Windows 2003 R2 (64 bits)': + _fields_ = _TEB_2003_R2_64._fields_ + elif os == 'Windows 2008 R2': + _fields_ = _TEB_2008_R2._fields_ + elif os == 'Windows 2008 R2 (64 bits)': + _fields_ = _TEB_2008_R2_64._fields_ + elif os == 'Windows Vista': + _fields_ = _TEB_Vista._fields_ + elif os == 'Windows Vista (64 bits)': + _fields_ = _TEB_Vista_64._fields_ + elif os == 'Windows 7': + _fields_ = _TEB_W7._fields_ + elif os == 'Windows 7 (64 bits)': + _fields_ = _TEB_W7_64._fields_ + elif sizeof(SIZE_T) == sizeof(DWORD): + _fields_ = _TEB_W7._fields_ + else: + _fields_ = _TEB_W7_64._fields_ +PTEB = POINTER(TEB) + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py new file mode 100644 index 00000000..e353c7f7 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/psapi.py @@ -0,0 +1,387 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for psapi.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- PSAPI structures and constants ------------------------------------------- + +LIST_MODULES_DEFAULT = 0x00 +LIST_MODULES_32BIT = 0x01 +LIST_MODULES_64BIT = 0x02 +LIST_MODULES_ALL = 0x03 + +# typedef struct _MODULEINFO { +# LPVOID lpBaseOfDll; +# DWORD SizeOfImage; +# LPVOID EntryPoint; +# } MODULEINFO, *LPMODULEINFO; +class MODULEINFO(Structure): + _fields_ = [ + ("lpBaseOfDll", LPVOID), # remote pointer + ("SizeOfImage", DWORD), + ("EntryPoint", LPVOID), # remote pointer +] +LPMODULEINFO = POINTER(MODULEINFO) + +#--- psapi.dll ---------------------------------------------------------------- + +# BOOL WINAPI EnumDeviceDrivers( +# __out LPVOID *lpImageBase, +# __in DWORD cb, +# __out LPDWORD lpcbNeeded +# ); +def EnumDeviceDrivers(): + _EnumDeviceDrivers = windll.psapi.EnumDeviceDrivers + _EnumDeviceDrivers.argtypes = [LPVOID, DWORD, LPDWORD] + _EnumDeviceDrivers.restype = bool + _EnumDeviceDrivers.errcheck = RaiseIfZero + + size = 0x1000 + lpcbNeeded = DWORD(size) + unit = sizeof(LPVOID) + while 1: + lpImageBase = (LPVOID * (size // unit))() + _EnumDeviceDrivers(byref(lpImageBase), lpcbNeeded, byref(lpcbNeeded)) + needed = lpcbNeeded.value + if needed <= size: + break + size = needed + return [ lpImageBase[index] for index in compat.xrange(0, (needed // unit)) ] + +# BOOL WINAPI EnumProcesses( +# __out DWORD *pProcessIds, +# __in DWORD cb, +# __out DWORD *pBytesReturned +# ); +def EnumProcesses(): + _EnumProcesses = windll.psapi.EnumProcesses + _EnumProcesses.argtypes = [LPVOID, DWORD, LPDWORD] + _EnumProcesses.restype = bool + _EnumProcesses.errcheck = RaiseIfZero + + size = 0x1000 + cbBytesReturned = DWORD() + unit = sizeof(DWORD) + while 1: 
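+        # Grow the PID buffer until EnumProcesses reports fewer bytes than were passed in, meaning the whole process list fit in the buffer.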
+ ProcessIds = (DWORD * (size // unit))() + cbBytesReturned.value = size + _EnumProcesses(byref(ProcessIds), cbBytesReturned, byref(cbBytesReturned)) + returned = cbBytesReturned.value + if returned < size: + break + size = size + 0x1000 + ProcessIdList = list() + for ProcessId in ProcessIds: + if ProcessId is None: + break + ProcessIdList.append(ProcessId) + return ProcessIdList + +# BOOL WINAPI EnumProcessModules( +# __in HANDLE hProcess, +# __out HMODULE *lphModule, +# __in DWORD cb, +# __out LPDWORD lpcbNeeded +# ); +def EnumProcessModules(hProcess): + _EnumProcessModules = windll.psapi.EnumProcessModules + _EnumProcessModules.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD] + _EnumProcessModules.restype = bool + _EnumProcessModules.errcheck = RaiseIfZero + + size = 0x1000 + lpcbNeeded = DWORD(size) + unit = sizeof(HMODULE) + while 1: + lphModule = (HMODULE * (size // unit))() + _EnumProcessModules(hProcess, byref(lphModule), lpcbNeeded, byref(lpcbNeeded)) + needed = lpcbNeeded.value + if needed <= size: + break + size = needed + return [ lphModule[index] for index in compat.xrange(0, int(needed // unit)) ] + +# BOOL WINAPI EnumProcessModulesEx( +# __in HANDLE hProcess, +# __out HMODULE *lphModule, +# __in DWORD cb, +# __out LPDWORD lpcbNeeded, +# __in DWORD dwFilterFlag +# ); +def EnumProcessModulesEx(hProcess, dwFilterFlag = LIST_MODULES_DEFAULT): + _EnumProcessModulesEx = windll.psapi.EnumProcessModulesEx + _EnumProcessModulesEx.argtypes = [HANDLE, LPVOID, DWORD, LPDWORD, DWORD] + _EnumProcessModulesEx.restype = bool + _EnumProcessModulesEx.errcheck = RaiseIfZero + + size = 0x1000 + lpcbNeeded = DWORD(size) + unit = sizeof(HMODULE) + while 1: + lphModule = (HMODULE * (size // unit))() + _EnumProcessModulesEx(hProcess, byref(lphModule), lpcbNeeded, byref(lpcbNeeded), dwFilterFlag) + needed = lpcbNeeded.value + if needed <= size: + break + size = needed + return [ lphModule[index] for index in compat.xrange(0, (needed // unit)) ] + +# DWORD WINAPI GetDeviceDriverBaseName( +# __in LPVOID ImageBase, +# __out LPTSTR lpBaseName, +# __in DWORD nSize +# ); +def GetDeviceDriverBaseNameA(ImageBase): + _GetDeviceDriverBaseNameA = windll.psapi.GetDeviceDriverBaseNameA + _GetDeviceDriverBaseNameA.argtypes = [LPVOID, LPSTR, DWORD] + _GetDeviceDriverBaseNameA.restype = DWORD + + nSize = MAX_PATH + while 1: + lpBaseName = ctypes.create_string_buffer("", nSize) + nCopied = _GetDeviceDriverBaseNameA(ImageBase, lpBaseName, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpBaseName.value + +def GetDeviceDriverBaseNameW(ImageBase): + _GetDeviceDriverBaseNameW = windll.psapi.GetDeviceDriverBaseNameW + _GetDeviceDriverBaseNameW.argtypes = [LPVOID, LPWSTR, DWORD] + _GetDeviceDriverBaseNameW.restype = DWORD + + nSize = MAX_PATH + while 1: + lpBaseName = ctypes.create_unicode_buffer(u"", nSize) + nCopied = _GetDeviceDriverBaseNameW(ImageBase, lpBaseName, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpBaseName.value + +GetDeviceDriverBaseName = GuessStringType(GetDeviceDriverBaseNameA, GetDeviceDriverBaseNameW) + +# DWORD WINAPI GetDeviceDriverFileName( +# __in LPVOID ImageBase, +# __out LPTSTR lpFilename, +# __in DWORD nSize +# ); +def GetDeviceDriverFileNameA(ImageBase): + _GetDeviceDriverFileNameA = windll.psapi.GetDeviceDriverFileNameA + _GetDeviceDriverFileNameA.argtypes = [LPVOID, LPSTR, DWORD] + _GetDeviceDriverFileNameA.restype = DWORD + + nSize = 
MAX_PATH + while 1: + lpFilename = ctypes.create_string_buffer("", nSize) + nCopied = ctypes.windll.psapi.GetDeviceDriverFileNameA(ImageBase, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +def GetDeviceDriverFileNameW(ImageBase): + _GetDeviceDriverFileNameW = windll.psapi.GetDeviceDriverFileNameW + _GetDeviceDriverFileNameW.argtypes = [LPVOID, LPWSTR, DWORD] + _GetDeviceDriverFileNameW.restype = DWORD + + nSize = MAX_PATH + while 1: + lpFilename = ctypes.create_unicode_buffer(u"", nSize) + nCopied = ctypes.windll.psapi.GetDeviceDriverFileNameW(ImageBase, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +GetDeviceDriverFileName = GuessStringType(GetDeviceDriverFileNameA, GetDeviceDriverFileNameW) + +# DWORD WINAPI GetMappedFileName( +# __in HANDLE hProcess, +# __in LPVOID lpv, +# __out LPTSTR lpFilename, +# __in DWORD nSize +# ); +def GetMappedFileNameA(hProcess, lpv): + _GetMappedFileNameA = ctypes.windll.psapi.GetMappedFileNameA + _GetMappedFileNameA.argtypes = [HANDLE, LPVOID, LPSTR, DWORD] + _GetMappedFileNameA.restype = DWORD + + nSize = MAX_PATH + while 1: + lpFilename = ctypes.create_string_buffer("", nSize) + nCopied = _GetMappedFileNameA(hProcess, lpv, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +def GetMappedFileNameW(hProcess, lpv): + _GetMappedFileNameW = ctypes.windll.psapi.GetMappedFileNameW + _GetMappedFileNameW.argtypes = [HANDLE, LPVOID, LPWSTR, DWORD] + _GetMappedFileNameW.restype = DWORD + + nSize = MAX_PATH + while 1: + lpFilename = ctypes.create_unicode_buffer(u"", nSize) + nCopied = _GetMappedFileNameW(hProcess, lpv, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +GetMappedFileName = GuessStringType(GetMappedFileNameA, GetMappedFileNameW) + +# DWORD WINAPI GetModuleFileNameEx( +# __in HANDLE hProcess, +# __in_opt HMODULE hModule, +# __out LPTSTR lpFilename, +# __in DWORD nSize +# ); +def GetModuleFileNameExA(hProcess, hModule = None): + _GetModuleFileNameExA = ctypes.windll.psapi.GetModuleFileNameExA + _GetModuleFileNameExA.argtypes = [HANDLE, HMODULE, LPSTR, DWORD] + _GetModuleFileNameExA.restype = DWORD + + nSize = MAX_PATH + while 1: + lpFilename = ctypes.create_string_buffer("", nSize) + nCopied = _GetModuleFileNameExA(hProcess, hModule, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +def GetModuleFileNameExW(hProcess, hModule = None): + _GetModuleFileNameExW = ctypes.windll.psapi.GetModuleFileNameExW + _GetModuleFileNameExW.argtypes = [HANDLE, HMODULE, LPWSTR, DWORD] + _GetModuleFileNameExW.restype = DWORD + + nSize = MAX_PATH + while 1: + lpFilename = ctypes.create_unicode_buffer(u"", nSize) + nCopied = _GetModuleFileNameExW(hProcess, hModule, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +GetModuleFileNameEx = GuessStringType(GetModuleFileNameExA, GetModuleFileNameExW) + +# BOOL WINAPI GetModuleInformation( +# __in HANDLE hProcess, +# __in HMODULE hModule, +# __out LPMODULEINFO lpmodinfo, +# __in DWORD cb +# ); +def 
GetModuleInformation(hProcess, hModule, lpmodinfo = None): + _GetModuleInformation = windll.psapi.GetModuleInformation + _GetModuleInformation.argtypes = [HANDLE, HMODULE, LPMODULEINFO, DWORD] + _GetModuleInformation.restype = bool + _GetModuleInformation.errcheck = RaiseIfZero + + if lpmodinfo is None: + lpmodinfo = MODULEINFO() + _GetModuleInformation(hProcess, hModule, byref(lpmodinfo), sizeof(lpmodinfo)) + return lpmodinfo + +# DWORD WINAPI GetProcessImageFileName( +# __in HANDLE hProcess, +# __out LPTSTR lpImageFileName, +# __in DWORD nSize +# ); +def GetProcessImageFileNameA(hProcess): + _GetProcessImageFileNameA = windll.psapi.GetProcessImageFileNameA + _GetProcessImageFileNameA.argtypes = [HANDLE, LPSTR, DWORD] + _GetProcessImageFileNameA.restype = DWORD + + nSize = MAX_PATH + while 1: + lpFilename = ctypes.create_string_buffer("", nSize) + nCopied = _GetProcessImageFileNameA(hProcess, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +def GetProcessImageFileNameW(hProcess): + _GetProcessImageFileNameW = windll.psapi.GetProcessImageFileNameW + _GetProcessImageFileNameW.argtypes = [HANDLE, LPWSTR, DWORD] + _GetProcessImageFileNameW.restype = DWORD + + nSize = MAX_PATH + while 1: + lpFilename = ctypes.create_unicode_buffer(u"", nSize) + nCopied = _GetProcessImageFileNameW(hProcess, lpFilename, nSize) + if nCopied == 0: + raise ctypes.WinError() + if nCopied < (nSize - 1): + break + nSize = nSize + MAX_PATH + return lpFilename.value + +GetProcessImageFileName = GuessStringType(GetProcessImageFileNameA, GetProcessImageFileNameW) + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py new file mode 100644 index 00000000..5c945db7 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/shell32.py @@ -0,0 +1,382 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for shell32.dll in ctypes. +""" + +# TODO +# * Add a class wrapper to SHELLEXECUTEINFO +# * More logic into ShellExecuteEx + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.kernel32 import LocalFree + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- Constants ---------------------------------------------------------------- + +SEE_MASK_DEFAULT = 0x00000000 +SEE_MASK_CLASSNAME = 0x00000001 +SEE_MASK_CLASSKEY = 0x00000003 +SEE_MASK_IDLIST = 0x00000004 +SEE_MASK_INVOKEIDLIST = 0x0000000C +SEE_MASK_ICON = 0x00000010 +SEE_MASK_HOTKEY = 0x00000020 +SEE_MASK_NOCLOSEPROCESS = 0x00000040 +SEE_MASK_CONNECTNETDRV = 0x00000080 +SEE_MASK_NOASYNC = 0x00000100 +SEE_MASK_DOENVSUBST = 0x00000200 +SEE_MASK_FLAG_NO_UI = 0x00000400 +SEE_MASK_UNICODE = 0x00004000 +SEE_MASK_NO_CONSOLE = 0x00008000 +SEE_MASK_ASYNCOK = 0x00100000 +SEE_MASK_HMONITOR = 0x00200000 +SEE_MASK_NOZONECHECKS = 0x00800000 +SEE_MASK_WAITFORINPUTIDLE = 0x02000000 +SEE_MASK_FLAG_LOG_USAGE = 0x04000000 + +SE_ERR_FNF = 2 +SE_ERR_PNF = 3 +SE_ERR_ACCESSDENIED = 5 +SE_ERR_OOM = 8 +SE_ERR_DLLNOTFOUND = 32 +SE_ERR_SHARE = 26 +SE_ERR_ASSOCINCOMPLETE = 27 +SE_ERR_DDETIMEOUT = 28 +SE_ERR_DDEFAIL = 29 +SE_ERR_DDEBUSY = 30 +SE_ERR_NOASSOC = 31 + +SHGFP_TYPE_CURRENT = 0 +SHGFP_TYPE_DEFAULT = 1 + +CSIDL_DESKTOP = 0x0000 +CSIDL_INTERNET = 0x0001 +CSIDL_PROGRAMS = 0x0002 +CSIDL_CONTROLS = 0x0003 +CSIDL_PRINTERS = 0x0004 +CSIDL_PERSONAL = 0x0005 +CSIDL_FAVORITES = 0x0006 +CSIDL_STARTUP = 0x0007 +CSIDL_RECENT = 0x0008 +CSIDL_SENDTO = 0x0009 +CSIDL_BITBUCKET = 0x000a +CSIDL_STARTMENU = 0x000b +CSIDL_MYDOCUMENTS = CSIDL_PERSONAL +CSIDL_MYMUSIC = 0x000d +CSIDL_MYVIDEO = 0x000e +CSIDL_DESKTOPDIRECTORY = 0x0010 +CSIDL_DRIVES = 0x0011 +CSIDL_NETWORK = 0x0012 +CSIDL_NETHOOD = 0x0013 +CSIDL_FONTS = 0x0014 +CSIDL_TEMPLATES = 0x0015 +CSIDL_COMMON_STARTMENU = 0x0016 +CSIDL_COMMON_PROGRAMS = 0x0017 +CSIDL_COMMON_STARTUP = 0x0018 +CSIDL_COMMON_DESKTOPDIRECTORY = 0x0019 +CSIDL_APPDATA = 0x001a +CSIDL_PRINTHOOD = 0x001b +CSIDL_LOCAL_APPDATA = 0x001c +CSIDL_ALTSTARTUP = 0x001d +CSIDL_COMMON_ALTSTARTUP = 0x001e +CSIDL_COMMON_FAVORITES = 0x001f +CSIDL_INTERNET_CACHE = 0x0020 +CSIDL_COOKIES = 0x0021 +CSIDL_HISTORY = 0x0022 +CSIDL_COMMON_APPDATA = 0x0023 +CSIDL_WINDOWS = 0x0024 +CSIDL_SYSTEM = 0x0025 +CSIDL_PROGRAM_FILES = 0x0026 +CSIDL_MYPICTURES = 0x0027 +CSIDL_PROFILE = 0x0028 +CSIDL_SYSTEMX86 = 0x0029 +CSIDL_PROGRAM_FILESX86 = 0x002a +CSIDL_PROGRAM_FILES_COMMON = 0x002b +CSIDL_PROGRAM_FILES_COMMONX86 = 0x002c +CSIDL_COMMON_TEMPLATES = 0x002d +CSIDL_COMMON_DOCUMENTS = 0x002e +CSIDL_COMMON_ADMINTOOLS = 0x002f +CSIDL_ADMINTOOLS = 0x0030 +CSIDL_CONNECTIONS = 0x0031 +CSIDL_COMMON_MUSIC = 0x0035 +CSIDL_COMMON_PICTURES = 0x0036 +CSIDL_COMMON_VIDEO = 0x0037 +CSIDL_RESOURCES = 
0x0038 +CSIDL_RESOURCES_LOCALIZED = 0x0039 +CSIDL_COMMON_OEM_LINKS = 0x003a +CSIDL_CDBURN_AREA = 0x003b +CSIDL_COMPUTERSNEARME = 0x003d +CSIDL_PROFILES = 0x003e + +CSIDL_FOLDER_MASK = 0x00ff + +CSIDL_FLAG_PER_USER_INIT = 0x0800 +CSIDL_FLAG_NO_ALIAS = 0x1000 +CSIDL_FLAG_DONT_VERIFY = 0x4000 +CSIDL_FLAG_CREATE = 0x8000 + +CSIDL_FLAG_MASK = 0xff00 + +#--- Structures --------------------------------------------------------------- + +# typedef struct _SHELLEXECUTEINFO { +# DWORD cbSize; +# ULONG fMask; +# HWND hwnd; +# LPCTSTR lpVerb; +# LPCTSTR lpFile; +# LPCTSTR lpParameters; +# LPCTSTR lpDirectory; +# int nShow; +# HINSTANCE hInstApp; +# LPVOID lpIDList; +# LPCTSTR lpClass; +# HKEY hkeyClass; +# DWORD dwHotKey; +# union { +# HANDLE hIcon; +# HANDLE hMonitor; +# } DUMMYUNIONNAME; +# HANDLE hProcess; +# } SHELLEXECUTEINFO, *LPSHELLEXECUTEINFO; + +class SHELLEXECUTEINFO(Structure): + _fields_ = [ + ("cbSize", DWORD), + ("fMask", ULONG), + ("hwnd", HWND), + ("lpVerb", LPSTR), + ("lpFile", LPSTR), + ("lpParameters", LPSTR), + ("lpDirectory", LPSTR), + ("nShow", ctypes.c_int), + ("hInstApp", HINSTANCE), + ("lpIDList", LPVOID), + ("lpClass", LPSTR), + ("hkeyClass", HKEY), + ("dwHotKey", DWORD), + ("hIcon", HANDLE), + ("hProcess", HANDLE), + ] + + def __get_hMonitor(self): + return self.hIcon + def __set_hMonitor(self, hMonitor): + self.hIcon = hMonitor + hMonitor = property(__get_hMonitor, __set_hMonitor) + +LPSHELLEXECUTEINFO = POINTER(SHELLEXECUTEINFO) + +#--- shell32.dll -------------------------------------------------------------- + +# LPWSTR *CommandLineToArgvW( +# LPCWSTR lpCmdLine, +# int *pNumArgs +# ); +def CommandLineToArgvW(lpCmdLine): + _CommandLineToArgvW = windll.shell32.CommandLineToArgvW + _CommandLineToArgvW.argtypes = [LPVOID, POINTER(ctypes.c_int)] + _CommandLineToArgvW.restype = LPVOID + + if not lpCmdLine: + lpCmdLine = None + argc = ctypes.c_int(0) + vptr = ctypes.windll.shell32.CommandLineToArgvW(lpCmdLine, byref(argc)) + if vptr == NULL: + raise ctypes.WinError() + argv = vptr + try: + argc = argc.value + if argc <= 0: + raise ctypes.WinError() + argv = ctypes.cast(argv, ctypes.POINTER(LPWSTR * argc) ) + argv = [ argv.contents[i] for i in compat.xrange(0, argc) ] + finally: + if vptr is not None: + LocalFree(vptr) + return argv + +def CommandLineToArgvA(lpCmdLine): + t_ansi = GuessStringType.t_ansi + t_unicode = GuessStringType.t_unicode + if isinstance(lpCmdLine, t_ansi): + cmdline = t_unicode(lpCmdLine) + else: + cmdline = lpCmdLine + return [t_ansi(x) for x in CommandLineToArgvW(cmdline)] + +CommandLineToArgv = GuessStringType(CommandLineToArgvA, CommandLineToArgvW) + +# HINSTANCE ShellExecute( +# HWND hwnd, +# LPCTSTR lpOperation, +# LPCTSTR lpFile, +# LPCTSTR lpParameters, +# LPCTSTR lpDirectory, +# INT nShowCmd +# ); +def ShellExecuteA(hwnd = None, lpOperation = None, lpFile = None, lpParameters = None, lpDirectory = None, nShowCmd = None): + _ShellExecuteA = windll.shell32.ShellExecuteA + _ShellExecuteA.argtypes = [HWND, LPSTR, LPSTR, LPSTR, LPSTR, INT] + _ShellExecuteA.restype = HINSTANCE + + if not nShowCmd: + nShowCmd = 0 + success = _ShellExecuteA(hwnd, lpOperation, lpFile, lpParameters, lpDirectory, nShowCmd) + success = ctypes.cast(success, c_int) + success = success.value + if not success > 32: # weird! isn't it? 
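+        # ShellExecute signals success with a return value greater than 32; a value of 32 or less is an error code, so surface it as a WinError.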
+ raise ctypes.WinError(success) + +def ShellExecuteW(hwnd = None, lpOperation = None, lpFile = None, lpParameters = None, lpDirectory = None, nShowCmd = None): + _ShellExecuteW = windll.shell32.ShellExecuteW + _ShellExecuteW.argtypes = [HWND, LPWSTR, LPWSTR, LPWSTR, LPWSTR, INT] + _ShellExecuteW.restype = HINSTANCE + + if not nShowCmd: + nShowCmd = 0 + success = _ShellExecuteW(hwnd, lpOperation, lpFile, lpParameters, lpDirectory, nShowCmd) + success = ctypes.cast(success, c_int) + success = success.value + if not success > 32: # weird! isn't it? + raise ctypes.WinError(success) + +ShellExecute = GuessStringType(ShellExecuteA, ShellExecuteW) + +# BOOL ShellExecuteEx( +# __inout LPSHELLEXECUTEINFO lpExecInfo +# ); +def ShellExecuteEx(lpExecInfo): + if isinstance(lpExecInfo, SHELLEXECUTEINFOA): + ShellExecuteExA(lpExecInfo) + elif isinstance(lpExecInfo, SHELLEXECUTEINFOW): + ShellExecuteExW(lpExecInfo) + else: + raise TypeError("Expected SHELLEXECUTEINFOA or SHELLEXECUTEINFOW, got %s instead" % type(lpExecInfo)) + +def ShellExecuteExA(lpExecInfo): + _ShellExecuteExA = windll.shell32.ShellExecuteExA + _ShellExecuteExA.argtypes = [LPSHELLEXECUTEINFOA] + _ShellExecuteExA.restype = BOOL + _ShellExecuteExA.errcheck = RaiseIfZero + _ShellExecuteExA(byref(lpExecInfo)) + +def ShellExecuteExW(lpExecInfo): + _ShellExecuteExW = windll.shell32.ShellExecuteExW + _ShellExecuteExW.argtypes = [LPSHELLEXECUTEINFOW] + _ShellExecuteExW.restype = BOOL + _ShellExecuteExW.errcheck = RaiseIfZero + _ShellExecuteExW(byref(lpExecInfo)) + +# HINSTANCE FindExecutable( +# __in LPCTSTR lpFile, +# __in_opt LPCTSTR lpDirectory, +# __out LPTSTR lpResult +# ); +def FindExecutableA(lpFile, lpDirectory = None): + _FindExecutableA = windll.shell32.FindExecutableA + _FindExecutableA.argtypes = [LPSTR, LPSTR, LPSTR] + _FindExecutableA.restype = HINSTANCE + + lpResult = ctypes.create_string_buffer(MAX_PATH) + success = _FindExecutableA(lpFile, lpDirectory, lpResult) + success = ctypes.cast(success, ctypes.c_void_p) + success = success.value + if not success > 32: # weird! isn't it? + raise ctypes.WinError(success) + return lpResult.value + +def FindExecutableW(lpFile, lpDirectory = None): + _FindExecutableW = windll.shell32.FindExecutableW + _FindExecutableW.argtypes = [LPWSTR, LPWSTR, LPWSTR] + _FindExecutableW.restype = HINSTANCE + + lpResult = ctypes.create_unicode_buffer(MAX_PATH) + success = _FindExecutableW(lpFile, lpDirectory, lpResult) + success = ctypes.cast(success, ctypes.c_void_p) + success = success.value + if not success > 32: # weird! isn't it? 
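+        # Like ShellExecute, FindExecutable returns an HINSTANCE-style value: greater than 32 means success, 32 or less is an error code.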
+ raise ctypes.WinError(success) + return lpResult.value + +FindExecutable = GuessStringType(FindExecutableA, FindExecutableW) + +# HRESULT SHGetFolderPath( +# __in HWND hwndOwner, +# __in int nFolder, +# __in HANDLE hToken, +# __in DWORD dwFlags, +# __out LPTSTR pszPath +# ); +def SHGetFolderPathA(nFolder, hToken = None, dwFlags = SHGFP_TYPE_CURRENT): + _SHGetFolderPathA = windll.shell32.SHGetFolderPathA # shfolder.dll in older win versions + _SHGetFolderPathA.argtypes = [HWND, ctypes.c_int, HANDLE, DWORD, LPSTR] + _SHGetFolderPathA.restype = HRESULT + _SHGetFolderPathA.errcheck = RaiseIfNotZero # S_OK == 0 + + pszPath = ctypes.create_string_buffer(MAX_PATH + 1) + _SHGetFolderPathA(None, nFolder, hToken, dwFlags, pszPath) + return pszPath.value + +def SHGetFolderPathW(nFolder, hToken = None, dwFlags = SHGFP_TYPE_CURRENT): + _SHGetFolderPathW = windll.shell32.SHGetFolderPathW # shfolder.dll in older win versions + _SHGetFolderPathW.argtypes = [HWND, ctypes.c_int, HANDLE, DWORD, LPWSTR] + _SHGetFolderPathW.restype = HRESULT + _SHGetFolderPathW.errcheck = RaiseIfNotZero # S_OK == 0 + + pszPath = ctypes.create_unicode_buffer(MAX_PATH + 1) + _SHGetFolderPathW(None, nFolder, hToken, dwFlags, pszPath) + return pszPath.value + +SHGetFolderPath = DefaultStringType(SHGetFolderPathA, SHGetFolderPathW) + +# BOOL IsUserAnAdmin(void); +def IsUserAnAdmin(): + # Supposedly, IsUserAnAdmin() is deprecated in Vista. + # But I tried it on Windows 7 and it works just fine. + _IsUserAnAdmin = windll.shell32.IsUserAnAdmin + _IsUserAnAdmin.argtypes = [] + _IsUserAnAdmin.restype = bool + return _IsUserAnAdmin() + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py new file mode 100644 index 00000000..5f6eb3ea --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/shlwapi.py @@ -0,0 +1,756 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for shlwapi.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.kernel32 import * + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +OS_WINDOWS = 0 +OS_NT = 1 +OS_WIN95ORGREATER = 2 +OS_NT4ORGREATER = 3 +OS_WIN98ORGREATER = 5 +OS_WIN98_GOLD = 6 +OS_WIN2000ORGREATER = 7 +OS_WIN2000PRO = 8 +OS_WIN2000SERVER = 9 +OS_WIN2000ADVSERVER = 10 +OS_WIN2000DATACENTER = 11 +OS_WIN2000TERMINAL = 12 +OS_EMBEDDED = 13 +OS_TERMINALCLIENT = 14 +OS_TERMINALREMOTEADMIN = 15 +OS_WIN95_GOLD = 16 +OS_MEORGREATER = 17 +OS_XPORGREATER = 18 +OS_HOME = 19 +OS_PROFESSIONAL = 20 +OS_DATACENTER = 21 +OS_ADVSERVER = 22 +OS_SERVER = 23 +OS_TERMINALSERVER = 24 +OS_PERSONALTERMINALSERVER = 25 +OS_FASTUSERSWITCHING = 26 +OS_WELCOMELOGONUI = 27 +OS_DOMAINMEMBER = 28 +OS_ANYSERVER = 29 +OS_WOW6432 = 30 +OS_WEBSERVER = 31 +OS_SMALLBUSINESSSERVER = 32 +OS_TABLETPC = 33 +OS_SERVERADMINUI = 34 +OS_MEDIACENTER = 35 +OS_APPLIANCE = 36 + +#--- shlwapi.dll -------------------------------------------------------------- + +# BOOL IsOS( +# DWORD dwOS +# ); +def IsOS(dwOS): + try: + _IsOS = windll.shlwapi.IsOS + _IsOS.argtypes = [DWORD] + _IsOS.restype = bool + except AttributeError: + # According to MSDN, on Windows versions prior to Vista + # this function is exported only by ordinal number 437. 
+ # http://msdn.microsoft.com/en-us/library/bb773795%28VS.85%29.aspx + _GetProcAddress = windll.kernel32.GetProcAddress + _GetProcAddress.argtypes = [HINSTANCE, DWORD] + _GetProcAddress.restype = LPVOID + _IsOS = windll.kernel32.GetProcAddress(windll.shlwapi._handle, 437) + _IsOS = WINFUNCTYPE(bool, DWORD)(_IsOS) + return _IsOS(dwOS) + +# LPTSTR PathAddBackslash( +# LPTSTR lpszPath +# ); +def PathAddBackslashA(lpszPath): + _PathAddBackslashA = windll.shlwapi.PathAddBackslashA + _PathAddBackslashA.argtypes = [LPSTR] + _PathAddBackslashA.restype = LPSTR + + lpszPath = ctypes.create_string_buffer(lpszPath, MAX_PATH) + retval = _PathAddBackslashA(lpszPath) + if retval == NULL: + raise ctypes.WinError() + return lpszPath.value + +def PathAddBackslashW(lpszPath): + _PathAddBackslashW = windll.shlwapi.PathAddBackslashW + _PathAddBackslashW.argtypes = [LPWSTR] + _PathAddBackslashW.restype = LPWSTR + + lpszPath = ctypes.create_unicode_buffer(lpszPath, MAX_PATH) + retval = _PathAddBackslashW(lpszPath) + if retval == NULL: + raise ctypes.WinError() + return lpszPath.value + +PathAddBackslash = GuessStringType(PathAddBackslashA, PathAddBackslashW) + +# BOOL PathAddExtension( +# LPTSTR pszPath, +# LPCTSTR pszExtension +# ); +def PathAddExtensionA(lpszPath, pszExtension = None): + _PathAddExtensionA = windll.shlwapi.PathAddExtensionA + _PathAddExtensionA.argtypes = [LPSTR, LPSTR] + _PathAddExtensionA.restype = bool + _PathAddExtensionA.errcheck = RaiseIfZero + + if not pszExtension: + pszExtension = None + lpszPath = ctypes.create_string_buffer(lpszPath, MAX_PATH) + _PathAddExtensionA(lpszPath, pszExtension) + return lpszPath.value + +def PathAddExtensionW(lpszPath, pszExtension = None): + _PathAddExtensionW = windll.shlwapi.PathAddExtensionW + _PathAddExtensionW.argtypes = [LPWSTR, LPWSTR] + _PathAddExtensionW.restype = bool + _PathAddExtensionW.errcheck = RaiseIfZero + + if not pszExtension: + pszExtension = None + lpszPath = ctypes.create_unicode_buffer(lpszPath, MAX_PATH) + _PathAddExtensionW(lpszPath, pszExtension) + return lpszPath.value + +PathAddExtension = GuessStringType(PathAddExtensionA, PathAddExtensionW) + +# BOOL PathAppend( +# LPTSTR pszPath, +# LPCTSTR pszMore +# ); +def PathAppendA(lpszPath, pszMore = None): + _PathAppendA = windll.shlwapi.PathAppendA + _PathAppendA.argtypes = [LPSTR, LPSTR] + _PathAppendA.restype = bool + _PathAppendA.errcheck = RaiseIfZero + + if not pszMore: + pszMore = None + lpszPath = ctypes.create_string_buffer(lpszPath, MAX_PATH) + _PathAppendA(lpszPath, pszMore) + return lpszPath.value + +def PathAppendW(lpszPath, pszMore = None): + _PathAppendW = windll.shlwapi.PathAppendW + _PathAppendW.argtypes = [LPWSTR, LPWSTR] + _PathAppendW.restype = bool + _PathAppendW.errcheck = RaiseIfZero + + if not pszMore: + pszMore = None + lpszPath = ctypes.create_unicode_buffer(lpszPath, MAX_PATH) + _PathAppendW(lpszPath, pszMore) + return lpszPath.value + +PathAppend = GuessStringType(PathAppendA, PathAppendW) + +# LPTSTR PathCombine( +# LPTSTR lpszDest, +# LPCTSTR lpszDir, +# LPCTSTR lpszFile +# ); +def PathCombineA(lpszDir, lpszFile): + _PathCombineA = windll.shlwapi.PathCombineA + _PathCombineA.argtypes = [LPSTR, LPSTR, LPSTR] + _PathCombineA.restype = LPSTR + + lpszDest = ctypes.create_string_buffer("", max(MAX_PATH, len(lpszDir) + len(lpszFile) + 1)) + retval = _PathCombineA(lpszDest, lpszDir, lpszFile) + if retval == NULL: + return None + return lpszDest.value + +def PathCombineW(lpszDir, lpszFile): + _PathCombineW = windll.shlwapi.PathCombineW + _PathCombineW.argtypes = 
[LPWSTR, LPWSTR, LPWSTR] + _PathCombineW.restype = LPWSTR + + lpszDest = ctypes.create_unicode_buffer(u"", max(MAX_PATH, len(lpszDir) + len(lpszFile) + 1)) + retval = _PathCombineW(lpszDest, lpszDir, lpszFile) + if retval == NULL: + return None + return lpszDest.value + +PathCombine = GuessStringType(PathCombineA, PathCombineW) + +# BOOL PathCanonicalize( +# LPTSTR lpszDst, +# LPCTSTR lpszSrc +# ); +def PathCanonicalizeA(lpszSrc): + _PathCanonicalizeA = windll.shlwapi.PathCanonicalizeA + _PathCanonicalizeA.argtypes = [LPSTR, LPSTR] + _PathCanonicalizeA.restype = bool + _PathCanonicalizeA.errcheck = RaiseIfZero + + lpszDst = ctypes.create_string_buffer("", MAX_PATH) + _PathCanonicalizeA(lpszDst, lpszSrc) + return lpszDst.value + +def PathCanonicalizeW(lpszSrc): + _PathCanonicalizeW = windll.shlwapi.PathCanonicalizeW + _PathCanonicalizeW.argtypes = [LPWSTR, LPWSTR] + _PathCanonicalizeW.restype = bool + _PathCanonicalizeW.errcheck = RaiseIfZero + + lpszDst = ctypes.create_unicode_buffer(u"", MAX_PATH) + _PathCanonicalizeW(lpszDst, lpszSrc) + return lpszDst.value + +PathCanonicalize = GuessStringType(PathCanonicalizeA, PathCanonicalizeW) + +# BOOL PathRelativePathTo( +# _Out_ LPTSTR pszPath, +# _In_ LPCTSTR pszFrom, +# _In_ DWORD dwAttrFrom, +# _In_ LPCTSTR pszTo, +# _In_ DWORD dwAttrTo +# ); +def PathRelativePathToA(pszFrom = None, dwAttrFrom = FILE_ATTRIBUTE_DIRECTORY, pszTo = None, dwAttrTo = FILE_ATTRIBUTE_DIRECTORY): + _PathRelativePathToA = windll.shlwapi.PathRelativePathToA + _PathRelativePathToA.argtypes = [LPSTR, LPSTR, DWORD, LPSTR, DWORD] + _PathRelativePathToA.restype = bool + _PathRelativePathToA.errcheck = RaiseIfZero + + # Make the paths absolute or the function fails. + if pszFrom: + pszFrom = GetFullPathNameA(pszFrom)[0] + else: + pszFrom = GetCurrentDirectoryA() + if pszTo: + pszTo = GetFullPathNameA(pszTo)[0] + else: + pszTo = GetCurrentDirectoryA() + + # Argh, this function doesn't receive an output buffer size! + # We'll try to guess the maximum possible buffer size. + dwPath = max((len(pszFrom) + len(pszTo)) * 2 + 1, MAX_PATH + 1) + pszPath = ctypes.create_string_buffer('', dwPath) + + # Also, it doesn't set the last error value. + # Whoever coded it must have been drunk or tripping on acid. Or both. + # The only failure conditions I've seen were invalid paths, paths not + # on the same drive, or the path is not absolute. + SetLastError(ERROR_INVALID_PARAMETER) + + _PathRelativePathToA(pszPath, pszFrom, dwAttrFrom, pszTo, dwAttrTo) + return pszPath.value + +def PathRelativePathToW(pszFrom = None, dwAttrFrom = FILE_ATTRIBUTE_DIRECTORY, pszTo = None, dwAttrTo = FILE_ATTRIBUTE_DIRECTORY): + _PathRelativePathToW = windll.shlwapi.PathRelativePathToW + _PathRelativePathToW.argtypes = [LPWSTR, LPWSTR, DWORD, LPWSTR, DWORD] + _PathRelativePathToW.restype = bool + _PathRelativePathToW.errcheck = RaiseIfZero + + # Refer to PathRelativePathToA to know why this code is so ugly. 
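+    # In short: both paths are made absolute first, the output buffer size is
+    # guessed (the API takes no buffer length parameter), and the last error is
+    # pre-set because the API does not set it on failure.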
+ if pszFrom: + pszFrom = GetFullPathNameW(pszFrom)[0] + else: + pszFrom = GetCurrentDirectoryW() + if pszTo: + pszTo = GetFullPathNameW(pszTo)[0] + else: + pszTo = GetCurrentDirectoryW() + dwPath = max((len(pszFrom) + len(pszTo)) * 2 + 1, MAX_PATH + 1) + pszPath = ctypes.create_unicode_buffer(u'', dwPath) + SetLastError(ERROR_INVALID_PARAMETER) + _PathRelativePathToW(pszPath, pszFrom, dwAttrFrom, pszTo, dwAttrTo) + return pszPath.value + +PathRelativePathTo = GuessStringType(PathRelativePathToA, PathRelativePathToW) + +# BOOL PathFileExists( +# LPCTSTR pszPath +# ); +def PathFileExistsA(pszPath): + _PathFileExistsA = windll.shlwapi.PathFileExistsA + _PathFileExistsA.argtypes = [LPSTR] + _PathFileExistsA.restype = bool + return _PathFileExistsA(pszPath) + +def PathFileExistsW(pszPath): + _PathFileExistsW = windll.shlwapi.PathFileExistsW + _PathFileExistsW.argtypes = [LPWSTR] + _PathFileExistsW.restype = bool + return _PathFileExistsW(pszPath) + +PathFileExists = GuessStringType(PathFileExistsA, PathFileExistsW) + +# LPTSTR PathFindExtension( +# LPCTSTR pszPath +# ); +def PathFindExtensionA(pszPath): + _PathFindExtensionA = windll.shlwapi.PathFindExtensionA + _PathFindExtensionA.argtypes = [LPSTR] + _PathFindExtensionA.restype = LPSTR + pszPath = ctypes.create_string_buffer(pszPath) + return _PathFindExtensionA(pszPath) + +def PathFindExtensionW(pszPath): + _PathFindExtensionW = windll.shlwapi.PathFindExtensionW + _PathFindExtensionW.argtypes = [LPWSTR] + _PathFindExtensionW.restype = LPWSTR + pszPath = ctypes.create_unicode_buffer(pszPath) + return _PathFindExtensionW(pszPath) + +PathFindExtension = GuessStringType(PathFindExtensionA, PathFindExtensionW) + +# LPTSTR PathFindFileName( +# LPCTSTR pszPath +# ); +def PathFindFileNameA(pszPath): + _PathFindFileNameA = windll.shlwapi.PathFindFileNameA + _PathFindFileNameA.argtypes = [LPSTR] + _PathFindFileNameA.restype = LPSTR + pszPath = ctypes.create_string_buffer(pszPath) + return _PathFindFileNameA(pszPath) + +def PathFindFileNameW(pszPath): + _PathFindFileNameW = windll.shlwapi.PathFindFileNameW + _PathFindFileNameW.argtypes = [LPWSTR] + _PathFindFileNameW.restype = LPWSTR + pszPath = ctypes.create_unicode_buffer(pszPath) + return _PathFindFileNameW(pszPath) + +PathFindFileName = GuessStringType(PathFindFileNameA, PathFindFileNameW) + +# LPTSTR PathFindNextComponent( +# LPCTSTR pszPath +# ); +def PathFindNextComponentA(pszPath): + _PathFindNextComponentA = windll.shlwapi.PathFindNextComponentA + _PathFindNextComponentA.argtypes = [LPSTR] + _PathFindNextComponentA.restype = LPSTR + pszPath = ctypes.create_string_buffer(pszPath) + return _PathFindNextComponentA(pszPath) + +def PathFindNextComponentW(pszPath): + _PathFindNextComponentW = windll.shlwapi.PathFindNextComponentW + _PathFindNextComponentW.argtypes = [LPWSTR] + _PathFindNextComponentW.restype = LPWSTR + pszPath = ctypes.create_unicode_buffer(pszPath) + return _PathFindNextComponentW(pszPath) + +PathFindNextComponent = GuessStringType(PathFindNextComponentA, PathFindNextComponentW) + +# BOOL PathFindOnPath( +# LPTSTR pszFile, +# LPCTSTR *ppszOtherDirs +# ); +def PathFindOnPathA(pszFile, ppszOtherDirs = None): + _PathFindOnPathA = windll.shlwapi.PathFindOnPathA + _PathFindOnPathA.argtypes = [LPSTR, LPSTR] + _PathFindOnPathA.restype = bool + + pszFile = ctypes.create_string_buffer(pszFile, MAX_PATH) + if not ppszOtherDirs: + ppszOtherDirs = None + else: + szArray = "" + for pszOtherDirs in ppszOtherDirs: + if pszOtherDirs: + szArray = "%s%s\0" % (szArray, pszOtherDirs) + szArray = 
szArray + "\0" + pszOtherDirs = ctypes.create_string_buffer(szArray) + ppszOtherDirs = ctypes.pointer(pszOtherDirs) + if _PathFindOnPathA(pszFile, ppszOtherDirs): + return pszFile.value + return None + +def PathFindOnPathW(pszFile, ppszOtherDirs = None): + _PathFindOnPathW = windll.shlwapi.PathFindOnPathA + _PathFindOnPathW.argtypes = [LPWSTR, LPWSTR] + _PathFindOnPathW.restype = bool + + pszFile = ctypes.create_unicode_buffer(pszFile, MAX_PATH) + if not ppszOtherDirs: + ppszOtherDirs = None + else: + szArray = u"" + for pszOtherDirs in ppszOtherDirs: + if pszOtherDirs: + szArray = u"%s%s\0" % (szArray, pszOtherDirs) + szArray = szArray + u"\0" + pszOtherDirs = ctypes.create_unicode_buffer(szArray) + ppszOtherDirs = ctypes.pointer(pszOtherDirs) + if _PathFindOnPathW(pszFile, ppszOtherDirs): + return pszFile.value + return None + +PathFindOnPath = GuessStringType(PathFindOnPathA, PathFindOnPathW) + +# LPTSTR PathGetArgs( +# LPCTSTR pszPath +# ); +def PathGetArgsA(pszPath): + _PathGetArgsA = windll.shlwapi.PathGetArgsA + _PathGetArgsA.argtypes = [LPSTR] + _PathGetArgsA.restype = LPSTR + pszPath = ctypes.create_string_buffer(pszPath) + return _PathGetArgsA(pszPath) + +def PathGetArgsW(pszPath): + _PathGetArgsW = windll.shlwapi.PathGetArgsW + _PathGetArgsW.argtypes = [LPWSTR] + _PathGetArgsW.restype = LPWSTR + pszPath = ctypes.create_unicode_buffer(pszPath) + return _PathGetArgsW(pszPath) + +PathGetArgs = GuessStringType(PathGetArgsA, PathGetArgsW) + +# BOOL PathIsContentType( +# LPCTSTR pszPath, +# LPCTSTR pszContentType +# ); +def PathIsContentTypeA(pszPath, pszContentType): + _PathIsContentTypeA = windll.shlwapi.PathIsContentTypeA + _PathIsContentTypeA.argtypes = [LPSTR, LPSTR] + _PathIsContentTypeA.restype = bool + return _PathIsContentTypeA(pszPath, pszContentType) + +def PathIsContentTypeW(pszPath, pszContentType): + _PathIsContentTypeW = windll.shlwapi.PathIsContentTypeW + _PathIsContentTypeW.argtypes = [LPWSTR, LPWSTR] + _PathIsContentTypeW.restype = bool + return _PathIsContentTypeW(pszPath, pszContentType) + +PathIsContentType = GuessStringType(PathIsContentTypeA, PathIsContentTypeW) + +# BOOL PathIsDirectory( +# LPCTSTR pszPath +# ); +def PathIsDirectoryA(pszPath): + _PathIsDirectoryA = windll.shlwapi.PathIsDirectoryA + _PathIsDirectoryA.argtypes = [LPSTR] + _PathIsDirectoryA.restype = bool + return _PathIsDirectoryA(pszPath) + +def PathIsDirectoryW(pszPath): + _PathIsDirectoryW = windll.shlwapi.PathIsDirectoryW + _PathIsDirectoryW.argtypes = [LPWSTR] + _PathIsDirectoryW.restype = bool + return _PathIsDirectoryW(pszPath) + +PathIsDirectory = GuessStringType(PathIsDirectoryA, PathIsDirectoryW) + +# BOOL PathIsDirectoryEmpty( +# LPCTSTR pszPath +# ); +def PathIsDirectoryEmptyA(pszPath): + _PathIsDirectoryEmptyA = windll.shlwapi.PathIsDirectoryEmptyA + _PathIsDirectoryEmptyA.argtypes = [LPSTR] + _PathIsDirectoryEmptyA.restype = bool + return _PathIsDirectoryEmptyA(pszPath) + +def PathIsDirectoryEmptyW(pszPath): + _PathIsDirectoryEmptyW = windll.shlwapi.PathIsDirectoryEmptyW + _PathIsDirectoryEmptyW.argtypes = [LPWSTR] + _PathIsDirectoryEmptyW.restype = bool + return _PathIsDirectoryEmptyW(pszPath) + +PathIsDirectoryEmpty = GuessStringType(PathIsDirectoryEmptyA, PathIsDirectoryEmptyW) + +# BOOL PathIsNetworkPath( +# LPCTSTR pszPath +# ); +def PathIsNetworkPathA(pszPath): + _PathIsNetworkPathA = windll.shlwapi.PathIsNetworkPathA + _PathIsNetworkPathA.argtypes = [LPSTR] + _PathIsNetworkPathA.restype = bool + return _PathIsNetworkPathA(pszPath) + +def PathIsNetworkPathW(pszPath): + 
_PathIsNetworkPathW = windll.shlwapi.PathIsNetworkPathW + _PathIsNetworkPathW.argtypes = [LPWSTR] + _PathIsNetworkPathW.restype = bool + return _PathIsNetworkPathW(pszPath) + +PathIsNetworkPath = GuessStringType(PathIsNetworkPathA, PathIsNetworkPathW) + +# BOOL PathIsRelative( +# LPCTSTR lpszPath +# ); +def PathIsRelativeA(pszPath): + _PathIsRelativeA = windll.shlwapi.PathIsRelativeA + _PathIsRelativeA.argtypes = [LPSTR] + _PathIsRelativeA.restype = bool + return _PathIsRelativeA(pszPath) + +def PathIsRelativeW(pszPath): + _PathIsRelativeW = windll.shlwapi.PathIsRelativeW + _PathIsRelativeW.argtypes = [LPWSTR] + _PathIsRelativeW.restype = bool + return _PathIsRelativeW(pszPath) + +PathIsRelative = GuessStringType(PathIsRelativeA, PathIsRelativeW) + +# BOOL PathIsRoot( +# LPCTSTR pPath +# ); +def PathIsRootA(pszPath): + _PathIsRootA = windll.shlwapi.PathIsRootA + _PathIsRootA.argtypes = [LPSTR] + _PathIsRootA.restype = bool + return _PathIsRootA(pszPath) + +def PathIsRootW(pszPath): + _PathIsRootW = windll.shlwapi.PathIsRootW + _PathIsRootW.argtypes = [LPWSTR] + _PathIsRootW.restype = bool + return _PathIsRootW(pszPath) + +PathIsRoot = GuessStringType(PathIsRootA, PathIsRootW) + +# BOOL PathIsSameRoot( +# LPCTSTR pszPath1, +# LPCTSTR pszPath2 +# ); +def PathIsSameRootA(pszPath1, pszPath2): + _PathIsSameRootA = windll.shlwapi.PathIsSameRootA + _PathIsSameRootA.argtypes = [LPSTR, LPSTR] + _PathIsSameRootA.restype = bool + return _PathIsSameRootA(pszPath1, pszPath2) + +def PathIsSameRootW(pszPath1, pszPath2): + _PathIsSameRootW = windll.shlwapi.PathIsSameRootW + _PathIsSameRootW.argtypes = [LPWSTR, LPWSTR] + _PathIsSameRootW.restype = bool + return _PathIsSameRootW(pszPath1, pszPath2) + +PathIsSameRoot = GuessStringType(PathIsSameRootA, PathIsSameRootW) + +# BOOL PathIsUNC( +# LPCTSTR pszPath +# ); +def PathIsUNCA(pszPath): + _PathIsUNCA = windll.shlwapi.PathIsUNCA + _PathIsUNCA.argtypes = [LPSTR] + _PathIsUNCA.restype = bool + return _PathIsUNCA(pszPath) + +def PathIsUNCW(pszPath): + _PathIsUNCW = windll.shlwapi.PathIsUNCW + _PathIsUNCW.argtypes = [LPWSTR] + _PathIsUNCW.restype = bool + return _PathIsUNCW(pszPath) + +PathIsUNC = GuessStringType(PathIsUNCA, PathIsUNCW) + +# XXX WARNING +# PathMakePretty turns filenames into all lowercase. +# I'm not sure how well that might work on Wine. 
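+
+#--- Example usage (illustrative sketch, not part of upstream winappdbg) ------
+# A minimal sketch of how the Path* wrappers above are typically called; the
+# example paths and the results shown are assumptions, not verified output:
+#
+#     full = PathCombine("C:\\Windows", "system32")      # "C:\\Windows\\system32"
+#     if PathFileExists(full) and not PathIsRelative(full):
+#         name = PathFindFileName(full)                   # "system32"
+#         rel  = PathRelativePathTo(pszFrom="C:\\Windows", pszTo=full)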
+ +# BOOL PathMakePretty( +# LPCTSTR pszPath +# ); +def PathMakePrettyA(pszPath): + _PathMakePrettyA = windll.shlwapi.PathMakePrettyA + _PathMakePrettyA.argtypes = [LPSTR] + _PathMakePrettyA.restype = bool + _PathMakePrettyA.errcheck = RaiseIfZero + + pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH) + _PathMakePrettyA(pszPath) + return pszPath.value + +def PathMakePrettyW(pszPath): + _PathMakePrettyW = windll.shlwapi.PathMakePrettyW + _PathMakePrettyW.argtypes = [LPWSTR] + _PathMakePrettyW.restype = bool + _PathMakePrettyW.errcheck = RaiseIfZero + + pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH) + _PathMakePrettyW(pszPath) + return pszPath.value + +PathMakePretty = GuessStringType(PathMakePrettyA, PathMakePrettyW) + +# void PathRemoveArgs( +# LPTSTR pszPath +# ); +def PathRemoveArgsA(pszPath): + _PathRemoveArgsA = windll.shlwapi.PathRemoveArgsA + _PathRemoveArgsA.argtypes = [LPSTR] + + pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH) + _PathRemoveArgsA(pszPath) + return pszPath.value + +def PathRemoveArgsW(pszPath): + _PathRemoveArgsW = windll.shlwapi.PathRemoveArgsW + _PathRemoveArgsW.argtypes = [LPWSTR] + + pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH) + _PathRemoveArgsW(pszPath) + return pszPath.value + +PathRemoveArgs = GuessStringType(PathRemoveArgsA, PathRemoveArgsW) + +# void PathRemoveBackslash( +# LPTSTR pszPath +# ); +def PathRemoveBackslashA(pszPath): + _PathRemoveBackslashA = windll.shlwapi.PathRemoveBackslashA + _PathRemoveBackslashA.argtypes = [LPSTR] + + pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH) + _PathRemoveBackslashA(pszPath) + return pszPath.value + +def PathRemoveBackslashW(pszPath): + _PathRemoveBackslashW = windll.shlwapi.PathRemoveBackslashW + _PathRemoveBackslashW.argtypes = [LPWSTR] + + pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH) + _PathRemoveBackslashW(pszPath) + return pszPath.value + +PathRemoveBackslash = GuessStringType(PathRemoveBackslashA, PathRemoveBackslashW) + +# void PathRemoveExtension( +# LPTSTR pszPath +# ); +def PathRemoveExtensionA(pszPath): + _PathRemoveExtensionA = windll.shlwapi.PathRemoveExtensionA + _PathRemoveExtensionA.argtypes = [LPSTR] + + pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH) + _PathRemoveExtensionA(pszPath) + return pszPath.value + +def PathRemoveExtensionW(pszPath): + _PathRemoveExtensionW = windll.shlwapi.PathRemoveExtensionW + _PathRemoveExtensionW.argtypes = [LPWSTR] + + pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH) + _PathRemoveExtensionW(pszPath) + return pszPath.value + +PathRemoveExtension = GuessStringType(PathRemoveExtensionA, PathRemoveExtensionW) + +# void PathRemoveFileSpec( +# LPTSTR pszPath +# ); +def PathRemoveFileSpecA(pszPath): + _PathRemoveFileSpecA = windll.shlwapi.PathRemoveFileSpecA + _PathRemoveFileSpecA.argtypes = [LPSTR] + + pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH) + _PathRemoveFileSpecA(pszPath) + return pszPath.value + +def PathRemoveFileSpecW(pszPath): + _PathRemoveFileSpecW = windll.shlwapi.PathRemoveFileSpecW + _PathRemoveFileSpecW.argtypes = [LPWSTR] + + pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH) + _PathRemoveFileSpecW(pszPath) + return pszPath.value + +PathRemoveFileSpec = GuessStringType(PathRemoveFileSpecA, PathRemoveFileSpecW) + +# BOOL PathRenameExtension( +# LPTSTR pszPath, +# LPCTSTR pszExt +# ); +def PathRenameExtensionA(pszPath, pszExt): + _PathRenameExtensionA = windll.shlwapi.PathRenameExtensionA + _PathRenameExtensionA.argtypes = [LPSTR, LPSTR] + 
_PathRenameExtensionA.restype = bool + + pszPath = ctypes.create_string_buffer(pszPath, MAX_PATH) + if _PathRenameExtensionA(pszPath, pszExt): + return pszPath.value + return None + +def PathRenameExtensionW(pszPath, pszExt): + _PathRenameExtensionW = windll.shlwapi.PathRenameExtensionW + _PathRenameExtensionW.argtypes = [LPWSTR, LPWSTR] + _PathRenameExtensionW.restype = bool + + pszPath = ctypes.create_unicode_buffer(pszPath, MAX_PATH) + if _PathRenameExtensionW(pszPath, pszExt): + return pszPath.value + return None + +PathRenameExtension = GuessStringType(PathRenameExtensionA, PathRenameExtensionW) + +# BOOL PathUnExpandEnvStrings( +# LPCTSTR pszPath, +# LPTSTR pszBuf, +# UINT cchBuf +# ); +def PathUnExpandEnvStringsA(pszPath): + _PathUnExpandEnvStringsA = windll.shlwapi.PathUnExpandEnvStringsA + _PathUnExpandEnvStringsA.argtypes = [LPSTR, LPSTR] + _PathUnExpandEnvStringsA.restype = bool + _PathUnExpandEnvStringsA.errcheck = RaiseIfZero + + cchBuf = MAX_PATH + pszBuf = ctypes.create_string_buffer("", cchBuf) + _PathUnExpandEnvStringsA(pszPath, pszBuf, cchBuf) + return pszBuf.value + +def PathUnExpandEnvStringsW(pszPath): + _PathUnExpandEnvStringsW = windll.shlwapi.PathUnExpandEnvStringsW + _PathUnExpandEnvStringsW.argtypes = [LPWSTR, LPWSTR] + _PathUnExpandEnvStringsW.restype = bool + _PathUnExpandEnvStringsW.errcheck = RaiseIfZero + + cchBuf = MAX_PATH + pszBuf = ctypes.create_unicode_buffer(u"", cchBuf) + _PathUnExpandEnvStringsW(pszPath, pszBuf, cchBuf) + return pszBuf.value + +PathUnExpandEnvStrings = GuessStringType(PathUnExpandEnvStringsA, PathUnExpandEnvStringsW) + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py new file mode 100644 index 00000000..18560e55 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/user32.py @@ -0,0 +1,1727 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for user32.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.version import bits +from winappdbg.win32.kernel32 import GetLastError, SetLastError +from winappdbg.win32.gdi32 import POINT, PPOINT, LPPOINT, RECT, PRECT, LPRECT + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- Helpers ------------------------------------------------------------------ + +def MAKE_WPARAM(wParam): + """ + Convert arguments to the WPARAM type. + Used automatically by SendMessage, PostMessage, etc. + You shouldn't need to call this function. + """ + wParam = ctypes.cast(wParam, LPVOID).value + if wParam is None: + wParam = 0 + return wParam + +def MAKE_LPARAM(lParam): + """ + Convert arguments to the LPARAM type. + Used automatically by SendMessage, PostMessage, etc. + You shouldn't need to call this function. + """ + return ctypes.cast(lParam, LPARAM) + +class __WindowEnumerator (object): + """ + Window enumerator class. Used internally by the window enumeration APIs. 
+ """ + def __init__(self): + self.hwnd = list() + def __call__(self, hwnd, lParam): +## print hwnd # XXX DEBUG + self.hwnd.append(hwnd) + return TRUE + +#--- Types -------------------------------------------------------------------- + +WNDENUMPROC = WINFUNCTYPE(BOOL, HWND, PVOID) + +#--- Constants ---------------------------------------------------------------- + +HWND_DESKTOP = 0 +HWND_TOP = 1 +HWND_BOTTOM = 1 +HWND_TOPMOST = -1 +HWND_NOTOPMOST = -2 +HWND_MESSAGE = -3 + +# GetWindowLong / SetWindowLong +GWL_WNDPROC = -4 +GWL_HINSTANCE = -6 +GWL_HWNDPARENT = -8 +GWL_ID = -12 +GWL_STYLE = -16 +GWL_EXSTYLE = -20 +GWL_USERDATA = -21 + +# GetWindowLongPtr / SetWindowLongPtr +GWLP_WNDPROC = GWL_WNDPROC +GWLP_HINSTANCE = GWL_HINSTANCE +GWLP_HWNDPARENT = GWL_HWNDPARENT +GWLP_STYLE = GWL_STYLE +GWLP_EXSTYLE = GWL_EXSTYLE +GWLP_USERDATA = GWL_USERDATA +GWLP_ID = GWL_ID + +# ShowWindow +SW_HIDE = 0 +SW_SHOWNORMAL = 1 +SW_NORMAL = 1 +SW_SHOWMINIMIZED = 2 +SW_SHOWMAXIMIZED = 3 +SW_MAXIMIZE = 3 +SW_SHOWNOACTIVATE = 4 +SW_SHOW = 5 +SW_MINIMIZE = 6 +SW_SHOWMINNOACTIVE = 7 +SW_SHOWNA = 8 +SW_RESTORE = 9 +SW_SHOWDEFAULT = 10 +SW_FORCEMINIMIZE = 11 + +# SendMessageTimeout flags +SMTO_NORMAL = 0 +SMTO_BLOCK = 1 +SMTO_ABORTIFHUNG = 2 +SMTO_NOTIMEOUTIFNOTHUNG = 8 +SMTO_ERRORONEXIT = 0x20 + +# WINDOWPLACEMENT flags +WPF_SETMINPOSITION = 1 +WPF_RESTORETOMAXIMIZED = 2 +WPF_ASYNCWINDOWPLACEMENT = 4 + +# GetAncestor flags +GA_PARENT = 1 +GA_ROOT = 2 +GA_ROOTOWNER = 3 + +# GetWindow flags +GW_HWNDFIRST = 0 +GW_HWNDLAST = 1 +GW_HWNDNEXT = 2 +GW_HWNDPREV = 3 +GW_OWNER = 4 +GW_CHILD = 5 +GW_ENABLEDPOPUP = 6 + +#--- Window messages ---------------------------------------------------------- + +WM_USER = 0x400 +WM_APP = 0x800 + +WM_NULL = 0 +WM_CREATE = 1 +WM_DESTROY = 2 +WM_MOVE = 3 +WM_SIZE = 5 +WM_ACTIVATE = 6 +WA_INACTIVE = 0 +WA_ACTIVE = 1 +WA_CLICKACTIVE = 2 +WM_SETFOCUS = 7 +WM_KILLFOCUS = 8 +WM_ENABLE = 0x0A +WM_SETREDRAW = 0x0B +WM_SETTEXT = 0x0C +WM_GETTEXT = 0x0D +WM_GETTEXTLENGTH = 0x0E +WM_PAINT = 0x0F +WM_CLOSE = 0x10 +WM_QUERYENDSESSION = 0x11 +WM_QUIT = 0x12 +WM_QUERYOPEN = 0x13 +WM_ERASEBKGND = 0x14 +WM_SYSCOLORCHANGE = 0x15 +WM_ENDSESSION = 0x16 +WM_SHOWWINDOW = 0x18 +WM_WININICHANGE = 0x1A +WM_SETTINGCHANGE = WM_WININICHANGE +WM_DEVMODECHANGE = 0x1B +WM_ACTIVATEAPP = 0x1C +WM_FONTCHANGE = 0x1D +WM_TIMECHANGE = 0x1E +WM_CANCELMODE = 0x1F +WM_SETCURSOR = 0x20 +WM_MOUSEACTIVATE = 0x21 +WM_CHILDACTIVATE = 0x22 +WM_QUEUESYNC = 0x23 +WM_GETMINMAXINFO = 0x24 +WM_PAINTICON = 0x26 +WM_ICONERASEBKGND = 0x27 +WM_NEXTDLGCTL = 0x28 +WM_SPOOLERSTATUS = 0x2A +WM_DRAWITEM = 0x2B +WM_MEASUREITEM = 0x2C +WM_DELETEITEM = 0x2D +WM_VKEYTOITEM = 0x2E +WM_CHARTOITEM = 0x2F +WM_SETFONT = 0x30 +WM_GETFONT = 0x31 +WM_SETHOTKEY = 0x32 +WM_GETHOTKEY = 0x33 +WM_QUERYDRAGICON = 0x37 +WM_COMPAREITEM = 0x39 +WM_GETOBJECT = 0x3D +WM_COMPACTING = 0x41 +WM_OTHERWINDOWCREATED = 0x42 +WM_OTHERWINDOWDESTROYED = 0x43 +WM_COMMNOTIFY = 0x44 + +CN_RECEIVE = 0x1 +CN_TRANSMIT = 0x2 +CN_EVENT = 0x4 + +WM_WINDOWPOSCHANGING = 0x46 +WM_WINDOWPOSCHANGED = 0x47 +WM_POWER = 0x48 + +PWR_OK = 1 +PWR_FAIL = -1 +PWR_SUSPENDREQUEST = 1 +PWR_SUSPENDRESUME = 2 +PWR_CRITICALRESUME = 3 + +WM_COPYDATA = 0x4A +WM_CANCELJOURNAL = 0x4B +WM_NOTIFY = 0x4E +WM_INPUTLANGCHANGEREQUEST = 0x50 +WM_INPUTLANGCHANGE = 0x51 +WM_TCARD = 0x52 +WM_HELP = 0x53 +WM_USERCHANGED = 0x54 +WM_NOTIFYFORMAT = 0x55 +WM_CONTEXTMENU = 0x7B +WM_STYLECHANGING = 0x7C +WM_STYLECHANGED = 0x7D +WM_DISPLAYCHANGE = 0x7E +WM_GETICON = 0x7F +WM_SETICON = 0x80 +WM_NCCREATE = 0x81 +WM_NCDESTROY = 0x82 
+WM_NCCALCSIZE = 0x83 +WM_NCHITTEST = 0x84 +WM_NCPAINT = 0x85 +WM_NCACTIVATE = 0x86 +WM_GETDLGCODE = 0x87 +WM_SYNCPAINT = 0x88 +WM_NCMOUSEMOVE = 0x0A0 +WM_NCLBUTTONDOWN = 0x0A1 +WM_NCLBUTTONUP = 0x0A2 +WM_NCLBUTTONDBLCLK = 0x0A3 +WM_NCRBUTTONDOWN = 0x0A4 +WM_NCRBUTTONUP = 0x0A5 +WM_NCRBUTTONDBLCLK = 0x0A6 +WM_NCMBUTTONDOWN = 0x0A7 +WM_NCMBUTTONUP = 0x0A8 +WM_NCMBUTTONDBLCLK = 0x0A9 +WM_KEYFIRST = 0x100 +WM_KEYDOWN = 0x100 +WM_KEYUP = 0x101 +WM_CHAR = 0x102 +WM_DEADCHAR = 0x103 +WM_SYSKEYDOWN = 0x104 +WM_SYSKEYUP = 0x105 +WM_SYSCHAR = 0x106 +WM_SYSDEADCHAR = 0x107 +WM_KEYLAST = 0x108 +WM_INITDIALOG = 0x110 +WM_COMMAND = 0x111 +WM_SYSCOMMAND = 0x112 +WM_TIMER = 0x113 +WM_HSCROLL = 0x114 +WM_VSCROLL = 0x115 +WM_INITMENU = 0x116 +WM_INITMENUPOPUP = 0x117 +WM_MENUSELECT = 0x11F +WM_MENUCHAR = 0x120 +WM_ENTERIDLE = 0x121 +WM_CTLCOLORMSGBOX = 0x132 +WM_CTLCOLOREDIT = 0x133 +WM_CTLCOLORLISTBOX = 0x134 +WM_CTLCOLORBTN = 0x135 +WM_CTLCOLORDLG = 0x136 +WM_CTLCOLORSCROLLBAR = 0x137 +WM_CTLCOLORSTATIC = 0x138 +WM_MOUSEFIRST = 0x200 +WM_MOUSEMOVE = 0x200 +WM_LBUTTONDOWN = 0x201 +WM_LBUTTONUP = 0x202 +WM_LBUTTONDBLCLK = 0x203 +WM_RBUTTONDOWN = 0x204 +WM_RBUTTONUP = 0x205 +WM_RBUTTONDBLCLK = 0x206 +WM_MBUTTONDOWN = 0x207 +WM_MBUTTONUP = 0x208 +WM_MBUTTONDBLCLK = 0x209 +WM_MOUSELAST = 0x209 +WM_PARENTNOTIFY = 0x210 +WM_ENTERMENULOOP = 0x211 +WM_EXITMENULOOP = 0x212 +WM_MDICREATE = 0x220 +WM_MDIDESTROY = 0x221 +WM_MDIACTIVATE = 0x222 +WM_MDIRESTORE = 0x223 +WM_MDINEXT = 0x224 +WM_MDIMAXIMIZE = 0x225 +WM_MDITILE = 0x226 +WM_MDICASCADE = 0x227 +WM_MDIICONARRANGE = 0x228 +WM_MDIGETACTIVE = 0x229 +WM_MDISETMENU = 0x230 +WM_DROPFILES = 0x233 +WM_MDIREFRESHMENU = 0x234 +WM_CUT = 0x300 +WM_COPY = 0x301 +WM_PASTE = 0x302 +WM_CLEAR = 0x303 +WM_UNDO = 0x304 +WM_RENDERFORMAT = 0x305 +WM_RENDERALLFORMATS = 0x306 +WM_DESTROYCLIPBOARD = 0x307 +WM_DRAWCLIPBOARD = 0x308 +WM_PAINTCLIPBOARD = 0x309 +WM_VSCROLLCLIPBOARD = 0x30A +WM_SIZECLIPBOARD = 0x30B +WM_ASKCBFORMATNAME = 0x30C +WM_CHANGECBCHAIN = 0x30D +WM_HSCROLLCLIPBOARD = 0x30E +WM_QUERYNEWPALETTE = 0x30F +WM_PALETTEISCHANGING = 0x310 +WM_PALETTECHANGED = 0x311 +WM_HOTKEY = 0x312 +WM_PRINT = 0x317 +WM_PRINTCLIENT = 0x318 +WM_PENWINFIRST = 0x380 +WM_PENWINLAST = 0x38F + +#--- Structures --------------------------------------------------------------- + +# typedef struct _WINDOWPLACEMENT { +# UINT length; +# UINT flags; +# UINT showCmd; +# POINT ptMinPosition; +# POINT ptMaxPosition; +# RECT rcNormalPosition; +# } WINDOWPLACEMENT; +class WINDOWPLACEMENT(Structure): + _fields_ = [ + ('length', UINT), + ('flags', UINT), + ('showCmd', UINT), + ('ptMinPosition', POINT), + ('ptMaxPosition', POINT), + ('rcNormalPosition', RECT), + ] +PWINDOWPLACEMENT = POINTER(WINDOWPLACEMENT) +LPWINDOWPLACEMENT = PWINDOWPLACEMENT + +# typedef struct tagGUITHREADINFO { +# DWORD cbSize; +# DWORD flags; +# HWND hwndActive; +# HWND hwndFocus; +# HWND hwndCapture; +# HWND hwndMenuOwner; +# HWND hwndMoveSize; +# HWND hwndCaret; +# RECT rcCaret; +# } GUITHREADINFO, *PGUITHREADINFO; +class GUITHREADINFO(Structure): + _fields_ = [ + ('cbSize', DWORD), + ('flags', DWORD), + ('hwndActive', HWND), + ('hwndFocus', HWND), + ('hwndCapture', HWND), + ('hwndMenuOwner', HWND), + ('hwndMoveSize', HWND), + ('hwndCaret', HWND), + ('rcCaret', RECT), + ] +PGUITHREADINFO = POINTER(GUITHREADINFO) +LPGUITHREADINFO = PGUITHREADINFO + +#--- High level classes ------------------------------------------------------- + +# Point() and Rect() are here instead of gdi32.py because they were mainly +# created to handle window 
coordinates rather than drawing on the screen. + +# XXX not sure if these classes should be psyco-optimized, +# it may not work if the user wants to serialize them for some reason + +class Point(object): + """ + Python wrapper over the L{POINT} class. + + @type x: int + @ivar x: Horizontal coordinate + @type y: int + @ivar y: Vertical coordinate + """ + + def __init__(self, x = 0, y = 0): + """ + @see: L{POINT} + @type x: int + @param x: Horizontal coordinate + @type y: int + @param y: Vertical coordinate + """ + self.x = x + self.y = y + + def __iter__(self): + return (self.x, self.y).__iter__() + + def __len__(self): + return 2 + + def __getitem__(self, index): + return (self.x, self.y) [index] + + def __setitem__(self, index, value): + if index == 0: + self.x = value + elif index == 1: + self.y = value + else: + raise IndexError("index out of range") + + @property + def _as_parameter_(self): + """ + Compatibility with ctypes. + Allows passing transparently a Point object to an API call. + """ + return POINT(self.x, self.y) + + def screen_to_client(self, hWnd): + """ + Translates window screen coordinates to client coordinates. + + @see: L{client_to_screen}, L{translate} + + @type hWnd: int or L{HWND} or L{system.Window} + @param hWnd: Window handle. + + @rtype: L{Point} + @return: New object containing the translated coordinates. + """ + return ScreenToClient(hWnd, self) + + def client_to_screen(self, hWnd): + """ + Translates window client coordinates to screen coordinates. + + @see: L{screen_to_client}, L{translate} + + @type hWnd: int or L{HWND} or L{system.Window} + @param hWnd: Window handle. + + @rtype: L{Point} + @return: New object containing the translated coordinates. + """ + return ClientToScreen(hWnd, self) + + def translate(self, hWndFrom = HWND_DESKTOP, hWndTo = HWND_DESKTOP): + """ + Translate coordinates from one window to another. + + @note: To translate multiple points it's more efficient to use the + L{MapWindowPoints} function instead. + + @see: L{client_to_screen}, L{screen_to_client} + + @type hWndFrom: int or L{HWND} or L{system.Window} + @param hWndFrom: Window handle to translate from. + Use C{HWND_DESKTOP} for screen coordinates. + + @type hWndTo: int or L{HWND} or L{system.Window} + @param hWndTo: Window handle to translate to. + Use C{HWND_DESKTOP} for screen coordinates. + + @rtype: L{Point} + @return: New object containing the translated coordinates. + """ + return MapWindowPoints(hWndFrom, hWndTo, [self]) + +class Rect(object): + """ + Python wrapper over the L{RECT} class. + + @type left: int + @ivar left: Horizontal coordinate for the top left corner. + @type top: int + @ivar top: Vertical coordinate for the top left corner. + @type right: int + @ivar right: Horizontal coordinate for the bottom right corner. + @type bottom: int + @ivar bottom: Vertical coordinate for the bottom right corner. + + @type width: int + @ivar width: Width in pixels. Same as C{right - left}. + @type height: int + @ivar height: Height in pixels. Same as C{bottom - top}. + """ + + def __init__(self, left = 0, top = 0, right = 0, bottom = 0): + """ + @see: L{RECT} + @type left: int + @param left: Horizontal coordinate for the top left corner. + @type top: int + @param top: Vertical coordinate for the top left corner. + @type right: int + @param right: Horizontal coordinate for the bottom right corner. + @type bottom: int + @param bottom: Vertical coordinate for the bottom right corner. 
+ """ + self.left = left + self.top = top + self.right = right + self.bottom = bottom + + def __iter__(self): + return (self.left, self.top, self.right, self.bottom).__iter__() + + def __len__(self): + return 2 + + def __getitem__(self, index): + return (self.left, self.top, self.right, self.bottom) [index] + + def __setitem__(self, index, value): + if index == 0: + self.left = value + elif index == 1: + self.top = value + elif index == 2: + self.right = value + elif index == 3: + self.bottom = value + else: + raise IndexError("index out of range") + + @property + def _as_parameter_(self): + """ + Compatibility with ctypes. + Allows passing transparently a Point object to an API call. + """ + return RECT(self.left, self.top, self.right, self.bottom) + + def __get_width(self): + return self.right - self.left + + def __get_height(self): + return self.bottom - self.top + + def __set_width(self, value): + self.right = value - self.left + + def __set_height(self, value): + self.bottom = value - self.top + + width = property(__get_width, __set_width) + height = property(__get_height, __set_height) + + def screen_to_client(self, hWnd): + """ + Translates window screen coordinates to client coordinates. + + @see: L{client_to_screen}, L{translate} + + @type hWnd: int or L{HWND} or L{system.Window} + @param hWnd: Window handle. + + @rtype: L{Rect} + @return: New object containing the translated coordinates. + """ + topleft = ScreenToClient(hWnd, (self.left, self.top)) + bottomright = ScreenToClient(hWnd, (self.bottom, self.right)) + return Rect( topleft.x, topleft.y, bottomright.x, bottomright.y ) + + def client_to_screen(self, hWnd): + """ + Translates window client coordinates to screen coordinates. + + @see: L{screen_to_client}, L{translate} + + @type hWnd: int or L{HWND} or L{system.Window} + @param hWnd: Window handle. + + @rtype: L{Rect} + @return: New object containing the translated coordinates. + """ + topleft = ClientToScreen(hWnd, (self.left, self.top)) + bottomright = ClientToScreen(hWnd, (self.bottom, self.right)) + return Rect( topleft.x, topleft.y, bottomright.x, bottomright.y ) + + def translate(self, hWndFrom = HWND_DESKTOP, hWndTo = HWND_DESKTOP): + """ + Translate coordinates from one window to another. + + @see: L{client_to_screen}, L{screen_to_client} + + @type hWndFrom: int or L{HWND} or L{system.Window} + @param hWndFrom: Window handle to translate from. + Use C{HWND_DESKTOP} for screen coordinates. + + @type hWndTo: int or L{HWND} or L{system.Window} + @param hWndTo: Window handle to translate to. + Use C{HWND_DESKTOP} for screen coordinates. + + @rtype: L{Rect} + @return: New object containing the translated coordinates. + """ + points = [ (self.left, self.top), (self.right, self.bottom) ] + return MapWindowPoints(hWndFrom, hWndTo, points) + +class WindowPlacement(object): + """ + Python wrapper over the L{WINDOWPLACEMENT} class. + """ + + def __init__(self, wp = None): + """ + @type wp: L{WindowPlacement} or L{WINDOWPLACEMENT} + @param wp: Another window placement object. + """ + + # Initialize all properties with empty values. + self.flags = 0 + self.showCmd = 0 + self.ptMinPosition = Point() + self.ptMaxPosition = Point() + self.rcNormalPosition = Rect() + + # If a window placement was given copy it's properties. 
+ if wp: + self.flags = wp.flags + self.showCmd = wp.showCmd + self.ptMinPosition = Point( wp.ptMinPosition.x, wp.ptMinPosition.y ) + self.ptMaxPosition = Point( wp.ptMaxPosition.x, wp.ptMaxPosition.y ) + self.rcNormalPosition = Rect( + wp.rcNormalPosition.left, + wp.rcNormalPosition.top, + wp.rcNormalPosition.right, + wp.rcNormalPosition.bottom, + ) + + @property + def _as_parameter_(self): + """ + Compatibility with ctypes. + Allows passing transparently a Point object to an API call. + """ + wp = WINDOWPLACEMENT() + wp.length = sizeof(wp) + wp.flags = self.flags + wp.showCmd = self.showCmd + wp.ptMinPosition.x = self.ptMinPosition.x + wp.ptMinPosition.y = self.ptMinPosition.y + wp.ptMaxPosition.x = self.ptMaxPosition.x + wp.ptMaxPosition.y = self.ptMaxPosition.y + wp.rcNormalPosition.left = self.rcNormalPosition.left + wp.rcNormalPosition.top = self.rcNormalPosition.top + wp.rcNormalPosition.right = self.rcNormalPosition.right + wp.rcNormalPosition.bottom = self.rcNormalPosition.bottom + return wp + +#--- user32.dll --------------------------------------------------------------- + +# void WINAPI SetLastErrorEx( +# __in DWORD dwErrCode, +# __in DWORD dwType +# ); +def SetLastErrorEx(dwErrCode, dwType = 0): + _SetLastErrorEx = windll.user32.SetLastErrorEx + _SetLastErrorEx.argtypes = [DWORD, DWORD] + _SetLastErrorEx.restype = None + _SetLastErrorEx(dwErrCode, dwType) + +# HWND FindWindow( +# LPCTSTR lpClassName, +# LPCTSTR lpWindowName +# ); +def FindWindowA(lpClassName = None, lpWindowName = None): + _FindWindowA = windll.user32.FindWindowA + _FindWindowA.argtypes = [LPSTR, LPSTR] + _FindWindowA.restype = HWND + + hWnd = _FindWindowA(lpClassName, lpWindowName) + if not hWnd: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return hWnd + +def FindWindowW(lpClassName = None, lpWindowName = None): + _FindWindowW = windll.user32.FindWindowW + _FindWindowW.argtypes = [LPWSTR, LPWSTR] + _FindWindowW.restype = HWND + + hWnd = _FindWindowW(lpClassName, lpWindowName) + if not hWnd: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return hWnd + +FindWindow = GuessStringType(FindWindowA, FindWindowW) + +# HWND WINAPI FindWindowEx( +# __in_opt HWND hwndParent, +# __in_opt HWND hwndChildAfter, +# __in_opt LPCTSTR lpszClass, +# __in_opt LPCTSTR lpszWindow +# ); +def FindWindowExA(hwndParent = None, hwndChildAfter = None, lpClassName = None, lpWindowName = None): + _FindWindowExA = windll.user32.FindWindowExA + _FindWindowExA.argtypes = [HWND, HWND, LPSTR, LPSTR] + _FindWindowExA.restype = HWND + + hWnd = _FindWindowExA(hwndParent, hwndChildAfter, lpClassName, lpWindowName) + if not hWnd: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return hWnd + +def FindWindowExW(hwndParent = None, hwndChildAfter = None, lpClassName = None, lpWindowName = None): + _FindWindowExW = windll.user32.FindWindowExW + _FindWindowExW.argtypes = [HWND, HWND, LPWSTR, LPWSTR] + _FindWindowExW.restype = HWND + + hWnd = _FindWindowExW(hwndParent, hwndChildAfter, lpClassName, lpWindowName) + if not hWnd: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return hWnd + +FindWindowEx = GuessStringType(FindWindowExA, FindWindowExW) + +# int GetClassName( +# HWND hWnd, +# LPTSTR lpClassName, +# int nMaxCount +# ); +def GetClassNameA(hWnd): + _GetClassNameA = windll.user32.GetClassNameA + _GetClassNameA.argtypes = [HWND, LPSTR, ctypes.c_int] + 
_GetClassNameA.restype = ctypes.c_int + + nMaxCount = 0x1000 + dwCharSize = sizeof(CHAR) + while 1: + lpClassName = ctypes.create_string_buffer("", nMaxCount) + nCount = _GetClassNameA(hWnd, lpClassName, nMaxCount) + if nCount == 0: + raise ctypes.WinError() + if nCount < nMaxCount - dwCharSize: + break + nMaxCount += 0x1000 + return lpClassName.value + +def GetClassNameW(hWnd): + _GetClassNameW = windll.user32.GetClassNameW + _GetClassNameW.argtypes = [HWND, LPWSTR, ctypes.c_int] + _GetClassNameW.restype = ctypes.c_int + + nMaxCount = 0x1000 + dwCharSize = sizeof(WCHAR) + while 1: + lpClassName = ctypes.create_unicode_buffer(u"", nMaxCount) + nCount = _GetClassNameW(hWnd, lpClassName, nMaxCount) + if nCount == 0: + raise ctypes.WinError() + if nCount < nMaxCount - dwCharSize: + break + nMaxCount += 0x1000 + return lpClassName.value + +GetClassName = GuessStringType(GetClassNameA, GetClassNameW) + +# int WINAPI GetWindowText( +# __in HWND hWnd, +# __out LPTSTR lpString, +# __in int nMaxCount +# ); +def GetWindowTextA(hWnd): + _GetWindowTextA = windll.user32.GetWindowTextA + _GetWindowTextA.argtypes = [HWND, LPSTR, ctypes.c_int] + _GetWindowTextA.restype = ctypes.c_int + + nMaxCount = 0x1000 + dwCharSize = sizeof(CHAR) + while 1: + lpString = ctypes.create_string_buffer("", nMaxCount) + nCount = _GetWindowTextA(hWnd, lpString, nMaxCount) + if nCount == 0: + raise ctypes.WinError() + if nCount < nMaxCount - dwCharSize: + break + nMaxCount += 0x1000 + return lpString.value + +def GetWindowTextW(hWnd): + _GetWindowTextW = windll.user32.GetWindowTextW + _GetWindowTextW.argtypes = [HWND, LPWSTR, ctypes.c_int] + _GetWindowTextW.restype = ctypes.c_int + + nMaxCount = 0x1000 + dwCharSize = sizeof(CHAR) + while 1: + lpString = ctypes.create_string_buffer("", nMaxCount) + nCount = _GetWindowTextW(hWnd, lpString, nMaxCount) + if nCount == 0: + raise ctypes.WinError() + if nCount < nMaxCount - dwCharSize: + break + nMaxCount += 0x1000 + return lpString.value + +GetWindowText = GuessStringType(GetWindowTextA, GetWindowTextW) + +# BOOL WINAPI SetWindowText( +# __in HWND hWnd, +# __in_opt LPCTSTR lpString +# ); +def SetWindowTextA(hWnd, lpString = None): + _SetWindowTextA = windll.user32.SetWindowTextA + _SetWindowTextA.argtypes = [HWND, LPSTR] + _SetWindowTextA.restype = bool + _SetWindowTextA.errcheck = RaiseIfZero + _SetWindowTextA(hWnd, lpString) + +def SetWindowTextW(hWnd, lpString = None): + _SetWindowTextW = windll.user32.SetWindowTextW + _SetWindowTextW.argtypes = [HWND, LPWSTR] + _SetWindowTextW.restype = bool + _SetWindowTextW.errcheck = RaiseIfZero + _SetWindowTextW(hWnd, lpString) + +SetWindowText = GuessStringType(SetWindowTextA, SetWindowTextW) + +# LONG GetWindowLong( +# HWND hWnd, +# int nIndex +# ); +def GetWindowLongA(hWnd, nIndex = 0): + _GetWindowLongA = windll.user32.GetWindowLongA + _GetWindowLongA.argtypes = [HWND, ctypes.c_int] + _GetWindowLongA.restype = DWORD + + SetLastError(ERROR_SUCCESS) + retval = _GetWindowLongA(hWnd, nIndex) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + +def GetWindowLongW(hWnd, nIndex = 0): + _GetWindowLongW = windll.user32.GetWindowLongW + _GetWindowLongW.argtypes = [HWND, ctypes.c_int] + _GetWindowLongW.restype = DWORD + + SetLastError(ERROR_SUCCESS) + retval = _GetWindowLongW(hWnd, nIndex) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + +GetWindowLong = DefaultStringType(GetWindowLongA, 
GetWindowLongW) + +# LONG_PTR WINAPI GetWindowLongPtr( +# _In_ HWND hWnd, +# _In_ int nIndex +# ); + +if bits == 32: + + GetWindowLongPtrA = GetWindowLongA + GetWindowLongPtrW = GetWindowLongW + GetWindowLongPtr = GetWindowLong + +else: + + def GetWindowLongPtrA(hWnd, nIndex = 0): + _GetWindowLongPtrA = windll.user32.GetWindowLongPtrA + _GetWindowLongPtrA.argtypes = [HWND, ctypes.c_int] + _GetWindowLongPtrA.restype = SIZE_T + + SetLastError(ERROR_SUCCESS) + retval = _GetWindowLongPtrA(hWnd, nIndex) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + + def GetWindowLongPtrW(hWnd, nIndex = 0): + _GetWindowLongPtrW = windll.user32.GetWindowLongPtrW + _GetWindowLongPtrW.argtypes = [HWND, ctypes.c_int] + _GetWindowLongPtrW.restype = DWORD + + SetLastError(ERROR_SUCCESS) + retval = _GetWindowLongPtrW(hWnd, nIndex) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + + GetWindowLongPtr = DefaultStringType(GetWindowLongPtrA, GetWindowLongPtrW) + +# LONG WINAPI SetWindowLong( +# _In_ HWND hWnd, +# _In_ int nIndex, +# _In_ LONG dwNewLong +# ); + +def SetWindowLongA(hWnd, nIndex, dwNewLong): + _SetWindowLongA = windll.user32.SetWindowLongA + _SetWindowLongA.argtypes = [HWND, ctypes.c_int, DWORD] + _SetWindowLongA.restype = DWORD + + SetLastError(ERROR_SUCCESS) + retval = _SetWindowLongA(hWnd, nIndex, dwNewLong) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + +def SetWindowLongW(hWnd, nIndex, dwNewLong): + _SetWindowLongW = windll.user32.SetWindowLongW + _SetWindowLongW.argtypes = [HWND, ctypes.c_int, DWORD] + _SetWindowLongW.restype = DWORD + + SetLastError(ERROR_SUCCESS) + retval = _SetWindowLongW(hWnd, nIndex, dwNewLong) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + +SetWindowLong = DefaultStringType(SetWindowLongA, SetWindowLongW) + +# LONG_PTR WINAPI SetWindowLongPtr( +# _In_ HWND hWnd, +# _In_ int nIndex, +# _In_ LONG_PTR dwNewLong +# ); + +if bits == 32: + + SetWindowLongPtrA = SetWindowLongA + SetWindowLongPtrW = SetWindowLongW + SetWindowLongPtr = SetWindowLong + +else: + + def SetWindowLongPtrA(hWnd, nIndex, dwNewLong): + _SetWindowLongPtrA = windll.user32.SetWindowLongPtrA + _SetWindowLongPtrA.argtypes = [HWND, ctypes.c_int, SIZE_T] + _SetWindowLongPtrA.restype = SIZE_T + + SetLastError(ERROR_SUCCESS) + retval = _SetWindowLongPtrA(hWnd, nIndex, dwNewLong) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + + def SetWindowLongPtrW(hWnd, nIndex, dwNewLong): + _SetWindowLongPtrW = windll.user32.SetWindowLongPtrW + _SetWindowLongPtrW.argtypes = [HWND, ctypes.c_int, SIZE_T] + _SetWindowLongPtrW.restype = SIZE_T + + SetLastError(ERROR_SUCCESS) + retval = _SetWindowLongPtrW(hWnd, nIndex, dwNewLong) + if retval == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + return retval + + SetWindowLongPtr = DefaultStringType(SetWindowLongPtrA, SetWindowLongPtrW) + +# HWND GetShellWindow(VOID); +def GetShellWindow(): + _GetShellWindow = windll.user32.GetShellWindow + _GetShellWindow.argtypes = [] + _GetShellWindow.restype = HWND + _GetShellWindow.errcheck = RaiseIfZero + return _GetShellWindow() + +# DWORD GetWindowThreadProcessId( +# HWND hWnd, +# LPDWORD lpdwProcessId +# ); +def 
GetWindowThreadProcessId(hWnd): + _GetWindowThreadProcessId = windll.user32.GetWindowThreadProcessId + _GetWindowThreadProcessId.argtypes = [HWND, LPDWORD] + _GetWindowThreadProcessId.restype = DWORD + _GetWindowThreadProcessId.errcheck = RaiseIfZero + + dwProcessId = DWORD(0) + dwThreadId = _GetWindowThreadProcessId(hWnd, byref(dwProcessId)) + return (dwThreadId, dwProcessId.value) + +# HWND WINAPI GetWindow( +# __in HWND hwnd, +# __in UINT uCmd +# ); +def GetWindow(hWnd, uCmd): + _GetWindow = windll.user32.GetWindow + _GetWindow.argtypes = [HWND, UINT] + _GetWindow.restype = HWND + + SetLastError(ERROR_SUCCESS) + hWndTarget = _GetWindow(hWnd, uCmd) + if not hWndTarget: + winerr = GetLastError() + if winerr != ERROR_SUCCESS: + raise ctypes.WinError(winerr) + return hWndTarget + +# HWND GetParent( +# HWND hWnd +# ); +def GetParent(hWnd): + _GetParent = windll.user32.GetParent + _GetParent.argtypes = [HWND] + _GetParent.restype = HWND + + SetLastError(ERROR_SUCCESS) + hWndParent = _GetParent(hWnd) + if not hWndParent: + winerr = GetLastError() + if winerr != ERROR_SUCCESS: + raise ctypes.WinError(winerr) + return hWndParent + +# HWND WINAPI GetAncestor( +# __in HWND hwnd, +# __in UINT gaFlags +# ); +def GetAncestor(hWnd, gaFlags = GA_PARENT): + _GetAncestor = windll.user32.GetAncestor + _GetAncestor.argtypes = [HWND, UINT] + _GetAncestor.restype = HWND + + SetLastError(ERROR_SUCCESS) + hWndParent = _GetAncestor(hWnd, gaFlags) + if not hWndParent: + winerr = GetLastError() + if winerr != ERROR_SUCCESS: + raise ctypes.WinError(winerr) + return hWndParent + +# BOOL EnableWindow( +# HWND hWnd, +# BOOL bEnable +# ); +def EnableWindow(hWnd, bEnable = True): + _EnableWindow = windll.user32.EnableWindow + _EnableWindow.argtypes = [HWND, BOOL] + _EnableWindow.restype = bool + return _EnableWindow(hWnd, bool(bEnable)) + +# BOOL ShowWindow( +# HWND hWnd, +# int nCmdShow +# ); +def ShowWindow(hWnd, nCmdShow = SW_SHOW): + _ShowWindow = windll.user32.ShowWindow + _ShowWindow.argtypes = [HWND, ctypes.c_int] + _ShowWindow.restype = bool + return _ShowWindow(hWnd, nCmdShow) + +# BOOL ShowWindowAsync( +# HWND hWnd, +# int nCmdShow +# ); +def ShowWindowAsync(hWnd, nCmdShow = SW_SHOW): + _ShowWindowAsync = windll.user32.ShowWindowAsync + _ShowWindowAsync.argtypes = [HWND, ctypes.c_int] + _ShowWindowAsync.restype = bool + return _ShowWindowAsync(hWnd, nCmdShow) + +# HWND GetDesktopWindow(VOID); +def GetDesktopWindow(): + _GetDesktopWindow = windll.user32.GetDesktopWindow + _GetDesktopWindow.argtypes = [] + _GetDesktopWindow.restype = HWND + _GetDesktopWindow.errcheck = RaiseIfZero + return _GetDesktopWindow() + +# HWND GetForegroundWindow(VOID); +def GetForegroundWindow(): + _GetForegroundWindow = windll.user32.GetForegroundWindow + _GetForegroundWindow.argtypes = [] + _GetForegroundWindow.restype = HWND + _GetForegroundWindow.errcheck = RaiseIfZero + return _GetForegroundWindow() + +# BOOL IsWindow( +# HWND hWnd +# ); +def IsWindow(hWnd): + _IsWindow = windll.user32.IsWindow + _IsWindow.argtypes = [HWND] + _IsWindow.restype = bool + return _IsWindow(hWnd) + +# BOOL IsWindowVisible( +# HWND hWnd +# ); +def IsWindowVisible(hWnd): + _IsWindowVisible = windll.user32.IsWindowVisible + _IsWindowVisible.argtypes = [HWND] + _IsWindowVisible.restype = bool + return _IsWindowVisible(hWnd) + +# BOOL IsWindowEnabled( +# HWND hWnd +# ); +def IsWindowEnabled(hWnd): + _IsWindowEnabled = windll.user32.IsWindowEnabled + _IsWindowEnabled.argtypes = [HWND] + _IsWindowEnabled.restype = bool + return _IsWindowEnabled(hWnd) + +# 
BOOL IsZoomed( +# HWND hWnd +# ); +def IsZoomed(hWnd): + _IsZoomed = windll.user32.IsZoomed + _IsZoomed.argtypes = [HWND] + _IsZoomed.restype = bool + return _IsZoomed(hWnd) + +# BOOL IsIconic( +# HWND hWnd +# ); +def IsIconic(hWnd): + _IsIconic = windll.user32.IsIconic + _IsIconic.argtypes = [HWND] + _IsIconic.restype = bool + return _IsIconic(hWnd) + +# BOOL IsChild( +# HWND hWnd +# ); +def IsChild(hWnd): + _IsChild = windll.user32.IsChild + _IsChild.argtypes = [HWND] + _IsChild.restype = bool + return _IsChild(hWnd) + +# HWND WindowFromPoint( +# POINT Point +# ); +def WindowFromPoint(point): + _WindowFromPoint = windll.user32.WindowFromPoint + _WindowFromPoint.argtypes = [POINT] + _WindowFromPoint.restype = HWND + _WindowFromPoint.errcheck = RaiseIfZero + if isinstance(point, tuple): + point = POINT(*point) + return _WindowFromPoint(point) + +# HWND ChildWindowFromPoint( +# HWND hWndParent, +# POINT Point +# ); +def ChildWindowFromPoint(hWndParent, point): + _ChildWindowFromPoint = windll.user32.ChildWindowFromPoint + _ChildWindowFromPoint.argtypes = [HWND, POINT] + _ChildWindowFromPoint.restype = HWND + _ChildWindowFromPoint.errcheck = RaiseIfZero + if isinstance(point, tuple): + point = POINT(*point) + return _ChildWindowFromPoint(hWndParent, point) + +#HWND RealChildWindowFromPoint( +# HWND hwndParent, +# POINT ptParentClientCoords +#); +def RealChildWindowFromPoint(hWndParent, ptParentClientCoords): + _RealChildWindowFromPoint = windll.user32.RealChildWindowFromPoint + _RealChildWindowFromPoint.argtypes = [HWND, POINT] + _RealChildWindowFromPoint.restype = HWND + _RealChildWindowFromPoint.errcheck = RaiseIfZero + if isinstance(ptParentClientCoords, tuple): + ptParentClientCoords = POINT(*ptParentClientCoords) + return _RealChildWindowFromPoint(hWndParent, ptParentClientCoords) + +# BOOL ScreenToClient( +# __in HWND hWnd, +# LPPOINT lpPoint +# ); +def ScreenToClient(hWnd, lpPoint): + _ScreenToClient = windll.user32.ScreenToClient + _ScreenToClient.argtypes = [HWND, LPPOINT] + _ScreenToClient.restype = bool + _ScreenToClient.errcheck = RaiseIfZero + + if isinstance(lpPoint, tuple): + lpPoint = POINT(*lpPoint) + else: + lpPoint = POINT(lpPoint.x, lpPoint.y) + _ScreenToClient(hWnd, byref(lpPoint)) + return Point(lpPoint.x, lpPoint.y) + +# BOOL ClientToScreen( +# HWND hWnd, +# LPPOINT lpPoint +# ); +def ClientToScreen(hWnd, lpPoint): + _ClientToScreen = windll.user32.ClientToScreen + _ClientToScreen.argtypes = [HWND, LPPOINT] + _ClientToScreen.restype = bool + _ClientToScreen.errcheck = RaiseIfZero + + if isinstance(lpPoint, tuple): + lpPoint = POINT(*lpPoint) + else: + lpPoint = POINT(lpPoint.x, lpPoint.y) + _ClientToScreen(hWnd, byref(lpPoint)) + return Point(lpPoint.x, lpPoint.y) + +# int MapWindowPoints( +# __in HWND hWndFrom, +# __in HWND hWndTo, +# __inout LPPOINT lpPoints, +# __in UINT cPoints +# ); +def MapWindowPoints(hWndFrom, hWndTo, lpPoints): + _MapWindowPoints = windll.user32.MapWindowPoints + _MapWindowPoints.argtypes = [HWND, HWND, LPPOINT, UINT] + _MapWindowPoints.restype = ctypes.c_int + + cPoints = len(lpPoints) + lpPoints = (POINT * cPoints)(* lpPoints) + SetLastError(ERROR_SUCCESS) + number = _MapWindowPoints(hWndFrom, hWndTo, byref(lpPoints), cPoints) + if number == 0: + errcode = GetLastError() + if errcode != ERROR_SUCCESS: + raise ctypes.WinError(errcode) + x_delta = number & 0xFFFF + y_delta = (number >> 16) & 0xFFFF + return x_delta, y_delta, [ (Point.x, Point.y) for Point in lpPoints ] + +#BOOL SetForegroundWindow( +# HWND hWnd +#); +def 
SetForegroundWindow(hWnd): + _SetForegroundWindow = windll.user32.SetForegroundWindow + _SetForegroundWindow.argtypes = [HWND] + _SetForegroundWindow.restype = bool + _SetForegroundWindow.errcheck = RaiseIfZero + return _SetForegroundWindow(hWnd) + +# BOOL GetWindowPlacement( +# HWND hWnd, +# WINDOWPLACEMENT *lpwndpl +# ); +def GetWindowPlacement(hWnd): + _GetWindowPlacement = windll.user32.GetWindowPlacement + _GetWindowPlacement.argtypes = [HWND, PWINDOWPLACEMENT] + _GetWindowPlacement.restype = bool + _GetWindowPlacement.errcheck = RaiseIfZero + + lpwndpl = WINDOWPLACEMENT() + lpwndpl.length = sizeof(lpwndpl) + _GetWindowPlacement(hWnd, byref(lpwndpl)) + return WindowPlacement(lpwndpl) + +# BOOL SetWindowPlacement( +# HWND hWnd, +# WINDOWPLACEMENT *lpwndpl +# ); +def SetWindowPlacement(hWnd, lpwndpl): + _SetWindowPlacement = windll.user32.SetWindowPlacement + _SetWindowPlacement.argtypes = [HWND, PWINDOWPLACEMENT] + _SetWindowPlacement.restype = bool + _SetWindowPlacement.errcheck = RaiseIfZero + + if isinstance(lpwndpl, WINDOWPLACEMENT): + lpwndpl.length = sizeof(lpwndpl) + _SetWindowPlacement(hWnd, byref(lpwndpl)) + +# BOOL WINAPI GetWindowRect( +# __in HWND hWnd, +# __out LPRECT lpRect +# ); +def GetWindowRect(hWnd): + _GetWindowRect = windll.user32.GetWindowRect + _GetWindowRect.argtypes = [HWND, LPRECT] + _GetWindowRect.restype = bool + _GetWindowRect.errcheck = RaiseIfZero + + lpRect = RECT() + _GetWindowRect(hWnd, byref(lpRect)) + return Rect(lpRect.left, lpRect.top, lpRect.right, lpRect.bottom) + +# BOOL WINAPI GetClientRect( +# __in HWND hWnd, +# __out LPRECT lpRect +# ); +def GetClientRect(hWnd): + _GetClientRect = windll.user32.GetClientRect + _GetClientRect.argtypes = [HWND, LPRECT] + _GetClientRect.restype = bool + _GetClientRect.errcheck = RaiseIfZero + + lpRect = RECT() + _GetClientRect(hWnd, byref(lpRect)) + return Rect(lpRect.left, lpRect.top, lpRect.right, lpRect.bottom) + +#BOOL MoveWindow( +# HWND hWnd, +# int X, +# int Y, +# int nWidth, +# int nHeight, +# BOOL bRepaint +#); +def MoveWindow(hWnd, X, Y, nWidth, nHeight, bRepaint = True): + _MoveWindow = windll.user32.MoveWindow + _MoveWindow.argtypes = [HWND, ctypes.c_int, ctypes.c_int, ctypes.c_int, ctypes.c_int, BOOL] + _MoveWindow.restype = bool + _MoveWindow.errcheck = RaiseIfZero + _MoveWindow(hWnd, X, Y, nWidth, nHeight, bool(bRepaint)) + +# BOOL GetGUIThreadInfo( +# DWORD idThread, +# LPGUITHREADINFO lpgui +# ); +def GetGUIThreadInfo(idThread): + _GetGUIThreadInfo = windll.user32.GetGUIThreadInfo + _GetGUIThreadInfo.argtypes = [DWORD, LPGUITHREADINFO] + _GetGUIThreadInfo.restype = bool + _GetGUIThreadInfo.errcheck = RaiseIfZero + + gui = GUITHREADINFO() + _GetGUIThreadInfo(idThread, byref(gui)) + return gui + +# BOOL CALLBACK EnumWndProc( +# HWND hwnd, +# LPARAM lParam +# ); +class __EnumWndProc (__WindowEnumerator): + pass + +# BOOL EnumWindows( +# WNDENUMPROC lpEnumFunc, +# LPARAM lParam +# ); +def EnumWindows(): + _EnumWindows = windll.user32.EnumWindows + _EnumWindows.argtypes = [WNDENUMPROC, LPARAM] + _EnumWindows.restype = bool + + EnumFunc = __EnumWndProc() + lpEnumFunc = WNDENUMPROC(EnumFunc) + if not _EnumWindows(lpEnumFunc, NULL): + errcode = GetLastError() + if errcode not in (ERROR_NO_MORE_FILES, ERROR_SUCCESS): + raise ctypes.WinError(errcode) + return EnumFunc.hwnd + +# BOOL CALLBACK EnumThreadWndProc( +# HWND hwnd, +# LPARAM lParam +# ); +class __EnumThreadWndProc (__WindowEnumerator): + pass + +# BOOL EnumThreadWindows( +# DWORD dwThreadId, +# WNDENUMPROC lpfn, +# LPARAM lParam +# ); +def 
EnumThreadWindows(dwThreadId): + _EnumThreadWindows = windll.user32.EnumThreadWindows + _EnumThreadWindows.argtypes = [DWORD, WNDENUMPROC, LPARAM] + _EnumThreadWindows.restype = bool + + fn = __EnumThreadWndProc() + lpfn = WNDENUMPROC(fn) + if not _EnumThreadWindows(dwThreadId, lpfn, NULL): + errcode = GetLastError() + if errcode not in (ERROR_NO_MORE_FILES, ERROR_SUCCESS): + raise ctypes.WinError(errcode) + return fn.hwnd + +# BOOL CALLBACK EnumChildProc( +# HWND hwnd, +# LPARAM lParam +# ); +class __EnumChildProc (__WindowEnumerator): + pass + +# BOOL EnumChildWindows( +# HWND hWndParent, +# WNDENUMPROC lpEnumFunc, +# LPARAM lParam +# ); +def EnumChildWindows(hWndParent = NULL): + _EnumChildWindows = windll.user32.EnumChildWindows + _EnumChildWindows.argtypes = [HWND, WNDENUMPROC, LPARAM] + _EnumChildWindows.restype = bool + + EnumFunc = __EnumChildProc() + lpEnumFunc = WNDENUMPROC(EnumFunc) + SetLastError(ERROR_SUCCESS) + _EnumChildWindows(hWndParent, lpEnumFunc, NULL) + errcode = GetLastError() + if errcode != ERROR_SUCCESS and errcode not in (ERROR_NO_MORE_FILES, ERROR_SUCCESS): + raise ctypes.WinError(errcode) + return EnumFunc.hwnd + +# LRESULT SendMessage( +# HWND hWnd, +# UINT Msg, +# WPARAM wParam, +# LPARAM lParam +# ); +def SendMessageA(hWnd, Msg, wParam = 0, lParam = 0): + _SendMessageA = windll.user32.SendMessageA + _SendMessageA.argtypes = [HWND, UINT, WPARAM, LPARAM] + _SendMessageA.restype = LRESULT + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + return _SendMessageA(hWnd, Msg, wParam, lParam) + +def SendMessageW(hWnd, Msg, wParam = 0, lParam = 0): + _SendMessageW = windll.user32.SendMessageW + _SendMessageW.argtypes = [HWND, UINT, WPARAM, LPARAM] + _SendMessageW.restype = LRESULT + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + return _SendMessageW(hWnd, Msg, wParam, lParam) + +SendMessage = GuessStringType(SendMessageA, SendMessageW) + +# BOOL PostMessage( +# HWND hWnd, +# UINT Msg, +# WPARAM wParam, +# LPARAM lParam +# ); +def PostMessageA(hWnd, Msg, wParam = 0, lParam = 0): + _PostMessageA = windll.user32.PostMessageA + _PostMessageA.argtypes = [HWND, UINT, WPARAM, LPARAM] + _PostMessageA.restype = bool + _PostMessageA.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + _PostMessageA(hWnd, Msg, wParam, lParam) + +def PostMessageW(hWnd, Msg, wParam = 0, lParam = 0): + _PostMessageW = windll.user32.PostMessageW + _PostMessageW.argtypes = [HWND, UINT, WPARAM, LPARAM] + _PostMessageW.restype = bool + _PostMessageW.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + _PostMessageW(hWnd, Msg, wParam, lParam) + +PostMessage = GuessStringType(PostMessageA, PostMessageW) + +# BOOL PostThreadMessage( +# DWORD idThread, +# UINT Msg, +# WPARAM wParam, +# LPARAM lParam +# ); +def PostThreadMessageA(idThread, Msg, wParam = 0, lParam = 0): + _PostThreadMessageA = windll.user32.PostThreadMessageA + _PostThreadMessageA.argtypes = [DWORD, UINT, WPARAM, LPARAM] + _PostThreadMessageA.restype = bool + _PostThreadMessageA.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + _PostThreadMessageA(idThread, Msg, wParam, lParam) + +def PostThreadMessageW(idThread, Msg, wParam = 0, lParam = 0): + _PostThreadMessageW = windll.user32.PostThreadMessageW + _PostThreadMessageW.argtypes = [DWORD, UINT, WPARAM, LPARAM] + _PostThreadMessageW.restype = bool + _PostThreadMessageW.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) 
+ _PostThreadMessageW(idThread, Msg, wParam, lParam) + +PostThreadMessage = GuessStringType(PostThreadMessageA, PostThreadMessageW) + +# LRESULT SendMessageTimeout( +# HWND hWnd, +# UINT Msg, +# WPARAM wParam, +# LPARAM lParam, +# UINT fuFlags, +# UINT uTimeout, +# PDWORD_PTR lpdwResult +# ); +def SendMessageTimeoutA(hWnd, Msg, wParam = 0, lParam = 0, fuFlags = 0, uTimeout = 0): + _SendMessageTimeoutA = windll.user32.SendMessageTimeoutA + _SendMessageTimeoutA.argtypes = [HWND, UINT, WPARAM, LPARAM, UINT, UINT, PDWORD_PTR] + _SendMessageTimeoutA.restype = LRESULT + _SendMessageTimeoutA.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + dwResult = DWORD(0) + _SendMessageTimeoutA(hWnd, Msg, wParam, lParam, fuFlags, uTimeout, byref(dwResult)) + return dwResult.value + +def SendMessageTimeoutW(hWnd, Msg, wParam = 0, lParam = 0, fuFlags = 0, uTimeout = 0): + _SendMessageTimeoutW = windll.user32.SendMessageTimeoutW + _SendMessageTimeoutW.argtypes = [HWND, UINT, WPARAM, LPARAM, UINT, UINT, PDWORD_PTR] + _SendMessageTimeoutW.restype = LRESULT + _SendMessageTimeoutW.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + dwResult = DWORD(0) + _SendMessageTimeoutW(hWnd, Msg, wParam, lParam, fuFlags, uTimeout, byref(dwResult)) + return dwResult.value + +SendMessageTimeout = GuessStringType(SendMessageTimeoutA, SendMessageTimeoutW) + +# BOOL SendNotifyMessage( +# HWND hWnd, +# UINT Msg, +# WPARAM wParam, +# LPARAM lParam +# ); +def SendNotifyMessageA(hWnd, Msg, wParam = 0, lParam = 0): + _SendNotifyMessageA = windll.user32.SendNotifyMessageA + _SendNotifyMessageA.argtypes = [HWND, UINT, WPARAM, LPARAM] + _SendNotifyMessageA.restype = bool + _SendNotifyMessageA.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + _SendNotifyMessageA(hWnd, Msg, wParam, lParam) + +def SendNotifyMessageW(hWnd, Msg, wParam = 0, lParam = 0): + _SendNotifyMessageW = windll.user32.SendNotifyMessageW + _SendNotifyMessageW.argtypes = [HWND, UINT, WPARAM, LPARAM] + _SendNotifyMessageW.restype = bool + _SendNotifyMessageW.errcheck = RaiseIfZero + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + _SendNotifyMessageW(hWnd, Msg, wParam, lParam) + +SendNotifyMessage = GuessStringType(SendNotifyMessageA, SendNotifyMessageW) + +# LRESULT SendDlgItemMessage( +# HWND hDlg, +# int nIDDlgItem, +# UINT Msg, +# WPARAM wParam, +# LPARAM lParam +# ); +def SendDlgItemMessageA(hDlg, nIDDlgItem, Msg, wParam = 0, lParam = 0): + _SendDlgItemMessageA = windll.user32.SendDlgItemMessageA + _SendDlgItemMessageA.argtypes = [HWND, ctypes.c_int, UINT, WPARAM, LPARAM] + _SendDlgItemMessageA.restype = LRESULT + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + return _SendDlgItemMessageA(hDlg, nIDDlgItem, Msg, wParam, lParam) + +def SendDlgItemMessageW(hDlg, nIDDlgItem, Msg, wParam = 0, lParam = 0): + _SendDlgItemMessageW = windll.user32.SendDlgItemMessageW + _SendDlgItemMessageW.argtypes = [HWND, ctypes.c_int, UINT, WPARAM, LPARAM] + _SendDlgItemMessageW.restype = LRESULT + + wParam = MAKE_WPARAM(wParam) + lParam = MAKE_LPARAM(lParam) + return _SendDlgItemMessageW(hDlg, nIDDlgItem, Msg, wParam, lParam) + +SendDlgItemMessage = GuessStringType(SendDlgItemMessageA, SendDlgItemMessageW) + +# DWORD WINAPI WaitForInputIdle( +# _In_ HANDLE hProcess, +# _In_ DWORD dwMilliseconds +# ); +def WaitForInputIdle(hProcess, dwMilliseconds = INFINITE): + _WaitForInputIdle = windll.user32.WaitForInputIdle + _WaitForInputIdle.argtypes = [HANDLE, DWORD] + _WaitForInputIdle.restype =
DWORD + + r = _WaitForInputIdle(hProcess, dwMilliseconds) + if r == WAIT_FAILED: + raise ctypes.WinError() + return r + +# UINT RegisterWindowMessage( +# LPCTSTR lpString +# ); +def RegisterWindowMessageA(lpString): + _RegisterWindowMessageA = windll.user32.RegisterWindowMessageA + _RegisterWindowMessageA.argtypes = [LPSTR] + _RegisterWindowMessageA.restype = UINT + _RegisterWindowMessageA.errcheck = RaiseIfZero + return _RegisterWindowMessageA(lpString) + +def RegisterWindowMessageW(lpString): + _RegisterWindowMessageW = windll.user32.RegisterWindowMessageW + _RegisterWindowMessageW.argtypes = [LPWSTR] + _RegisterWindowMessageW.restype = UINT + _RegisterWindowMessageW.errcheck = RaiseIfZero + return _RegisterWindowMessageW(lpString) + +RegisterWindowMessage = GuessStringType(RegisterWindowMessageA, RegisterWindowMessageW) + +# UINT RegisterClipboardFormat( +# LPCTSTR lpString +# ); +def RegisterClipboardFormatA(lpString): + _RegisterClipboardFormatA = windll.user32.RegisterClipboardFormatA + _RegisterClipboardFormatA.argtypes = [LPSTR] + _RegisterClipboardFormatA.restype = UINT + _RegisterClipboardFormatA.errcheck = RaiseIfZero + return _RegisterClipboardFormatA(lpString) + +def RegisterClipboardFormatW(lpString): + _RegisterClipboardFormatW = windll.user32.RegisterClipboardFormatW + _RegisterClipboardFormatW.argtypes = [LPWSTR] + _RegisterClipboardFormatW.restype = UINT + _RegisterClipboardFormatW.errcheck = RaiseIfZero + return _RegisterClipboardFormatW(lpString) + +RegisterClipboardFormat = GuessStringType(RegisterClipboardFormatA, RegisterClipboardFormatW) + +# HANDLE WINAPI GetProp( +# __in HWND hWnd, +# __in LPCTSTR lpString +# ); +def GetPropA(hWnd, lpString): + _GetPropA = windll.user32.GetPropA + _GetPropA.argtypes = [HWND, LPSTR] + _GetPropA.restype = HANDLE + return _GetPropA(hWnd, lpString) + +def GetPropW(hWnd, lpString): + _GetPropW = windll.user32.GetPropW + _GetPropW.argtypes = [HWND, LPWSTR] + _GetPropW.restype = HANDLE + return _GetPropW(hWnd, lpString) + +GetProp = GuessStringType(GetPropA, GetPropW) + +# BOOL WINAPI SetProp( +# __in HWND hWnd, +# __in LPCTSTR lpString, +# __in_opt HANDLE hData +# ); +def SetPropA(hWnd, lpString, hData): + _SetPropA = windll.user32.SetPropA + _SetPropA.argtypes = [HWND, LPSTR, HANDLE] + _SetPropA.restype = BOOL + _SetPropA.errcheck = RaiseIfZero + _SetPropA(hWnd, lpString, hData) + +def SetPropW(hWnd, lpString, hData): + _SetPropW = windll.user32.SetPropW + _SetPropW.argtypes = [HWND, LPWSTR, HANDLE] + _SetPropW.restype = BOOL + _SetPropW.errcheck = RaiseIfZero + _SetPropW(hWnd, lpString, hData) + +SetProp = GuessStringType(SetPropA, SetPropW) + +# HANDLE WINAPI RemoveProp( +# __in HWND hWnd, +# __in LPCTSTR lpString +# ); +def RemovePropA(hWnd, lpString): + _RemovePropA = windll.user32.RemovePropA + _RemovePropA.argtypes = [HWND, LPSTR] + _RemovePropA.restype = HANDLE + return _RemovePropA(hWnd, lpString) + +def RemovePropW(hWnd, lpString): + _RemovePropW = windll.user32.RemovePropW + _RemovePropW.argtypes = [HWND, LPWSTR] + _RemovePropW.restype = HANDLE + return _RemovePropW(hWnd, lpString) + +RemoveProp = GuessStringType(RemovePropA, RemovePropW) + +#============================================================================== +# This calculates the list of exported symbols. 
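+# (everything defined after the _all snapshot taken at the top of the module, excluding names that start with an underscore)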
+_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py new file mode 100644 index 00000000..19b6d53c --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/version.py @@ -0,0 +1,1038 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Detect the current architecture and operating system. + +Some functions here are really from kernel32.dll, others from version.dll. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * + +#============================================================================== +# This is used later on to calculate the list of exported symbols. 
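+# A snapshot of the module namespace is taken here; at the bottom of the file it is diffed against vars() again to build __all__.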
+_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- NTDDI version ------------------------------------------------------------ + +NTDDI_WIN8 = 0x06020000 +NTDDI_WIN7SP1 = 0x06010100 +NTDDI_WIN7 = 0x06010000 +NTDDI_WS08 = 0x06000100 +NTDDI_VISTASP1 = 0x06000100 +NTDDI_VISTA = 0x06000000 +NTDDI_LONGHORN = NTDDI_VISTA +NTDDI_WS03SP2 = 0x05020200 +NTDDI_WS03SP1 = 0x05020100 +NTDDI_WS03 = 0x05020000 +NTDDI_WINXPSP3 = 0x05010300 +NTDDI_WINXPSP2 = 0x05010200 +NTDDI_WINXPSP1 = 0x05010100 +NTDDI_WINXP = 0x05010000 +NTDDI_WIN2KSP4 = 0x05000400 +NTDDI_WIN2KSP3 = 0x05000300 +NTDDI_WIN2KSP2 = 0x05000200 +NTDDI_WIN2KSP1 = 0x05000100 +NTDDI_WIN2K = 0x05000000 +NTDDI_WINNT4 = 0x04000000 + +OSVERSION_MASK = 0xFFFF0000 +SPVERSION_MASK = 0x0000FF00 +SUBVERSION_MASK = 0x000000FF + +#--- OSVERSIONINFO and OSVERSIONINFOEX structures and constants --------------- + +VER_PLATFORM_WIN32s = 0 +VER_PLATFORM_WIN32_WINDOWS = 1 +VER_PLATFORM_WIN32_NT = 2 + +VER_SUITE_BACKOFFICE = 0x00000004 +VER_SUITE_BLADE = 0x00000400 +VER_SUITE_COMPUTE_SERVER = 0x00004000 +VER_SUITE_DATACENTER = 0x00000080 +VER_SUITE_ENTERPRISE = 0x00000002 +VER_SUITE_EMBEDDEDNT = 0x00000040 +VER_SUITE_PERSONAL = 0x00000200 +VER_SUITE_SINGLEUSERTS = 0x00000100 +VER_SUITE_SMALLBUSINESS = 0x00000001 +VER_SUITE_SMALLBUSINESS_RESTRICTED = 0x00000020 +VER_SUITE_STORAGE_SERVER = 0x00002000 +VER_SUITE_TERMINAL = 0x00000010 +VER_SUITE_WH_SERVER = 0x00008000 + +VER_NT_DOMAIN_CONTROLLER = 0x0000002 +VER_NT_SERVER = 0x0000003 +VER_NT_WORKSTATION = 0x0000001 + +VER_BUILDNUMBER = 0x0000004 +VER_MAJORVERSION = 0x0000002 +VER_MINORVERSION = 0x0000001 +VER_PLATFORMID = 0x0000008 +VER_PRODUCT_TYPE = 0x0000080 +VER_SERVICEPACKMAJOR = 0x0000020 +VER_SERVICEPACKMINOR = 0x0000010 +VER_SUITENAME = 0x0000040 + +VER_EQUAL = 1 +VER_GREATER = 2 +VER_GREATER_EQUAL = 3 +VER_LESS = 4 +VER_LESS_EQUAL = 5 +VER_AND = 6 +VER_OR = 7 + +# typedef struct _OSVERSIONINFO { +# DWORD dwOSVersionInfoSize; +# DWORD dwMajorVersion; +# DWORD dwMinorVersion; +# DWORD dwBuildNumber; +# DWORD dwPlatformId; +# TCHAR szCSDVersion[128]; +# }OSVERSIONINFO; +class OSVERSIONINFOA(Structure): + _fields_ = [ + ("dwOSVersionInfoSize", DWORD), + ("dwMajorVersion", DWORD), + ("dwMinorVersion", DWORD), + ("dwBuildNumber", DWORD), + ("dwPlatformId", DWORD), + ("szCSDVersion", CHAR * 128), + ] +class OSVERSIONINFOW(Structure): + _fields_ = [ + ("dwOSVersionInfoSize", DWORD), + ("dwMajorVersion", DWORD), + ("dwMinorVersion", DWORD), + ("dwBuildNumber", DWORD), + ("dwPlatformId", DWORD), + ("szCSDVersion", WCHAR * 128), + ] + +# typedef struct _OSVERSIONINFOEX { +# DWORD dwOSVersionInfoSize; +# DWORD dwMajorVersion; +# DWORD dwMinorVersion; +# DWORD dwBuildNumber; +# DWORD dwPlatformId; +# TCHAR szCSDVersion[128]; +# WORD wServicePackMajor; +# WORD wServicePackMinor; +# WORD wSuiteMask; +# BYTE wProductType; +# BYTE wReserved; +# }OSVERSIONINFOEX, *POSVERSIONINFOEX, *LPOSVERSIONINFOEX; +class OSVERSIONINFOEXA(Structure): + _fields_ = [ + ("dwOSVersionInfoSize", DWORD), + ("dwMajorVersion", DWORD), + ("dwMinorVersion", DWORD), + ("dwBuildNumber", DWORD), + ("dwPlatformId", DWORD), + ("szCSDVersion", CHAR * 128), + ("wServicePackMajor", WORD), + ("wServicePackMinor", WORD), + ("wSuiteMask", WORD), + ("wProductType", BYTE), + ("wReserved", BYTE), + ] +class OSVERSIONINFOEXW(Structure): + _fields_ = [ + ("dwOSVersionInfoSize", DWORD), + ("dwMajorVersion", DWORD), + ("dwMinorVersion", DWORD), + ("dwBuildNumber", DWORD), + ("dwPlatformId", 
DWORD), + ("szCSDVersion", WCHAR * 128), + ("wServicePackMajor", WORD), + ("wServicePackMinor", WORD), + ("wSuiteMask", WORD), + ("wProductType", BYTE), + ("wReserved", BYTE), + ] + +LPOSVERSIONINFOA = POINTER(OSVERSIONINFOA) +LPOSVERSIONINFOW = POINTER(OSVERSIONINFOW) +LPOSVERSIONINFOEXA = POINTER(OSVERSIONINFOEXA) +LPOSVERSIONINFOEXW = POINTER(OSVERSIONINFOEXW) +POSVERSIONINFOA = LPOSVERSIONINFOA +POSVERSIONINFOW = LPOSVERSIONINFOW +POSVERSIONINFOEXA = LPOSVERSIONINFOEXA +POSVERSIONINFOEXW = LPOSVERSIONINFOA + +#--- GetSystemMetrics constants ----------------------------------------------- + +SM_CXSCREEN = 0 +SM_CYSCREEN = 1 +SM_CXVSCROLL = 2 +SM_CYHSCROLL = 3 +SM_CYCAPTION = 4 +SM_CXBORDER = 5 +SM_CYBORDER = 6 +SM_CXDLGFRAME = 7 +SM_CYDLGFRAME = 8 +SM_CYVTHUMB = 9 +SM_CXHTHUMB = 10 +SM_CXICON = 11 +SM_CYICON = 12 +SM_CXCURSOR = 13 +SM_CYCURSOR = 14 +SM_CYMENU = 15 +SM_CXFULLSCREEN = 16 +SM_CYFULLSCREEN = 17 +SM_CYKANJIWINDOW = 18 +SM_MOUSEPRESENT = 19 +SM_CYVSCROLL = 20 +SM_CXHSCROLL = 21 +SM_DEBUG = 22 +SM_SWAPBUTTON = 23 +SM_RESERVED1 = 24 +SM_RESERVED2 = 25 +SM_RESERVED3 = 26 +SM_RESERVED4 = 27 +SM_CXMIN = 28 +SM_CYMIN = 29 +SM_CXSIZE = 30 +SM_CYSIZE = 31 +SM_CXFRAME = 32 +SM_CYFRAME = 33 +SM_CXMINTRACK = 34 +SM_CYMINTRACK = 35 +SM_CXDOUBLECLK = 36 +SM_CYDOUBLECLK = 37 +SM_CXICONSPACING = 38 +SM_CYICONSPACING = 39 +SM_MENUDROPALIGNMENT = 40 +SM_PENWINDOWS = 41 +SM_DBCSENABLED = 42 +SM_CMOUSEBUTTONS = 43 + +SM_CXFIXEDFRAME = SM_CXDLGFRAME # ;win40 name change +SM_CYFIXEDFRAME = SM_CYDLGFRAME # ;win40 name change +SM_CXSIZEFRAME = SM_CXFRAME # ;win40 name change +SM_CYSIZEFRAME = SM_CYFRAME # ;win40 name change + +SM_SECURE = 44 +SM_CXEDGE = 45 +SM_CYEDGE = 46 +SM_CXMINSPACING = 47 +SM_CYMINSPACING = 48 +SM_CXSMICON = 49 +SM_CYSMICON = 50 +SM_CYSMCAPTION = 51 +SM_CXSMSIZE = 52 +SM_CYSMSIZE = 53 +SM_CXMENUSIZE = 54 +SM_CYMENUSIZE = 55 +SM_ARRANGE = 56 +SM_CXMINIMIZED = 57 +SM_CYMINIMIZED = 58 +SM_CXMAXTRACK = 59 +SM_CYMAXTRACK = 60 +SM_CXMAXIMIZED = 61 +SM_CYMAXIMIZED = 62 +SM_NETWORK = 63 +SM_CLEANBOOT = 67 +SM_CXDRAG = 68 +SM_CYDRAG = 69 +SM_SHOWSOUNDS = 70 +SM_CXMENUCHECK = 71 # Use instead of GetMenuCheckMarkDimensions()! +SM_CYMENUCHECK = 72 +SM_SLOWMACHINE = 73 +SM_MIDEASTENABLED = 74 +SM_MOUSEWHEELPRESENT = 75 +SM_XVIRTUALSCREEN = 76 +SM_YVIRTUALSCREEN = 77 +SM_CXVIRTUALSCREEN = 78 +SM_CYVIRTUALSCREEN = 79 +SM_CMONITORS = 80 +SM_SAMEDISPLAYFORMAT = 81 +SM_IMMENABLED = 82 +SM_CXFOCUSBORDER = 83 +SM_CYFOCUSBORDER = 84 +SM_TABLETPC = 86 +SM_MEDIACENTER = 87 +SM_STARTER = 88 +SM_SERVERR2 = 89 +SM_MOUSEHORIZONTALWHEELPRESENT = 91 +SM_CXPADDEDBORDER = 92 + +SM_CMETRICS = 93 + +SM_REMOTESESSION = 0x1000 +SM_SHUTTINGDOWN = 0x2000 +SM_REMOTECONTROL = 0x2001 +SM_CARETBLINKINGENABLED = 0x2002 + +#--- SYSTEM_INFO structure, GetSystemInfo() and GetNativeSystemInfo() --------- + +# Values used by Wine +# Documented values at MSDN are marked with an asterisk +PROCESSOR_ARCHITECTURE_UNKNOWN = 0xFFFF; # Unknown architecture. 
+PROCESSOR_ARCHITECTURE_INTEL = 0 # x86 (AMD or Intel) * +PROCESSOR_ARCHITECTURE_MIPS = 1 # MIPS +PROCESSOR_ARCHITECTURE_ALPHA = 2 # Alpha +PROCESSOR_ARCHITECTURE_PPC = 3 # Power PC +PROCESSOR_ARCHITECTURE_SHX = 4 # SHX +PROCESSOR_ARCHITECTURE_ARM = 5 # ARM +PROCESSOR_ARCHITECTURE_IA64 = 6 # Intel Itanium * +PROCESSOR_ARCHITECTURE_ALPHA64 = 7 # Alpha64 +PROCESSOR_ARCHITECTURE_MSIL = 8 # MSIL +PROCESSOR_ARCHITECTURE_AMD64 = 9 # x64 (AMD or Intel) * +PROCESSOR_ARCHITECTURE_IA32_ON_WIN64 = 10 # IA32 on Win64 +PROCESSOR_ARCHITECTURE_SPARC = 20 # Sparc (Wine) + +# Values used by Wine +# PROCESSOR_OPTIL value found at http://code.google.com/p/ddab-lib/ +# Documented values at MSDN are marked with an asterisk +PROCESSOR_INTEL_386 = 386 # Intel i386 * +PROCESSOR_INTEL_486 = 486 # Intel i486 * +PROCESSOR_INTEL_PENTIUM = 586 # Intel Pentium * +PROCESSOR_INTEL_IA64 = 2200 # Intel IA64 (Itanium) * +PROCESSOR_AMD_X8664 = 8664 # AMD X86 64 * +PROCESSOR_MIPS_R4000 = 4000 # MIPS R4000, R4101, R3910 +PROCESSOR_ALPHA_21064 = 21064 # Alpha 210 64 +PROCESSOR_PPC_601 = 601 # PPC 601 +PROCESSOR_PPC_603 = 603 # PPC 603 +PROCESSOR_PPC_604 = 604 # PPC 604 +PROCESSOR_PPC_620 = 620 # PPC 620 +PROCESSOR_HITACHI_SH3 = 10003 # Hitachi SH3 (Windows CE) +PROCESSOR_HITACHI_SH3E = 10004 # Hitachi SH3E (Windows CE) +PROCESSOR_HITACHI_SH4 = 10005 # Hitachi SH4 (Windows CE) +PROCESSOR_MOTOROLA_821 = 821 # Motorola 821 (Windows CE) +PROCESSOR_SHx_SH3 = 103 # SHx SH3 (Windows CE) +PROCESSOR_SHx_SH4 = 104 # SHx SH4 (Windows CE) +PROCESSOR_STRONGARM = 2577 # StrongARM (Windows CE) +PROCESSOR_ARM720 = 1824 # ARM 720 (Windows CE) +PROCESSOR_ARM820 = 2080 # ARM 820 (Windows CE) +PROCESSOR_ARM920 = 2336 # ARM 920 (Windows CE) +PROCESSOR_ARM_7TDMI = 70001 # ARM 7TDMI (Windows CE) +PROCESSOR_OPTIL = 0x494F # MSIL + +# typedef struct _SYSTEM_INFO { +# union { +# DWORD dwOemId; +# struct { +# WORD wProcessorArchitecture; +# WORD wReserved; +# } ; +# } ; +# DWORD dwPageSize; +# LPVOID lpMinimumApplicationAddress; +# LPVOID lpMaximumApplicationAddress; +# DWORD_PTR dwActiveProcessorMask; +# DWORD dwNumberOfProcessors; +# DWORD dwProcessorType; +# DWORD dwAllocationGranularity; +# WORD wProcessorLevel; +# WORD wProcessorRevision; +# } SYSTEM_INFO; + +class _SYSTEM_INFO_OEM_ID_STRUCT(Structure): + _fields_ = [ + ("wProcessorArchitecture", WORD), + ("wReserved", WORD), +] + +class _SYSTEM_INFO_OEM_ID(Union): + _fields_ = [ + ("dwOemId", DWORD), + ("w", _SYSTEM_INFO_OEM_ID_STRUCT), +] + +class SYSTEM_INFO(Structure): + _fields_ = [ + ("id", _SYSTEM_INFO_OEM_ID), + ("dwPageSize", DWORD), + ("lpMinimumApplicationAddress", LPVOID), + ("lpMaximumApplicationAddress", LPVOID), + ("dwActiveProcessorMask", DWORD_PTR), + ("dwNumberOfProcessors", DWORD), + ("dwProcessorType", DWORD), + ("dwAllocationGranularity", DWORD), + ("wProcessorLevel", WORD), + ("wProcessorRevision", WORD), + ] + + def __get_dwOemId(self): + return self.id.dwOemId + def __set_dwOemId(self, value): + self.id.dwOemId = value + dwOemId = property(__get_dwOemId, __set_dwOemId) + + def __get_wProcessorArchitecture(self): + return self.id.w.wProcessorArchitecture + def __set_wProcessorArchitecture(self, value): + self.id.w.wProcessorArchitecture = value + wProcessorArchitecture = property(__get_wProcessorArchitecture, __set_wProcessorArchitecture) + +LPSYSTEM_INFO = ctypes.POINTER(SYSTEM_INFO) + +# void WINAPI GetSystemInfo( +# __out LPSYSTEM_INFO lpSystemInfo +# ); +def GetSystemInfo(): + _GetSystemInfo = windll.kernel32.GetSystemInfo + _GetSystemInfo.argtypes = [LPSYSTEM_INFO] + 
_GetSystemInfo.restype = None + + sysinfo = SYSTEM_INFO() + _GetSystemInfo(byref(sysinfo)) + return sysinfo + +# void WINAPI GetNativeSystemInfo( +# __out LPSYSTEM_INFO lpSystemInfo +# ); +def GetNativeSystemInfo(): + _GetNativeSystemInfo = windll.kernel32.GetNativeSystemInfo + _GetNativeSystemInfo.argtypes = [LPSYSTEM_INFO] + _GetNativeSystemInfo.restype = None + + sysinfo = SYSTEM_INFO() + _GetNativeSystemInfo(byref(sysinfo)) + return sysinfo + +# int WINAPI GetSystemMetrics( +# __in int nIndex +# ); +def GetSystemMetrics(nIndex): + _GetSystemMetrics = windll.user32.GetSystemMetrics + _GetSystemMetrics.argtypes = [ctypes.c_int] + _GetSystemMetrics.restype = ctypes.c_int + return _GetSystemMetrics(nIndex) + +# SIZE_T WINAPI GetLargePageMinimum(void); +def GetLargePageMinimum(): + _GetLargePageMinimum = windll.user32.GetLargePageMinimum + _GetLargePageMinimum.argtypes = [] + _GetLargePageMinimum.restype = SIZE_T + return _GetLargePageMinimum() + +# HANDLE WINAPI GetCurrentProcess(void); +def GetCurrentProcess(): +## return 0xFFFFFFFFFFFFFFFFL + _GetCurrentProcess = windll.kernel32.GetCurrentProcess + _GetCurrentProcess.argtypes = [] + _GetCurrentProcess.restype = HANDLE + return _GetCurrentProcess() + +# HANDLE WINAPI GetCurrentThread(void); +def GetCurrentThread(): +## return 0xFFFFFFFFFFFFFFFEL + _GetCurrentThread = windll.kernel32.GetCurrentThread + _GetCurrentThread.argtypes = [] + _GetCurrentThread.restype = HANDLE + return _GetCurrentThread() + +# BOOL WINAPI IsWow64Process( +# __in HANDLE hProcess, +# __out PBOOL Wow64Process +# ); +def IsWow64Process(hProcess): + _IsWow64Process = windll.kernel32.IsWow64Process + _IsWow64Process.argtypes = [HANDLE, PBOOL] + _IsWow64Process.restype = bool + _IsWow64Process.errcheck = RaiseIfZero + + Wow64Process = BOOL(FALSE) + _IsWow64Process(hProcess, byref(Wow64Process)) + return bool(Wow64Process) + +# DWORD WINAPI GetVersion(void); +def GetVersion(): + _GetVersion = windll.kernel32.GetVersion + _GetVersion.argtypes = [] + _GetVersion.restype = DWORD + _GetVersion.errcheck = RaiseIfZero + + # See the example code here: + # http://msdn.microsoft.com/en-us/library/ms724439(VS.85).aspx + + dwVersion = _GetVersion() + dwMajorVersion = dwVersion & 0x000000FF + dwMinorVersion = (dwVersion & 0x0000FF00) >> 8 + if (dwVersion & 0x80000000) == 0: + dwBuild = (dwVersion & 0x7FFF0000) >> 16 + else: + dwBuild = None + return int(dwMajorVersion), int(dwMinorVersion), int(dwBuild) + +# BOOL WINAPI GetVersionEx( +# __inout LPOSVERSIONINFO lpVersionInfo +# ); +def GetVersionExA(): + _GetVersionExA = windll.kernel32.GetVersionExA + _GetVersionExA.argtypes = [POINTER(OSVERSIONINFOEXA)] + _GetVersionExA.restype = bool + _GetVersionExA.errcheck = RaiseIfZero + + osi = OSVERSIONINFOEXA() + osi.dwOSVersionInfoSize = sizeof(osi) + try: + _GetVersionExA(byref(osi)) + except WindowsError: + osi = OSVERSIONINFOA() + osi.dwOSVersionInfoSize = sizeof(osi) + _GetVersionExA.argtypes = [POINTER(OSVERSIONINFOA)] + _GetVersionExA(byref(osi)) + return osi + +def GetVersionExW(): + _GetVersionExW = windll.kernel32.GetVersionExW + _GetVersionExW.argtypes = [POINTER(OSVERSIONINFOEXW)] + _GetVersionExW.restype = bool + _GetVersionExW.errcheck = RaiseIfZero + + osi = OSVERSIONINFOEXW() + osi.dwOSVersionInfoSize = sizeof(osi) + try: + _GetVersionExW(byref(osi)) + except WindowsError: + osi = OSVERSIONINFOW() + osi.dwOSVersionInfoSize = sizeof(osi) + _GetVersionExW.argtypes = [POINTER(OSVERSIONINFOW)] + _GetVersionExW(byref(osi)) + return osi + +GetVersionEx = 
GuessStringType(GetVersionExA, GetVersionExW) + +# BOOL WINAPI GetProductInfo( +# __in DWORD dwOSMajorVersion, +# __in DWORD dwOSMinorVersion, +# __in DWORD dwSpMajorVersion, +# __in DWORD dwSpMinorVersion, +# __out PDWORD pdwReturnedProductType +# ); +def GetProductInfo(dwOSMajorVersion, dwOSMinorVersion, dwSpMajorVersion, dwSpMinorVersion): + _GetProductInfo = windll.kernel32.GetProductInfo + _GetProductInfo.argtypes = [DWORD, DWORD, DWORD, DWORD, PDWORD] + _GetProductInfo.restype = BOOL + _GetProductInfo.errcheck = RaiseIfZero + + dwReturnedProductType = DWORD(0) + _GetProductInfo(dwOSMajorVersion, dwOSMinorVersion, dwSpMajorVersion, dwSpMinorVersion, byref(dwReturnedProductType)) + return dwReturnedProductType.value + +# BOOL WINAPI VerifyVersionInfo( +# __in LPOSVERSIONINFOEX lpVersionInfo, +# __in DWORD dwTypeMask, +# __in DWORDLONG dwlConditionMask +# ); +def VerifyVersionInfo(lpVersionInfo, dwTypeMask, dwlConditionMask): + if isinstance(lpVersionInfo, OSVERSIONINFOEXA): + return VerifyVersionInfoA(lpVersionInfo, dwTypeMask, dwlConditionMask) + if isinstance(lpVersionInfo, OSVERSIONINFOEXW): + return VerifyVersionInfoW(lpVersionInfo, dwTypeMask, dwlConditionMask) + raise TypeError("Bad OSVERSIONINFOEX structure") + +def VerifyVersionInfoA(lpVersionInfo, dwTypeMask, dwlConditionMask): + _VerifyVersionInfoA = windll.kernel32.VerifyVersionInfoA + _VerifyVersionInfoA.argtypes = [LPOSVERSIONINFOEXA, DWORD, DWORDLONG] + _VerifyVersionInfoA.restype = bool + return _VerifyVersionInfoA(byref(lpVersionInfo), dwTypeMask, dwlConditionMask) + +def VerifyVersionInfoW(lpVersionInfo, dwTypeMask, dwlConditionMask): + _VerifyVersionInfoW = windll.kernel32.VerifyVersionInfoW + _VerifyVersionInfoW.argtypes = [LPOSVERSIONINFOEXW, DWORD, DWORDLONG] + _VerifyVersionInfoW.restype = bool + return _VerifyVersionInfoW(byref(lpVersionInfo), dwTypeMask, dwlConditionMask) + +# ULONGLONG WINAPI VerSetConditionMask( +# __in ULONGLONG dwlConditionMask, +# __in DWORD dwTypeBitMask, +# __in BYTE dwConditionMask +# ); +def VerSetConditionMask(dwlConditionMask, dwTypeBitMask, dwConditionMask): + _VerSetConditionMask = windll.kernel32.VerSetConditionMask + _VerSetConditionMask.argtypes = [ULONGLONG, DWORD, BYTE] + _VerSetConditionMask.restype = ULONGLONG + return _VerSetConditionMask(dwlConditionMask, dwTypeBitMask, dwConditionMask) + +#--- get_bits, get_arch and get_os -------------------------------------------- + +ARCH_UNKNOWN = "unknown" +ARCH_I386 = "i386" +ARCH_MIPS = "mips" +ARCH_ALPHA = "alpha" +ARCH_PPC = "ppc" +ARCH_SHX = "shx" +ARCH_ARM = "arm" +ARCH_ARM64 = "arm64" +ARCH_THUMB = "thumb" +ARCH_IA64 = "ia64" +ARCH_ALPHA64 = "alpha64" +ARCH_MSIL = "msil" +ARCH_AMD64 = "amd64" +ARCH_SPARC = "sparc" + +# aliases +ARCH_IA32 = ARCH_I386 +ARCH_X86 = ARCH_I386 +ARCH_X64 = ARCH_AMD64 +ARCH_ARM7 = ARCH_ARM +ARCH_ARM8 = ARCH_ARM64 +ARCH_T32 = ARCH_THUMB +ARCH_AARCH32 = ARCH_ARM7 +ARCH_AARCH64 = ARCH_ARM8 +ARCH_POWERPC = ARCH_PPC +ARCH_HITACHI = ARCH_SHX +ARCH_ITANIUM = ARCH_IA64 + +# win32 constants -> our constants +_arch_map = { + PROCESSOR_ARCHITECTURE_INTEL : ARCH_I386, + PROCESSOR_ARCHITECTURE_MIPS : ARCH_MIPS, + PROCESSOR_ARCHITECTURE_ALPHA : ARCH_ALPHA, + PROCESSOR_ARCHITECTURE_PPC : ARCH_PPC, + PROCESSOR_ARCHITECTURE_SHX : ARCH_SHX, + PROCESSOR_ARCHITECTURE_ARM : ARCH_ARM, + PROCESSOR_ARCHITECTURE_IA64 : ARCH_IA64, + PROCESSOR_ARCHITECTURE_ALPHA64 : ARCH_ALPHA64, + PROCESSOR_ARCHITECTURE_MSIL : ARCH_MSIL, + PROCESSOR_ARCHITECTURE_AMD64 : ARCH_AMD64, + PROCESSOR_ARCHITECTURE_SPARC : ARCH_SPARC, +} + +OS_UNKNOWN = 
"Unknown" +OS_NT = "Windows NT" +OS_W2K = "Windows 2000" +OS_XP = "Windows XP" +OS_XP_64 = "Windows XP (64 bits)" +OS_W2K3 = "Windows 2003" +OS_W2K3_64 = "Windows 2003 (64 bits)" +OS_W2K3R2 = "Windows 2003 R2" +OS_W2K3R2_64 = "Windows 2003 R2 (64 bits)" +OS_W2K8 = "Windows 2008" +OS_W2K8_64 = "Windows 2008 (64 bits)" +OS_W2K8R2 = "Windows 2008 R2" +OS_W2K8R2_64 = "Windows 2008 R2 (64 bits)" +OS_VISTA = "Windows Vista" +OS_VISTA_64 = "Windows Vista (64 bits)" +OS_W7 = "Windows 7" +OS_W7_64 = "Windows 7 (64 bits)" + +OS_SEVEN = OS_W7 +OS_SEVEN_64 = OS_W7_64 + +OS_WINDOWS_NT = OS_NT +OS_WINDOWS_2000 = OS_W2K +OS_WINDOWS_XP = OS_XP +OS_WINDOWS_XP_64 = OS_XP_64 +OS_WINDOWS_2003 = OS_W2K3 +OS_WINDOWS_2003_64 = OS_W2K3_64 +OS_WINDOWS_2003_R2 = OS_W2K3R2 +OS_WINDOWS_2003_R2_64 = OS_W2K3R2_64 +OS_WINDOWS_2008 = OS_W2K8 +OS_WINDOWS_2008_64 = OS_W2K8_64 +OS_WINDOWS_2008_R2 = OS_W2K8R2 +OS_WINDOWS_2008_R2_64 = OS_W2K8R2_64 +OS_WINDOWS_VISTA = OS_VISTA +OS_WINDOWS_VISTA_64 = OS_VISTA_64 +OS_WINDOWS_SEVEN = OS_W7 +OS_WINDOWS_SEVEN_64 = OS_W7_64 + +def _get_bits(): + """ + Determines the current integer size in bits. + + This is useful to know if we're running in a 32 bits or a 64 bits machine. + + @rtype: int + @return: Returns the size of L{SIZE_T} in bits. + """ + return sizeof(SIZE_T) * 8 + +def _get_arch(): + """ + Determines the current processor architecture. + + @rtype: str + @return: + On error, returns: + + - L{ARCH_UNKNOWN} (C{"unknown"}) meaning the architecture could not be detected or is not known to WinAppDbg. + + On success, returns one of the following values: + + - L{ARCH_I386} (C{"i386"}) for Intel 32-bit x86 processor or compatible. + - L{ARCH_AMD64} (C{"amd64"}) for Intel 64-bit x86_64 processor or compatible. + + May also return one of the following values if you get both Python and + WinAppDbg to work in such machines... let me know if you do! :) + + - L{ARCH_MIPS} (C{"mips"}) for MIPS compatible processors. + - L{ARCH_ALPHA} (C{"alpha"}) for Alpha processors. + - L{ARCH_PPC} (C{"ppc"}) for PowerPC compatible processors. + - L{ARCH_SHX} (C{"shx"}) for Hitachi SH processors. + - L{ARCH_ARM} (C{"arm"}) for ARM compatible processors. + - L{ARCH_IA64} (C{"ia64"}) for Intel Itanium processor or compatible. + - L{ARCH_ALPHA64} (C{"alpha64"}) for Alpha64 processors. + - L{ARCH_MSIL} (C{"msil"}) for the .NET virtual machine. + - L{ARCH_SPARC} (C{"sparc"}) for Sun Sparc processors. + + Probably IronPython returns C{ARCH_MSIL} but I haven't tried it. Python + on Windows CE and Windows Mobile should return C{ARCH_ARM}. Python on + Solaris using Wine would return C{ARCH_SPARC}. Python in an Itanium + machine should return C{ARCH_IA64} both on Wine and proper Windows. + All other values should only be returned on Linux using Wine. + """ + try: + si = GetNativeSystemInfo() + except Exception: + si = GetSystemInfo() + try: + return _arch_map[si.id.w.wProcessorArchitecture] + except KeyError: + return ARCH_UNKNOWN + +def _get_wow64(): + """ + Determines if the current process is running in Windows-On-Windows 64 bits. + + @rtype: bool + @return: C{True} of the current process is a 32 bit program running in a + 64 bit version of Windows, C{False} if it's either a 32 bit program + in a 32 bit Windows or a 64 bit program in a 64 bit Windows. + """ + # Try to determine if the debugger itself is running on WOW64. + # On error assume False. 
+ if bits == 64: + wow64 = False + else: + try: + wow64 = IsWow64Process( GetCurrentProcess() ) + except Exception: + wow64 = False + return wow64 + +def _get_os(osvi = None): + """ + Determines the current operating system. + + This function allows you to quickly tell apart major OS differences. + For more detailed information call L{GetVersionEx} instead. + + @note: + Wine reports itself as Windows XP 32 bits + (even if the Linux host is 64 bits). + ReactOS may report itself as Windows 2000 or Windows XP, + depending on the version of ReactOS. + + @type osvi: L{OSVERSIONINFOEXA} + @param osvi: Optional. The return value from L{GetVersionEx}. + + @rtype: str + @return: + One of the following values: + - L{OS_UNKNOWN} (C{"Unknown"}) + - L{OS_NT} (C{"Windows NT"}) + - L{OS_W2K} (C{"Windows 2000"}) + - L{OS_XP} (C{"Windows XP"}) + - L{OS_XP_64} (C{"Windows XP (64 bits)"}) + - L{OS_W2K3} (C{"Windows 2003"}) + - L{OS_W2K3_64} (C{"Windows 2003 (64 bits)"}) + - L{OS_W2K3R2} (C{"Windows 2003 R2"}) + - L{OS_W2K3R2_64} (C{"Windows 2003 R2 (64 bits)"}) + - L{OS_W2K8} (C{"Windows 2008"}) + - L{OS_W2K8_64} (C{"Windows 2008 (64 bits)"}) + - L{OS_W2K8R2} (C{"Windows 2008 R2"}) + - L{OS_W2K8R2_64} (C{"Windows 2008 R2 (64 bits)"}) + - L{OS_VISTA} (C{"Windows Vista"}) + - L{OS_VISTA_64} (C{"Windows Vista (64 bits)"}) + - L{OS_W7} (C{"Windows 7"}) + - L{OS_W7_64} (C{"Windows 7 (64 bits)"}) + """ + # rough port of http://msdn.microsoft.com/en-us/library/ms724429%28VS.85%29.aspx + if not osvi: + osvi = GetVersionEx() + if osvi.dwPlatformId == VER_PLATFORM_WIN32_NT and osvi.dwMajorVersion > 4: + if osvi.dwMajorVersion == 6: + if osvi.dwMinorVersion == 0: + if osvi.wProductType == VER_NT_WORKSTATION: + if bits == 64 or wow64: + return 'Windows Vista (64 bits)' + return 'Windows Vista' + else: + if bits == 64 or wow64: + return 'Windows 2008 (64 bits)' + return 'Windows 2008' + if osvi.dwMinorVersion == 1: + if osvi.wProductType == VER_NT_WORKSTATION: + if bits == 64 or wow64: + return 'Windows 7 (64 bits)' + return 'Windows 7' + else: + if bits == 64 or wow64: + return 'Windows 2008 R2 (64 bits)' + return 'Windows 2008 R2' + if osvi.dwMajorVersion == 5: + if osvi.dwMinorVersion == 2: + if GetSystemMetrics(SM_SERVERR2): + if bits == 64 or wow64: + return 'Windows 2003 R2 (64 bits)' + return 'Windows 2003 R2' + if osvi.wSuiteMask in (VER_SUITE_STORAGE_SERVER, VER_SUITE_WH_SERVER): + if bits == 64 or wow64: + return 'Windows 2003 (64 bits)' + return 'Windows 2003' + if osvi.wProductType == VER_NT_WORKSTATION and arch == ARCH_AMD64: + return 'Windows XP (64 bits)' + else: + if bits == 64 or wow64: + return 'Windows 2003 (64 bits)' + return 'Windows 2003' + if osvi.dwMinorVersion == 1: + return 'Windows XP' + if osvi.dwMinorVersion == 0: + return 'Windows 2000' + if osvi.dwMajorVersion == 4: + return 'Windows NT' + return 'Unknown' + +def _get_ntddi(osvi): + """ + Determines the current operating system. + + This function allows you to quickly tell apart major OS differences. + For more detailed information call L{kernel32.GetVersionEx} instead. + + @note: + Wine reports itself as Windows XP 32 bits + (even if the Linux host is 64 bits). + ReactOS may report itself as Windows 2000 or Windows XP, + depending on the version of ReactOS. + + @type osvi: L{OSVERSIONINFOEXA} + @param osvi: Optional. The return value from L{kernel32.GetVersionEx}. + + @rtype: int + @return: NTDDI version number. 
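+ For example, Windows 7 SP1 (version 6.1, service pack 1.0) packs to C{0x06010100}, which equals L{NTDDI_WIN7SP1}.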
+ """ + if not osvi: + osvi = GetVersionEx() + ntddi = 0 + ntddi += (osvi.dwMajorVersion & 0xFF) << 24 + ntddi += (osvi.dwMinorVersion & 0xFF) << 16 + ntddi += (osvi.wServicePackMajor & 0xFF) << 8 + ntddi += (osvi.wServicePackMinor & 0xFF) + return ntddi + +# The order of the following definitions DOES matter! + +# Current integer size in bits. See L{_get_bits} for more details. +bits = _get_bits() + +# Current processor architecture. See L{_get_arch} for more details. +arch = _get_arch() + +# Set to C{True} if the current process is running in WOW64. See L{_get_wow64} for more details. +wow64 = _get_wow64() + +_osvi = GetVersionEx() + +# Current operating system. See L{_get_os} for more details. +os = _get_os(_osvi) + +# Current operating system as an NTDDI constant. See L{_get_ntddi} for more details. +NTDDI_VERSION = _get_ntddi(_osvi) + +# Upper word of L{NTDDI_VERSION}, contains the OS major and minor version number. +WINVER = NTDDI_VERSION >> 16 + +#--- version.dll -------------------------------------------------------------- + +VS_FF_DEBUG = 0x00000001 +VS_FF_PRERELEASE = 0x00000002 +VS_FF_PATCHED = 0x00000004 +VS_FF_PRIVATEBUILD = 0x00000008 +VS_FF_INFOINFERRED = 0x00000010 +VS_FF_SPECIALBUILD = 0x00000020 + +VOS_UNKNOWN = 0x00000000 +VOS__WINDOWS16 = 0x00000001 +VOS__PM16 = 0x00000002 +VOS__PM32 = 0x00000003 +VOS__WINDOWS32 = 0x00000004 +VOS_DOS = 0x00010000 +VOS_OS216 = 0x00020000 +VOS_OS232 = 0x00030000 +VOS_NT = 0x00040000 + +VOS_DOS_WINDOWS16 = 0x00010001 +VOS_DOS_WINDOWS32 = 0x00010004 +VOS_NT_WINDOWS32 = 0x00040004 +VOS_OS216_PM16 = 0x00020002 +VOS_OS232_PM32 = 0x00030003 + +VFT_UNKNOWN = 0x00000000 +VFT_APP = 0x00000001 +VFT_DLL = 0x00000002 +VFT_DRV = 0x00000003 +VFT_FONT = 0x00000004 +VFT_VXD = 0x00000005 +VFT_RESERVED = 0x00000006 # undocumented +VFT_STATIC_LIB = 0x00000007 + +VFT2_UNKNOWN = 0x00000000 + +VFT2_DRV_PRINTER = 0x00000001 +VFT2_DRV_KEYBOARD = 0x00000002 +VFT2_DRV_LANGUAGE = 0x00000003 +VFT2_DRV_DISPLAY = 0x00000004 +VFT2_DRV_MOUSE = 0x00000005 +VFT2_DRV_NETWORK = 0x00000006 +VFT2_DRV_SYSTEM = 0x00000007 +VFT2_DRV_INSTALLABLE = 0x00000008 +VFT2_DRV_SOUND = 0x00000009 +VFT2_DRV_COMM = 0x0000000A +VFT2_DRV_RESERVED = 0x0000000B # undocumented +VFT2_DRV_VERSIONED_PRINTER = 0x0000000C + +VFT2_FONT_RASTER = 0x00000001 +VFT2_FONT_VECTOR = 0x00000002 +VFT2_FONT_TRUETYPE = 0x00000003 + +# typedef struct tagVS_FIXEDFILEINFO { +# DWORD dwSignature; +# DWORD dwStrucVersion; +# DWORD dwFileVersionMS; +# DWORD dwFileVersionLS; +# DWORD dwProductVersionMS; +# DWORD dwProductVersionLS; +# DWORD dwFileFlagsMask; +# DWORD dwFileFlags; +# DWORD dwFileOS; +# DWORD dwFileType; +# DWORD dwFileSubtype; +# DWORD dwFileDateMS; +# DWORD dwFileDateLS; +# } VS_FIXEDFILEINFO; +class VS_FIXEDFILEINFO(Structure): + _fields_ = [ + ("dwSignature", DWORD), + ("dwStrucVersion", DWORD), + ("dwFileVersionMS", DWORD), + ("dwFileVersionLS", DWORD), + ("dwProductVersionMS", DWORD), + ("dwProductVersionLS", DWORD), + ("dwFileFlagsMask", DWORD), + ("dwFileFlags", DWORD), + ("dwFileOS", DWORD), + ("dwFileType", DWORD), + ("dwFileSubtype", DWORD), + ("dwFileDateMS", DWORD), + ("dwFileDateLS", DWORD), +] +PVS_FIXEDFILEINFO = POINTER(VS_FIXEDFILEINFO) +LPVS_FIXEDFILEINFO = PVS_FIXEDFILEINFO + +# BOOL WINAPI GetFileVersionInfo( +# _In_ LPCTSTR lptstrFilename, +# _Reserved_ DWORD dwHandle, +# _In_ DWORD dwLen, +# _Out_ LPVOID lpData +# ); +# DWORD WINAPI GetFileVersionInfoSize( +# _In_ LPCTSTR lptstrFilename, +# _Out_opt_ LPDWORD lpdwHandle +# ); +def GetFileVersionInfoA(lptstrFilename): + 
_GetFileVersionInfoA = windll.version.GetFileVersionInfoA + _GetFileVersionInfoA.argtypes = [LPSTR, DWORD, DWORD, LPVOID] + _GetFileVersionInfoA.restype = bool + _GetFileVersionInfoA.errcheck = RaiseIfZero + + _GetFileVersionInfoSizeA = windll.version.GetFileVersionInfoSizeA + _GetFileVersionInfoSizeA.argtypes = [LPSTR, LPVOID] + _GetFileVersionInfoSizeA.restype = DWORD + _GetFileVersionInfoSizeA.errcheck = RaiseIfZero + + dwLen = _GetFileVersionInfoSizeA(lptstrFilename, None) + lpData = ctypes.create_string_buffer(dwLen) + _GetFileVersionInfoA(lptstrFilename, 0, dwLen, byref(lpData)) + return lpData + +def GetFileVersionInfoW(lptstrFilename): + _GetFileVersionInfoW = windll.version.GetFileVersionInfoW + _GetFileVersionInfoW.argtypes = [LPWSTR, DWORD, DWORD, LPVOID] + _GetFileVersionInfoW.restype = bool + _GetFileVersionInfoW.errcheck = RaiseIfZero + + _GetFileVersionInfoSizeW = windll.version.GetFileVersionInfoSizeW + _GetFileVersionInfoSizeW.argtypes = [LPWSTR, LPVOID] + _GetFileVersionInfoSizeW.restype = DWORD + _GetFileVersionInfoSizeW.errcheck = RaiseIfZero + + dwLen = _GetFileVersionInfoSizeW(lptstrFilename, None) + lpData = ctypes.create_string_buffer(dwLen) # not a string! + _GetFileVersionInfoW(lptstrFilename, 0, dwLen, byref(lpData)) + return lpData + +GetFileVersionInfo = GuessStringType(GetFileVersionInfoA, GetFileVersionInfoW) + +# BOOL WINAPI VerQueryValue( +# _In_ LPCVOID pBlock, +# _In_ LPCTSTR lpSubBlock, +# _Out_ LPVOID *lplpBuffer, +# _Out_ PUINT puLen +# ); +def VerQueryValueA(pBlock, lpSubBlock): + _VerQueryValueA = windll.version.VerQueryValueA + _VerQueryValueA.argtypes = [LPVOID, LPSTR, LPVOID, POINTER(UINT)] + _VerQueryValueA.restype = bool + _VerQueryValueA.errcheck = RaiseIfZero + + lpBuffer = LPVOID(0) + uLen = UINT(0) + _VerQueryValueA(pBlock, lpSubBlock, byref(lpBuffer), byref(uLen)) + return lpBuffer, uLen.value + +def VerQueryValueW(pBlock, lpSubBlock): + _VerQueryValueW = windll.version.VerQueryValueW + _VerQueryValueW.argtypes = [LPVOID, LPWSTR, LPVOID, POINTER(UINT)] + _VerQueryValueW.restype = bool + _VerQueryValueW.errcheck = RaiseIfZero + + lpBuffer = LPVOID(0) + uLen = UINT(0) + _VerQueryValueW(pBlock, lpSubBlock, byref(lpBuffer), byref(uLen)) + return lpBuffer, uLen.value + +VerQueryValue = GuessStringType(VerQueryValueA, VerQueryValueW) + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py new file mode 100644 index 00000000..13227db3 --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/win32/wtsapi32.py @@ -0,0 +1,337 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper for wtsapi32.dll in ctypes. +""" + +__revision__ = "$Id$" + +from winappdbg.win32.defines import * +from winappdbg.win32.advapi32 import * + +#============================================================================== +# This is used later on to calculate the list of exported symbols. +_all = None +_all = set(vars().keys()) +#============================================================================== + +#--- Constants ---------------------------------------------------------------- + +WTS_CURRENT_SERVER_HANDLE = 0 +WTS_CURRENT_SESSION = 1 + +#--- WTS_PROCESS_INFO structure ----------------------------------------------- + +# typedef struct _WTS_PROCESS_INFO { +# DWORD SessionId; +# DWORD ProcessId; +# LPTSTR pProcessName; +# PSID pUserSid; +# } WTS_PROCESS_INFO, *PWTS_PROCESS_INFO; + +class WTS_PROCESS_INFOA(Structure): + _fields_ = [ + ("SessionId", DWORD), + ("ProcessId", DWORD), + ("pProcessName", LPSTR), + ("pUserSid", PSID), + ] +PWTS_PROCESS_INFOA = POINTER(WTS_PROCESS_INFOA) + +class WTS_PROCESS_INFOW(Structure): + _fields_ = [ + ("SessionId", DWORD), + ("ProcessId", DWORD), + ("pProcessName", LPWSTR), + ("pUserSid", PSID), + ] +PWTS_PROCESS_INFOW = POINTER(WTS_PROCESS_INFOW) + +#--- WTSQuerySessionInformation enums and structures -------------------------- + +# typedef enum _WTS_INFO_CLASS { +# WTSInitialProgram = 0, +# WTSApplicationName = 1, +# WTSWorkingDirectory = 2, +# WTSOEMId = 3, +# WTSSessionId = 4, +# WTSUserName = 5, +# WTSWinStationName = 6, +# WTSDomainName = 7, +# WTSConnectState = 8, +# WTSClientBuildNumber = 9, +# WTSClientName = 10, +# WTSClientDirectory = 11, +# WTSClientProductId = 12, +# WTSClientHardwareId = 13, +# WTSClientAddress = 14, +# WTSClientDisplay = 15, +# WTSClientProtocolType = 16, +# WTSIdleTime = 17, +# WTSLogonTime = 18, +# WTSIncomingBytes = 19, +# WTSOutgoingBytes = 20, +# WTSIncomingFrames = 21, +# WTSOutgoingFrames = 22, +# WTSClientInfo = 23, +# WTSSessionInfo = 24, +# WTSSessionInfoEx = 25, +# WTSConfigInfo = 26, +# WTSValidationInfo = 27, +# WTSSessionAddressV4 = 28, +# WTSIsRemoteSession = 29 +# } WTS_INFO_CLASS; + +WTSInitialProgram = 0 +WTSApplicationName = 1 +WTSWorkingDirectory = 2 +WTSOEMId = 3 +WTSSessionId = 4 +WTSUserName = 5 +WTSWinStationName = 6 +WTSDomainName = 7 +WTSConnectState = 8 +WTSClientBuildNumber = 9 +WTSClientName 
= 10 +WTSClientDirectory = 11 +WTSClientProductId = 12 +WTSClientHardwareId = 13 +WTSClientAddress = 14 +WTSClientDisplay = 15 +WTSClientProtocolType = 16 +WTSIdleTime = 17 +WTSLogonTime = 18 +WTSIncomingBytes = 19 +WTSOutgoingBytes = 20 +WTSIncomingFrames = 21 +WTSOutgoingFrames = 22 +WTSClientInfo = 23 +WTSSessionInfo = 24 +WTSSessionInfoEx = 25 +WTSConfigInfo = 26 +WTSValidationInfo = 27 +WTSSessionAddressV4 = 28 +WTSIsRemoteSession = 29 + +WTS_INFO_CLASS = ctypes.c_int + +# typedef enum _WTS_CONNECTSTATE_CLASS { +# WTSActive, +# WTSConnected, +# WTSConnectQuery, +# WTSShadow, +# WTSDisconnected, +# WTSIdle, +# WTSListen, +# WTSReset, +# WTSDown, +# WTSInit +# } WTS_CONNECTSTATE_CLASS; + +WTSActive = 0 +WTSConnected = 1 +WTSConnectQuery = 2 +WTSShadow = 3 +WTSDisconnected = 4 +WTSIdle = 5 +WTSListen = 6 +WTSReset = 7 +WTSDown = 8 +WTSInit = 9 + +WTS_CONNECTSTATE_CLASS = ctypes.c_int + +# typedef struct _WTS_CLIENT_DISPLAY { +# DWORD HorizontalResolution; +# DWORD VerticalResolution; +# DWORD ColorDepth; +# } WTS_CLIENT_DISPLAY, *PWTS_CLIENT_DISPLAY; +class WTS_CLIENT_DISPLAY(Structure): + _fields_ = [ + ("HorizontalResolution", DWORD), + ("VerticalResolution", DWORD), + ("ColorDepth", DWORD), + ] +PWTS_CLIENT_DISPLAY = POINTER(WTS_CLIENT_DISPLAY) + +# typedef struct _WTS_CLIENT_ADDRESS { +# DWORD AddressFamily; +# BYTE Address[20]; +# } WTS_CLIENT_ADDRESS, *PWTS_CLIENT_ADDRESS; + +# XXX TODO + +# typedef struct _WTSCLIENT { +# WCHAR ClientName[CLIENTNAME_LENGTH + 1]; +# WCHAR Domain[DOMAIN_LENGTH + 1 ]; +# WCHAR UserName[USERNAME_LENGTH + 1]; +# WCHAR WorkDirectory[MAX_PATH + 1]; +# WCHAR InitialProgram[MAX_PATH + 1]; +# BYTE EncryptionLevel; +# ULONG ClientAddressFamily; +# USHORT ClientAddress[CLIENTADDRESS_LENGTH + 1]; +# USHORT HRes; +# USHORT VRes; +# USHORT ColorDepth; +# WCHAR ClientDirectory[MAX_PATH + 1]; +# ULONG ClientBuildNumber; +# ULONG ClientHardwareId; +# USHORT ClientProductId; +# USHORT OutBufCountHost; +# USHORT OutBufCountClient; +# USHORT OutBufLength; +# WCHAR DeviceId[MAX_PATH + 1]; +# } WTSCLIENT, *PWTSCLIENT; + +# XXX TODO + +# typedef struct _WTSINFO { +# WTS_CONNECTSTATE_CLASS State; +# DWORD SessionId; +# DWORD IncomingBytes; +# DWORD OutgoingBytes; +# DWORD IncomingCompressedBytes; +# DWORD OutgoingCompressedBytes; +# WCHAR WinStationName; +# WCHAR Domain; +# WCHAR UserName; +# LARGE_INTEGER ConnectTime; +# LARGE_INTEGER DisconnectTime; +# LARGE_INTEGER LastInputTime; +# LARGE_INTEGER LogonTime; +# LARGE_INTEGER CurrentTime; +# } WTSINFO, *PWTSINFO; + +# XXX TODO + +# typedef struct _WTSINFOEX { +# DWORD Level; +# WTSINFOEX_LEVEL Data; +# } WTSINFOEX, *PWTSINFOEX; + +# XXX TODO + +#--- wtsapi32.dll ------------------------------------------------------------- + +# void WTSFreeMemory( +# __in PVOID pMemory +# ); +def WTSFreeMemory(pMemory): + _WTSFreeMemory = windll.wtsapi32.WTSFreeMemory + _WTSFreeMemory.argtypes = [PVOID] + _WTSFreeMemory.restype = None + _WTSFreeMemory(pMemory) + +# BOOL WTSEnumerateProcesses( +# __in HANDLE hServer, +# __in DWORD Reserved, +# __in DWORD Version, +# __out PWTS_PROCESS_INFO *ppProcessInfo, +# __out DWORD *pCount +# ); +def WTSEnumerateProcessesA(hServer = WTS_CURRENT_SERVER_HANDLE): + _WTSEnumerateProcessesA = windll.wtsapi32.WTSEnumerateProcessesA + _WTSEnumerateProcessesA.argtypes = [HANDLE, DWORD, DWORD, POINTER(PWTS_PROCESS_INFOA), PDWORD] + _WTSEnumerateProcessesA.restype = bool + _WTSEnumerateProcessesA.errcheck = RaiseIfZero + + pProcessInfo = PWTS_PROCESS_INFOA() + Count = DWORD(0) + _WTSEnumerateProcessesA(hServer, 
0, 1, byref(pProcessInfo), byref(Count)) + return pProcessInfo, Count.value + +def WTSEnumerateProcessesW(hServer = WTS_CURRENT_SERVER_HANDLE): + _WTSEnumerateProcessesW = windll.wtsapi32.WTSEnumerateProcessesW + _WTSEnumerateProcessesW.argtypes = [HANDLE, DWORD, DWORD, POINTER(PWTS_PROCESS_INFOW), PDWORD] + _WTSEnumerateProcessesW.restype = bool + _WTSEnumerateProcessesW.errcheck = RaiseIfZero + + pProcessInfo = PWTS_PROCESS_INFOW() + Count = DWORD(0) + _WTSEnumerateProcessesW(hServer, 0, 1, byref(pProcessInfo), byref(Count)) + return pProcessInfo, Count.value + +WTSEnumerateProcesses = DefaultStringType(WTSEnumerateProcessesA, WTSEnumerateProcessesW) + +# BOOL WTSTerminateProcess( +# __in HANDLE hServer, +# __in DWORD ProcessId, +# __in DWORD ExitCode +# ); +def WTSTerminateProcess(hServer, ProcessId, ExitCode): + _WTSTerminateProcess = windll.wtsapi32.WTSTerminateProcess + _WTSTerminateProcess.argtypes = [HANDLE, DWORD, DWORD] + _WTSTerminateProcess.restype = bool + _WTSTerminateProcess.errcheck = RaiseIfZero + _WTSTerminateProcess(hServer, ProcessId, ExitCode) + +# BOOL WTSQuerySessionInformation( +# __in HANDLE hServer, +# __in DWORD SessionId, +# __in WTS_INFO_CLASS WTSInfoClass, +# __out LPTSTR *ppBuffer, +# __out DWORD *pBytesReturned +# ); + +# XXX TODO + +#--- kernel32.dll ------------------------------------------------------------- + +# I've no idea why these functions are in kernel32.dll instead of wtsapi32.dll + +# BOOL ProcessIdToSessionId( +# __in DWORD dwProcessId, +# __out DWORD *pSessionId +# ); +def ProcessIdToSessionId(dwProcessId): + _ProcessIdToSessionId = windll.kernel32.ProcessIdToSessionId + _ProcessIdToSessionId.argtypes = [DWORD, PDWORD] + _ProcessIdToSessionId.restype = bool + _ProcessIdToSessionId.errcheck = RaiseIfZero + + dwSessionId = DWORD(0) + _ProcessIdToSessionId(dwProcessId, byref(dwSessionId)) + return dwSessionId.value + +# DWORD WTSGetActiveConsoleSessionId(void); +def WTSGetActiveConsoleSessionId(): + _WTSGetActiveConsoleSessionId = windll.kernel32.WTSGetActiveConsoleSessionId + _WTSGetActiveConsoleSessionId.argtypes = [] + _WTSGetActiveConsoleSessionId.restype = DWORD + _WTSGetActiveConsoleSessionId.errcheck = RaiseIfZero + return _WTSGetActiveConsoleSessionId() + +#============================================================================== +# This calculates the list of exported symbols. +_all = set(vars().keys()).difference(_all) +__all__ = [_x for _x in _all if not _x.startswith('_')] +__all__.sort() +#============================================================================== diff --git a/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/window.py b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/window.py new file mode 100644 index 00000000..6e865e7a --- /dev/null +++ b/ptvsd/pydevd/pydevd_attach_to_process/winappdbg/window.py @@ -0,0 +1,759 @@ +#!~/.wine/drive_c/Python25/python.exe +# -*- coding: utf-8 -*- + +# Copyright (c) 2009-2014, Mario Vilas +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright +# notice,this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. 
+# * Neither the name of the copyright holder nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. + +""" +Window instrumentation. + +@group Instrumentation: + Window +""" + +__revision__ = "$Id$" + +__all__ = ['Window'] + +from winappdbg import win32 + +# delayed imports +Process = None +Thread = None + +#============================================================================== + +# Unlike Process, Thread and Module, there's no container for Window objects. +# That's because Window objects don't really store any data besides the handle. + +# XXX TODO +# * implement sending fake user input (mouse and keyboard messages) +# * maybe implement low-level hooks? (they don't require a dll to be injected) + +# XXX TODO +# +# Will it be possible to implement window hooks too? That requires a DLL to be +# injected in the target process. Perhaps with CPython it could be done easier, +# compiling a native extension is the safe bet, but both require having a non +# pure Python module, which is something I was trying to avoid so far. +# +# Another possibility would be to malloc some CC's in the target process and +# point the hook callback to it. We'd need to have the remote procedure call +# feature first as (I believe) the hook can't be set remotely in this case. + +class Window (object): + """ + Interface to an open window in the current desktop. + + @group Properties: + get_handle, get_pid, get_tid, + get_process, get_thread, + set_process, set_thread, + get_classname, get_style, get_extended_style, + get_text, set_text, + get_placement, set_placement, + get_screen_rect, get_client_rect, + screen_to_client, client_to_screen + + @group State: + is_valid, is_visible, is_enabled, is_maximized, is_minimized, is_child, + is_zoomed, is_iconic + + @group Navigation: + get_parent, get_children, get_root, get_tree, + get_child_at + + @group Instrumentation: + enable, disable, show, hide, maximize, minimize, restore, move, kill + + @group Low-level access: + send, post + + @type hWnd: int + @ivar hWnd: Window handle. + + @type dwProcessId: int + @ivar dwProcessId: Global ID of the process that owns this window. + + @type dwThreadId: int + @ivar dwThreadId: Global ID of the thread that owns this window. + + @type process: L{Process} + @ivar process: Process that owns this window. + Use the L{get_process} method instead. + + @type thread: L{Thread} + @ivar thread: Thread that owns this window. + Use the L{get_thread} method instead. + + @type classname: str + @ivar classname: Window class name. + + @type text: str + @ivar text: Window text (caption). 
+ + @type placement: L{win32.WindowPlacement} + @ivar placement: Window placement in the desktop. + """ + + def __init__(self, hWnd = None, process = None, thread = None): + """ + @type hWnd: int or L{win32.HWND} + @param hWnd: Window handle. + + @type process: L{Process} + @param process: (Optional) Process that owns this window. + + @type thread: L{Thread} + @param thread: (Optional) Thread that owns this window. + """ + self.hWnd = hWnd + self.dwProcessId = None + self.dwThreadId = None + self.set_process(process) + self.set_thread(thread) + + @property + def _as_parameter_(self): + """ + Compatibility with ctypes. + Allows passing transparently a Window object to an API call. + """ + return self.get_handle() + + def get_handle(self): + """ + @rtype: int + @return: Window handle. + @raise ValueError: No window handle set. + """ + if self.hWnd is None: + raise ValueError("No window handle set!") + return self.hWnd + + def get_pid(self): + """ + @rtype: int + @return: Global ID of the process that owns this window. + """ + if self.dwProcessId is not None: + return self.dwProcessId + self.__get_pid_and_tid() + return self.dwProcessId + + def get_tid(self): + """ + @rtype: int + @return: Global ID of the thread that owns this window. + """ + if self.dwThreadId is not None: + return self.dwThreadId + self.__get_pid_and_tid() + return self.dwThreadId + + def __get_pid_and_tid(self): + "Internally used by get_pid() and get_tid()." + self.dwThreadId, self.dwProcessId = \ + win32.GetWindowThreadProcessId(self.get_handle()) + + def __load_Process_class(self): + global Process # delayed import + if Process is None: + from winappdbg.process import Process + + def __load_Thread_class(self): + global Thread # delayed import + if Thread is None: + from winappdbg.thread import Thread + + def get_process(self): + """ + @rtype: L{Process} + @return: Parent Process object. + """ + if self.__process is not None: + return self.__process + self.__load_Process_class() + self.__process = Process(self.get_pid()) + return self.__process + + def set_process(self, process = None): + """ + Manually set the parent process. Use with care! + + @type process: L{Process} + @param process: (Optional) Process object. Use C{None} to autodetect. + """ + if process is None: + self.__process = None + else: + self.__load_Process_class() + if not isinstance(process, Process): + msg = "Parent process must be a Process instance, " + msg += "got %s instead" % type(process) + raise TypeError(msg) + self.dwProcessId = process.get_pid() + self.__process = process + + def get_thread(self): + """ + @rtype: L{Thread} + @return: Parent Thread object. + """ + if self.__thread is not None: + return self.__thread + self.__load_Thread_class() + self.__thread = Thread(self.get_tid()) + return self.__thread + + def set_thread(self, thread = None): + """ + Manually set the thread process. Use with care! + + @type thread: L{Thread} + @param thread: (Optional) Thread object. Use C{None} to autodetect. + """ + if thread is None: + self.__thread = None + else: + self.__load_Thread_class() + if not isinstance(thread, Thread): + msg = "Parent thread must be a Thread instance, " + msg += "got %s instead" % type(thread) + raise TypeError(msg) + self.dwThreadId = thread.get_tid() + self.__thread = thread + + def __get_window(self, hWnd): + """ + User internally to get another Window from this one. + It'll try to copy the parent Process and Thread references if possible. 
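+ The cached Process reference is copied only when the PIDs match, and the Thread reference only when the TIDs match; anything else is resolved lazily via L{get_process} and L{get_thread}.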
+ """ + window = Window(hWnd) + if window.get_pid() == self.get_pid(): + window.set_process( self.get_process() ) + if window.get_tid() == self.get_tid(): + window.set_thread( self.get_thread() ) + return window + +#------------------------------------------------------------------------------ + + def get_classname(self): + """ + @rtype: str + @return: Window class name. + + @raise WindowsError: An error occured while processing this request. + """ + return win32.GetClassName( self.get_handle() ) + + def get_style(self): + """ + @rtype: int + @return: Window style mask. + + @raise WindowsError: An error occured while processing this request. + """ + return win32.GetWindowLongPtr( self.get_handle(), win32.GWL_STYLE ) + + def get_extended_style(self): + """ + @rtype: int + @return: Window extended style mask. + + @raise WindowsError: An error occured while processing this request. + """ + return win32.GetWindowLongPtr( self.get_handle(), win32.GWL_EXSTYLE ) + + def get_text(self): + """ + @see: L{set_text} + @rtype: str + @return: Window text (caption) on success, C{None} on error. + """ + try: + return win32.GetWindowText( self.get_handle() ) + except WindowsError: + return None + + def set_text(self, text): + """ + Set the window text (caption). + + @see: L{get_text} + + @type text: str + @param text: New window text. + + @raise WindowsError: An error occured while processing this request. + """ + win32.SetWindowText( self.get_handle(), text ) + + def get_placement(self): + """ + Retrieve the window placement in the desktop. + + @see: L{set_placement} + + @rtype: L{win32.WindowPlacement} + @return: Window placement in the desktop. + + @raise WindowsError: An error occured while processing this request. + """ + return win32.GetWindowPlacement( self.get_handle() ) + + def set_placement(self, placement): + """ + Set the window placement in the desktop. + + @see: L{get_placement} + + @type placement: L{win32.WindowPlacement} + @param placement: Window placement in the desktop. + + @raise WindowsError: An error occured while processing this request. + """ + win32.SetWindowPlacement( self.get_handle(), placement ) + + def get_screen_rect(self): + """ + Get the window coordinates in the desktop. + + @rtype: L{win32.Rect} + @return: Rectangle occupied by the window in the desktop. + + @raise WindowsError: An error occured while processing this request. + """ + return win32.GetWindowRect( self.get_handle() ) + + def get_client_rect(self): + """ + Get the window's client area coordinates in the desktop. + + @rtype: L{win32.Rect} + @return: Rectangle occupied by the window's client area in the desktop. + + @raise WindowsError: An error occured while processing this request. + """ + cr = win32.GetClientRect( self.get_handle() ) + cr.left, cr.top = self.client_to_screen(cr.left, cr.top) + cr.right, cr.bottom = self.client_to_screen(cr.right, cr.bottom) + return cr + + # XXX TODO + # * properties x, y, width, height + # * properties left, top, right, bottom + + process = property(get_process, set_process, doc="") + thread = property(get_thread, set_thread, doc="") + classname = property(get_classname, doc="") + style = property(get_style, doc="") + exstyle = property(get_extended_style, doc="") + text = property(get_text, set_text, doc="") + placement = property(get_placement, set_placement, doc="") + +#------------------------------------------------------------------------------ + + def client_to_screen(self, x, y): + """ + Translates window client coordinates to screen coordinates. 
+ + @note: This is a simplified interface to some of the functionality of + the L{win32.Point} class. + + @see: {win32.Point.client_to_screen} + + @type x: int + @param x: Horizontal coordinate. + @type y: int + @param y: Vertical coordinate. + + @rtype: tuple( int, int ) + @return: Translated coordinates in a tuple (x, y). + + @raise WindowsError: An error occured while processing this request. + """ + return tuple( win32.ClientToScreen( self.get_handle(), (x, y) ) ) + + def screen_to_client(self, x, y): + """ + Translates window screen coordinates to client coordinates. + + @note: This is a simplified interface to some of the functionality of + the L{win32.Point} class. + + @see: {win32.Point.screen_to_client} + + @type x: int + @param x: Horizontal coordinate. + @type y: int + @param y: Vertical coordinate. + + @rtype: tuple( int, int ) + @return: Translated coordinates in a tuple (x, y). + + @raise WindowsError: An error occured while processing this request. + """ + return tuple( win32.ScreenToClient( self.get_handle(), (x, y) ) ) + +#------------------------------------------------------------------------------ + + def get_parent(self): + """ + @see: L{get_children} + @rtype: L{Window} or None + @return: Parent window. Returns C{None} if the window has no parent. + @raise WindowsError: An error occured while processing this request. + """ + hWnd = win32.GetParent( self.get_handle() ) + if hWnd: + return self.__get_window(hWnd) + + def get_children(self): + """ + @see: L{get_parent} + @rtype: list( L{Window} ) + @return: List of child windows. + @raise WindowsError: An error occured while processing this request. + """ + return [ + self.__get_window(hWnd) \ + for hWnd in win32.EnumChildWindows( self.get_handle() ) + ] + + def get_tree(self): + """ + @see: L{get_root} + @rtype: dict( L{Window} S{->} dict( ... ) ) + @return: Dictionary of dictionaries forming a tree of child windows. + @raise WindowsError: An error occured while processing this request. + """ + subtree = dict() + for aWindow in self.get_children(): + subtree[ aWindow ] = aWindow.get_tree() + return subtree + + def get_root(self): + """ + @see: L{get_tree} + @rtype: L{Window} + @return: If this is a child window, return the top-level window it + belongs to. + If this window is already a top-level window, returns itself. + @raise WindowsError: An error occured while processing this request. + """ + hWnd = self.get_handle() + history = set() + hPrevWnd = hWnd + while hWnd and hWnd not in history: + history.add(hWnd) + hPrevWnd = hWnd + hWnd = win32.GetParent(hWnd) + if hWnd in history: + # See: https://docs.google.com/View?id=dfqd62nk_228h28szgz + return self + if hPrevWnd != self.get_handle(): + return self.__get_window(hPrevWnd) + return self + + def get_child_at(self, x, y, bAllowTransparency = True): + """ + Get the child window located at the given coordinates. If no such + window exists an exception is raised. + + @see: L{get_children} + + @type x: int + @param x: Horizontal coordinate. + + @type y: int + @param y: Vertical coordinate. + + @type bAllowTransparency: bool + @param bAllowTransparency: If C{True} transparent areas in windows are + ignored, returning the window behind them. If C{False} transparent + areas are treated just like any other area. + + @rtype: L{Window} + @return: Child window at the requested position, or C{None} if there + is no window at those coordinates. 
+ """ + try: + if bAllowTransparency: + hWnd = win32.RealChildWindowFromPoint( self.get_handle(), (x, y) ) + else: + hWnd = win32.ChildWindowFromPoint( self.get_handle(), (x, y) ) + if hWnd: + return self.__get_window(hWnd) + except WindowsError: + pass + return None + +#------------------------------------------------------------------------------ + + def is_valid(self): + """ + @rtype: bool + @return: C{True} if the window handle is still valid. + """ + return win32.IsWindow( self.get_handle() ) + + def is_visible(self): + """ + @see: {show}, {hide} + @rtype: bool + @return: C{True} if the window is in a visible state. + """ + return win32.IsWindowVisible( self.get_handle() ) + + def is_enabled(self): + """ + @see: {enable}, {disable} + @rtype: bool + @return: C{True} if the window is in an enabled state. + """ + return win32.IsWindowEnabled( self.get_handle() ) + + def is_maximized(self): + """ + @see: L{maximize} + @rtype: bool + @return: C{True} if the window is maximized. + """ + return win32.IsZoomed( self.get_handle() ) + + def is_minimized(self): + """ + @see: L{minimize} + @rtype: bool + @return: C{True} if the window is minimized. + """ + return win32.IsIconic( self.get_handle() ) + + def is_child(self): + """ + @see: L{get_parent} + @rtype: bool + @return: C{True} if the window is a child window. + """ + return win32.IsChild( self.get_handle() ) + + is_zoomed = is_maximized + is_iconic = is_minimized + +#------------------------------------------------------------------------------ + + def enable(self): + """ + Enable the user input for the window. + + @see: L{disable} + + @raise WindowsError: An error occured while processing this request. + """ + win32.EnableWindow( self.get_handle(), True ) + + def disable(self): + """ + Disable the user input for the window. + + @see: L{enable} + + @raise WindowsError: An error occured while processing this request. + """ + win32.EnableWindow( self.get_handle(), False ) + + def show(self, bAsync = True): + """ + Make the window visible. + + @see: L{hide} + + @type bAsync: bool + @param bAsync: Perform the request asynchronously. + + @raise WindowsError: An error occured while processing this request. + """ + if bAsync: + win32.ShowWindowAsync( self.get_handle(), win32.SW_SHOW ) + else: + win32.ShowWindow( self.get_handle(), win32.SW_SHOW ) + + def hide(self, bAsync = True): + """ + Make the window invisible. + + @see: L{show} + + @type bAsync: bool + @param bAsync: Perform the request asynchronously. + + @raise WindowsError: An error occured while processing this request. + """ + if bAsync: + win32.ShowWindowAsync( self.get_handle(), win32.SW_HIDE ) + else: + win32.ShowWindow( self.get_handle(), win32.SW_HIDE ) + + def maximize(self, bAsync = True): + """ + Maximize the window. + + @see: L{minimize}, L{restore} + + @type bAsync: bool + @param bAsync: Perform the request asynchronously. + + @raise WindowsError: An error occured while processing this request. + """ + if bAsync: + win32.ShowWindowAsync( self.get_handle(), win32.SW_MAXIMIZE ) + else: + win32.ShowWindow( self.get_handle(), win32.SW_MAXIMIZE ) + + def minimize(self, bAsync = True): + """ + Minimize the window. + + @see: L{maximize}, L{restore} + + @type bAsync: bool + @param bAsync: Perform the request asynchronously. + + @raise WindowsError: An error occured while processing this request. 
+ """ + if bAsync: + win32.ShowWindowAsync( self.get_handle(), win32.SW_MINIMIZE ) + else: + win32.ShowWindow( self.get_handle(), win32.SW_MINIMIZE ) + + def restore(self, bAsync = True): + """ + Unmaximize and unminimize the window. + + @see: L{maximize}, L{minimize} + + @type bAsync: bool + @param bAsync: Perform the request asynchronously. + + @raise WindowsError: An error occured while processing this request. + """ + if bAsync: + win32.ShowWindowAsync( self.get_handle(), win32.SW_RESTORE ) + else: + win32.ShowWindow( self.get_handle(), win32.SW_RESTORE ) + + def move(self, x = None, y = None, width = None, height = None, + bRepaint = True): + """ + Moves and/or resizes the window. + + @note: This is request is performed syncronously. + + @type x: int + @param x: (Optional) New horizontal coordinate. + + @type y: int + @param y: (Optional) New vertical coordinate. + + @type width: int + @param width: (Optional) Desired window width. + + @type height: int + @param height: (Optional) Desired window height. + + @type bRepaint: bool + @param bRepaint: + (Optional) C{True} if the window should be redrawn afterwards. + + @raise WindowsError: An error occured while processing this request. + """ + if None in (x, y, width, height): + rect = self.get_screen_rect() + if x is None: + x = rect.left + if y is None: + y = rect.top + if width is None: + width = rect.right - rect.left + if height is None: + height = rect.bottom - rect.top + win32.MoveWindow(self.get_handle(), x, y, width, height, bRepaint) + + def kill(self): + """ + Signals the program to quit. + + @note: This is an asyncronous request. + + @raise WindowsError: An error occured while processing this request. + """ + self.post(win32.WM_QUIT) + + def send(self, uMsg, wParam = None, lParam = None, dwTimeout = None): + """ + Send a low-level window message syncronically. + + @type uMsg: int + @param uMsg: Message code. + + @param wParam: + The type and meaning of this parameter depends on the message. + + @param lParam: + The type and meaning of this parameter depends on the message. + + @param dwTimeout: Optional timeout for the operation. + Use C{None} to wait indefinitely. + + @rtype: int + @return: The meaning of the return value depends on the window message. + Typically a value of C{0} means an error occured. You can get the + error code by calling L{win32.GetLastError}. + """ + if dwTimeout is None: + return win32.SendMessage(self.get_handle(), uMsg, wParam, lParam) + return win32.SendMessageTimeout( + self.get_handle(), uMsg, wParam, lParam, + win32.SMTO_ABORTIFHUNG | win32.SMTO_ERRORONEXIT, dwTimeout) + + def post(self, uMsg, wParam = None, lParam = None): + """ + Post a low-level window message asyncronically. + + @type uMsg: int + @param uMsg: Message code. + + @param wParam: + The type and meaning of this parameter depends on the message. + + @param lParam: + The type and meaning of this parameter depends on the message. + + @raise WindowsError: An error occured while sending the message. 
+ """ + win32.PostMessage(self.get_handle(), uMsg, wParam, lParam) diff --git a/ptvsd/pydevd/pydevd_concurrency_analyser/__init__.py b/ptvsd/pydevd/pydevd_concurrency_analyser/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/pydevd_concurrency_analyser/pydevd_concurrency_logger.py b/ptvsd/pydevd/pydevd_concurrency_analyser/pydevd_concurrency_logger.py new file mode 100644 index 00000000..d470493b --- /dev/null +++ b/ptvsd/pydevd/pydevd_concurrency_analyser/pydevd_concurrency_logger.py @@ -0,0 +1,342 @@ +from pydevd_concurrency_analyser.pydevd_thread_wrappers import ObjectWrapper, wrap_attr + +import pydevd_file_utils +from _pydevd_bundle import pydevd_xml +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +from _pydevd_bundle.pydevd_constants import get_thread_id, IS_PY3K + +file_system_encoding = getfilesystemencoding() + +try: + from urllib import quote +except: + from urllib.parse import quote # @UnresolvedImport + +from _pydev_imps._pydev_saved_modules import threading +threadingCurrentThread = threading.currentThread + + +DONT_TRACE_THREADING = ['threading.py', 'pydevd.py'] +INNER_METHODS = ['_stop'] +INNER_FILES = ['threading.py'] +THREAD_METHODS = ['start', '_stop', 'join'] +LOCK_METHODS = ['__init__', 'acquire', 'release', '__enter__', '__exit__'] +QUEUE_METHODS = ['put', 'get'] + +from _pydevd_bundle.pydevd_comm import GlobalDebuggerHolder, NetCommand +import traceback + +import time +# return time since epoch in milliseconds +cur_time = lambda: int(round(time.time() * 1000000)) + + +try: + import asyncio # @UnresolvedImport +except: + pass + + +def get_text_list_for_frame(frame): + # partial copy-paste from make_thread_suspend_str + curFrame = frame + cmdTextList = [] + try: + while curFrame: + #print cmdText + myId = str(id(curFrame)) + #print "id is ", myId + + if curFrame.f_code is None: + break #Iron Python sometimes does not have it! + + myName = curFrame.f_code.co_name #method name (if in method) or ? if global + if myName is None: + break #Iron Python sometimes does not have it! 
+ + #print "name is ", myName + + filename = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(curFrame)[1] + + myFile = pydevd_file_utils.norm_file_to_client(filename) + if file_system_encoding.lower() != "utf-8" and hasattr(myFile, "decode"): + # myFile is a byte string encoded using the file system encoding + # convert it to utf8 + myFile = myFile.decode(file_system_encoding).encode("utf-8") + + #print "file is ", myFile + #myFile = inspect.getsourcefile(curFrame) or inspect.getfile(frame) + + myLine = str(curFrame.f_lineno) + #print "line is ", myLine + + #the variables are all gotten 'on-demand' + #variables = pydevd_xml.frame_vars_to_xml(curFrame.f_locals) + + variables = '' + cmdTextList.append('' % (quote(myFile, '/>_= \t'), myLine)) + cmdTextList.append(variables) + cmdTextList.append("") + curFrame = curFrame.f_back + except : + traceback.print_exc() + + return cmdTextList + + +def send_message(event_class, time, name, thread_id, type, event, file, line, frame, lock_id=0, parent=None): + dbg = GlobalDebuggerHolder.global_dbg + cmdTextList = [''] + + cmdTextList.append('<' + event_class) + cmdTextList.append(' time="%s"' % pydevd_xml.make_valid_xml_value(str(time))) + cmdTextList.append(' name="%s"' % pydevd_xml.make_valid_xml_value(name)) + cmdTextList.append(' thread_id="%s"' % pydevd_xml.make_valid_xml_value(thread_id)) + cmdTextList.append(' type="%s"' % pydevd_xml.make_valid_xml_value(type)) + if type == "lock": + cmdTextList.append(' lock_id="%s"' % pydevd_xml.make_valid_xml_value(str(lock_id))) + if parent is not None: + cmdTextList.append(' parent="%s"' % pydevd_xml.make_valid_xml_value(parent)) + cmdTextList.append(' event="%s"' % pydevd_xml.make_valid_xml_value(event)) + cmdTextList.append(' file="%s"' % pydevd_xml.make_valid_xml_value(file)) + cmdTextList.append(' line="%s"' % pydevd_xml.make_valid_xml_value(str(line))) + cmdTextList.append('>') + + cmdTextList += get_text_list_for_frame(frame) + cmdTextList.append('') + + text = ''.join(cmdTextList) + if dbg.writer is not None: + dbg.writer.add_command(NetCommand(145, 0, text)) + + +def log_new_thread(global_debugger): + t = threadingCurrentThread() + event_time = cur_time() - global_debugger.thread_analyser.start_time + send_message("threading_event", event_time, t.getName(), get_thread_id(t), "thread", + "start", "code_name", 0, None, parent=get_thread_id(t)) + + +class ThreadingLogger: + def __init__(self): + self.start_time = cur_time() + + def set_start_time(self, time): + self.start_time = time + + def log_event(self, frame): + write_log = False + self_obj = None + if "self" in frame.f_locals: + self_obj = frame.f_locals["self"] + if isinstance(self_obj, threading.Thread) or self_obj.__class__ == ObjectWrapper: + write_log = True + if hasattr(frame, "f_back") and frame.f_back is not None: + back = frame.f_back + if hasattr(back, "f_back") and back.f_back is not None: + back = back.f_back + if "self" in back.f_locals: + if isinstance(back.f_locals["self"], threading.Thread): + write_log = True + try: + if write_log: + t = threadingCurrentThread() + back = frame.f_back + if not back: + return + _, name, back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back) + event_time = cur_time() - self.start_time + method_name = frame.f_code.co_name + + if isinstance(self_obj, threading.Thread): + if not hasattr(self_obj, "_pydev_run_patched"): + wrap_attr(self_obj, "run") + if (method_name in THREAD_METHODS) and (back_base not in DONT_TRACE_THREADING or \ + (method_name in INNER_METHODS and 
back_base in INNER_FILES)): + thread_id = get_thread_id(self_obj) + name = self_obj.getName() + real_method = frame.f_code.co_name + parent = None + if real_method == "_stop": + if back_base in INNER_FILES and \ + back.f_code.co_name == "_wait_for_tstate_lock": + back = back.f_back.f_back + real_method = "stop" + if hasattr(self_obj, "_pydev_join_called"): + parent = get_thread_id(t) + elif real_method == "join": + # join called in the current thread, not in self object + if not self_obj.is_alive(): + return + thread_id = get_thread_id(t) + name = t.getName() + self_obj._pydev_join_called = True + + if real_method == "start": + parent = get_thread_id(t) + send_message("threading_event", event_time, name, thread_id, "thread", + real_method, back.f_code.co_filename, back.f_lineno, back, parent=parent) + # print(event_time, self_obj.getName(), thread_id, "thread", + # real_method, back.f_code.co_filename, back.f_lineno) + + if method_name == "pydev_after_run_call": + if hasattr(frame, "f_back") and frame.f_back is not None: + back = frame.f_back + if hasattr(back, "f_back") and back.f_back is not None: + back = back.f_back + if "self" in back.f_locals: + if isinstance(back.f_locals["self"], threading.Thread): + my_self_obj = frame.f_back.f_back.f_locals["self"] + my_back = frame.f_back.f_back + my_thread_id = get_thread_id(my_self_obj) + send_massage = True + if IS_PY3K and hasattr(my_self_obj, "_pydev_join_called"): + send_massage = False + # we can't detect stop after join in Python 2 yet + if send_massage: + send_message("threading_event", event_time, "Thread", my_thread_id, "thread", + "stop", my_back.f_code.co_filename, my_back.f_lineno, my_back, parent=None) + + if self_obj.__class__ == ObjectWrapper: + if back_base in DONT_TRACE_THREADING: + # do not trace methods called from threading + return + back_back_base = pydevd_file_utils.get_abs_path_real_path_and_base_from_frame(back.f_back)[-1] + back = back.f_back + if back_back_base in DONT_TRACE_THREADING: + # back_back_base is the file, where the method was called froms + return + if method_name == "__init__": + send_message("threading_event", event_time, t.getName(), get_thread_id(t), "lock", + method_name, back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(frame.f_locals["self"]))) + if "attr" in frame.f_locals and \ + (frame.f_locals["attr"] in LOCK_METHODS or + frame.f_locals["attr"] in QUEUE_METHODS): + real_method = frame.f_locals["attr"] + if method_name == "call_begin": + real_method += "_begin" + elif method_name == "call_end": + real_method += "_end" + else: + return + if real_method == "release_end": + # do not log release end. 
Maybe use it later + return + send_message("threading_event", event_time, t.getName(), get_thread_id(t), "lock", + real_method, back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(self_obj))) + + if real_method in ("put_end", "get_end"): + # fake release for queue, cause we don't call it directly + send_message("threading_event", event_time, t.getName(), get_thread_id(t), "lock", + "release", back.f_code.co_filename, back.f_lineno, back, lock_id=str(id(self_obj))) + # print(event_time, t.getName(), get_thread_id(t), "lock", + # real_method, back.f_code.co_filename, back.f_lineno) + + + except Exception: + traceback.print_exc() + + +class NameManager: + def __init__(self, name_prefix): + self.tasks = {} + self.last = 0 + self.prefix = name_prefix + + def get(self, id): + if id not in self.tasks: + self.last += 1 + self.tasks[id] = self.prefix + "-" + str(self.last) + return self.tasks[id] + + +class AsyncioLogger: + def __init__(self): + self.task_mgr = NameManager("Task") + self.coro_mgr = NameManager("Coro") + self.start_time = cur_time() + + def get_task_id(self, frame): + while frame is not None: + if "self" in frame.f_locals: + self_obj = frame.f_locals["self"] + if isinstance(self_obj, asyncio.Task): + method_name = frame.f_code.co_name + if method_name == "_step": + return id(self_obj) + frame = frame.f_back + return None + + def log_event(self, frame): + event_time = cur_time() - self.start_time + + # Debug loop iterations + # if isinstance(self_obj, asyncio.base_events.BaseEventLoop): + # if method_name == "_run_once": + # print("Loop iteration") + + if not hasattr(frame, "f_back") or frame.f_back is None: + return + back = frame.f_back + + if "self" in frame.f_locals: + self_obj = frame.f_locals["self"] + if isinstance(self_obj, asyncio.Task): + method_name = frame.f_code.co_name + if method_name == "set_result": + task_id = id(self_obj) + task_name = self.task_mgr.get(str(task_id)) + send_message("asyncio_event", event_time, task_name, task_name, "thread", "stop", frame.f_code.co_filename, + frame.f_lineno, frame) + + method_name = back.f_code.co_name + if method_name == "__init__": + task_id = id(self_obj) + task_name = self.task_mgr.get(str(task_id)) + send_message("asyncio_event", event_time, task_name, task_name, "thread", "start", frame.f_code.co_filename, + frame.f_lineno, frame) + + method_name = frame.f_code.co_name + if isinstance(self_obj, asyncio.Lock): + if method_name in ("acquire", "release"): + task_id = self.get_task_id(frame) + task_name = self.task_mgr.get(str(task_id)) + + if method_name == "acquire": + if not self_obj._waiters and not self_obj.locked(): + send_message("asyncio_event", event_time, task_name, task_name, "lock", + method_name+"_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + if self_obj.locked(): + method_name += "_begin" + else: + method_name += "_end" + elif method_name == "release": + method_name += "_end" + + send_message("asyncio_event", event_time, task_name, task_name, "lock", + method_name, frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + + if isinstance(self_obj, asyncio.Queue): + if method_name in ("put", "get", "_put", "_get"): + task_id = self.get_task_id(frame) + task_name = self.task_mgr.get(str(task_id)) + + if method_name == "put": + send_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + elif method_name == "_put": + 
send_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_end", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + send_message("asyncio_event", event_time, task_name, task_name, "lock", + "release", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + elif method_name == "get": + back = frame.f_back + if back.f_code.co_name != "send": + send_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_begin", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + else: + send_message("asyncio_event", event_time, task_name, task_name, "lock", + "acquire_end", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) + send_message("asyncio_event", event_time, task_name, task_name, "lock", + "release", frame.f_code.co_filename, frame.f_lineno, frame, lock_id=str(id(self_obj))) diff --git a/ptvsd/pydevd/pydevd_concurrency_analyser/pydevd_thread_wrappers.py b/ptvsd/pydevd/pydevd_concurrency_analyser/pydevd_thread_wrappers.py new file mode 100644 index 00000000..435e2879 --- /dev/null +++ b/ptvsd/pydevd/pydevd_concurrency_analyser/pydevd_thread_wrappers.py @@ -0,0 +1,82 @@ +from _pydev_imps._pydev_saved_modules import threading + + +def wrapper(fun): + def pydev_after_run_call(): + pass + + def inner(*args, **kwargs): + fun(*args, **kwargs) + pydev_after_run_call() + return inner + + +def wrap_attr(obj, attr): + t_save_start = getattr(obj, attr) + setattr(obj, attr, wrapper(t_save_start)) + obj._pydev_run_patched = True + + +class ObjectWrapper(object): + def __init__(self, obj): + self.wrapped_object = obj + try: + import functools + functools.update_wrapper(self, obj) + except: + pass + + def __getattr__(self, attr): + orig_attr = getattr(self.wrapped_object, attr) #.__getattribute__(attr) + if callable(orig_attr): + def patched_attr(*args, **kwargs): + self.call_begin(attr) + result = orig_attr(*args, **kwargs) + self.call_end(attr) + if result == self.wrapped_object: + return self + return result + return patched_attr + else: + return orig_attr + + def call_begin(self, attr): + pass + + def call_end(self, attr): + pass + + def __enter__(self): + self.call_begin("__enter__") + self.wrapped_object.__enter__() + self.call_end("__enter__") + + def __exit__(self, exc_type, exc_val, exc_tb): + self.call_begin("__exit__") + self.wrapped_object.__exit__(exc_type, exc_val, exc_tb) + + +def factory_wrapper(fun): + def inner(*args, **kwargs): + obj = fun(*args, **kwargs) + return ObjectWrapper(obj) + return inner + + +def wrap_threads(): + # TODO: add wrappers for thread and _thread + # import _thread as mod + # print("Thread imported") + # mod.start_new_thread = wrapper(mod.start_new_thread) + import threading + threading.Lock = factory_wrapper(threading.Lock) + threading.RLock = factory_wrapper(threading.RLock) + + # queue patching + try: + import queue # @UnresolvedImport + queue.Queue = factory_wrapper(queue.Queue) + except: + import Queue + Queue.Queue = factory_wrapper(Queue.Queue) + diff --git a/ptvsd/pydevd/pydevd_file_utils.py b/ptvsd/pydevd/pydevd_file_utils.py new file mode 100644 index 00000000..82d6f771 --- /dev/null +++ b/ptvsd/pydevd/pydevd_file_utils.py @@ -0,0 +1,421 @@ +r''' + This module provides utilities to get the absolute filenames so that we can be sure that: + - The case of a file will match the actual file in the filesystem (otherwise breakpoints won't be hit). 
+ - Providing means for the user to make path conversions when doing a remote debugging session in + one machine and debugging in another. + + To do that, the PATHS_FROM_ECLIPSE_TO_PYTHON constant must be filled with the appropriate paths. + + @note: + in this context, the server is where your python process is running + and the client is where eclipse is running. + + E.g.: + If the server (your python process) has the structure + /user/projects/my_project/src/package/module1.py + + and the client has: + c:\my_project\src\package\module1.py + + the PATHS_FROM_ECLIPSE_TO_PYTHON would have to be: + PATHS_FROM_ECLIPSE_TO_PYTHON = [(r'c:\my_project\src', r'/user/projects/my_project/src')] + + alternatively, this can be set with an environment variable from the command line: + set PATHS_FROM_ECLIPSE_TO_PYTHON=[['c:\my_project\src','/user/projects/my_project/src']] + + @note: DEBUG_CLIENT_SERVER_TRANSLATION can be set to True to debug the result of those translations + + @note: the case of the paths is important! Note that this can be tricky to get right when one machine + uses a case-independent filesystem and the other uses a case-dependent filesystem (if the system being + debugged is case-independent, 'normcase()' should be used on the paths defined in PATHS_FROM_ECLIPSE_TO_PYTHON). + + @note: all the paths with breakpoints must be translated (otherwise they won't be found in the server) + + @note: to enable remote debugging in the target machine (pydev extensions in the eclipse installation) + import pydevd;pydevd.settrace(host, stdoutToServer, stderrToServer, port, suspend) + + see parameter docs on pydevd.py + + @note: for doing a remote debugging session, all the pydevd_ files must be on the server accessible + through the PYTHONPATH (and the PATHS_FROM_ECLIPSE_TO_PYTHON only needs to be set on the target + machine for the paths that'll actually have breakpoints). +''' + + + + +from _pydevd_bundle.pydevd_constants import IS_PY2, IS_PY3K +from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding +import json +import os +import os.path +import sys +import traceback + +os_normcase = os.path.normcase +basename = os.path.basename +exists = os.path.exists +join = os.path.join + +try: + rPath = os.path.realpath #@UndefinedVariable +except: + # jython does not support os.path.realpath + # realpath is a no-op on systems without islink support + rPath = os.path.abspath + +#defined as a list of tuples where the 1st element of the tuple is the path in the client machine +#and the 2nd element is the path in the server machine. +#see module docstring for more details. 
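+# For illustration (values are made up): setting the environment variable to the JSON string
+# '[["C:/my_project/src", "/user/projects/my_project/src"]]' before starting the debugger
+# would be parsed by the json.loads call below into
+# [("C:/my_project/src", "/user/projects/my_project/src")] after the list-to-tuple conversion.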
+try: + PATHS_FROM_ECLIPSE_TO_PYTHON = json.loads(os.environ.get('PATHS_FROM_ECLIPSE_TO_PYTHON', '[]')) +except Exception: + sys.stderr.write('Error loading PATHS_FROM_ECLIPSE_TO_PYTHON from environment variable.\n') + traceback.print_exc() + PATHS_FROM_ECLIPSE_TO_PYTHON = [] +else: + if not isinstance(PATHS_FROM_ECLIPSE_TO_PYTHON, list): + sys.stderr.write('Expected PATHS_FROM_ECLIPSE_TO_PYTHON loaded from environment variable to be a list.\n') + PATHS_FROM_ECLIPSE_TO_PYTHON = [] + else: + # Converting json lists to tuple + PATHS_FROM_ECLIPSE_TO_PYTHON = [tuple(x) for x in PATHS_FROM_ECLIPSE_TO_PYTHON] + + +#example: +#PATHS_FROM_ECLIPSE_TO_PYTHON = [ +# (r'd:\temp\temp_workspace_2\test_python\src\yyy\yyy', +# r'd:\temp\temp_workspace_2\test_python\src\hhh\xxx') +#] + + +normcase = os_normcase # May be rebound on set_ide_os + +convert_to_long_pathname = None +if sys.platform == 'win32': + try: + import ctypes + except ImportError: + pass + else: + def convert_to_long_pathname(filename): + buf = ctypes.create_unicode_buffer(260) + GetLongPathName = ctypes.windll.kernel32.GetLongPathNameW + if IS_PY2: + filename = unicode(filename, getfilesystemencoding()) + rv = GetLongPathName(filename, buf, 260) + if rv != 0 and rv <= 260: + return buf.value + return filename + + +def norm_case(filename): + # `normcase` doesn't lower case on Python 2 for non-English locale, but Java side does it, + # so we should do it manually + if '~' in filename and convert_to_long_pathname: + filename = convert_to_long_pathname(filename) + + filename = os_normcase(filename) + enc = getfilesystemencoding() + if IS_PY3K or enc is None or enc.lower() == "utf-8": + return filename + try: + return filename.decode(enc).lower().encode(enc) + except: + return filename + + +def set_ide_os(os): + ''' + We need to set the IDE os because the host where the code is running may be + actually different from the client (and the point is that we want the proper + paths to translate from the client to the server). + ''' + global normcase + if os == 'UNIX': + normcase = lambda f:f #Change to no-op if the client side is on unix/mac. + else: + if sys.platform == 'win32': + normcase = norm_case + else: + normcase = os_normcase + + # After setting the ide OS, apply the normcase to the existing paths. + + # Note: not using enumerate nor list comprehension because it may not be available in older python versions... 
+ i = 0 + for path in PATHS_FROM_ECLIPSE_TO_PYTHON[:]: + PATHS_FROM_ECLIPSE_TO_PYTHON[i] = (normcase(path[0]), normcase(path[1])) + i += 1 + + +DEBUG_CLIENT_SERVER_TRANSLATION = False + +#caches filled as requested during the debug session +NORM_PATHS_CONTAINER = {} +NORM_PATHS_AND_BASE_CONTAINER = {} +NORM_FILENAME_TO_SERVER_CONTAINER = {} +NORM_FILENAME_TO_CLIENT_CONTAINER = {} + + +def _NormFile(filename): + abs_path, real_path = _NormPaths(filename) + return real_path + + +def _AbsFile(filename): + abs_path, real_path = _NormPaths(filename) + return abs_path + + +# Returns tuple of absolute path and real path for given filename +def _NormPaths(filename): + try: + return NORM_PATHS_CONTAINER[filename] + except KeyError: + abs_path = _NormPath(filename, os.path.abspath) + real_path = _NormPath(filename, rPath) + + NORM_PATHS_CONTAINER[filename] = abs_path, real_path + return abs_path, real_path + + +def _NormPath(filename, normpath): + r = normpath(filename) + #cache it for fast access later + ind = r.find('.zip') + if ind == -1: + ind = r.find('.egg') + if ind != -1: + ind+=4 + zip_path = r[:ind] + if r[ind] == "!": + ind+=1 + inner_path = r[ind:] + if inner_path.startswith('/') or inner_path.startswith('\\'): + inner_path = inner_path[1:] + r = join(normcase(zip_path), inner_path) + else: + r = normcase(r) + return r + + +ZIP_SEARCH_CACHE = {} +def exists(file): + if os.path.exists(file): + return file + + ind = file.find('.zip') + if ind == -1: + ind = file.find('.egg') + + if ind != -1: + ind+=4 + zip_path = file[:ind] + if file[ind] == "!": + ind+=1 + inner_path = file[ind:] + try: + zip = ZIP_SEARCH_CACHE[zip_path] + except KeyError: + try: + import zipfile + zip = zipfile.ZipFile(zip_path, 'r') + ZIP_SEARCH_CACHE[zip_path] = zip + except : + return None + + try: + if inner_path.startswith('/') or inner_path.startswith('\\'): + inner_path = inner_path[1:] + + info = zip.getinfo(inner_path.replace('\\', '/')) + + return join(zip_path, inner_path) + except KeyError: + return None + return None + + +#Now, let's do a quick test to see if we're working with a version of python that has no problems +#related to the names generated... 
+try: + try: + code = rPath.func_code + except AttributeError: + code = rPath.__code__ + if not exists(_NormFile(code.co_filename)): + sys.stderr.write('-------------------------------------------------------------------------------\n') + sys.stderr.write('pydev debugger: CRITICAL WARNING: This version of python seems to be incorrectly compiled (internal generated filenames are not absolute)\n') + sys.stderr.write('pydev debugger: The debugger may still function, but it will work slower and may miss breakpoints.\n') + sys.stderr.write('pydev debugger: Related bug: http://bugs.python.org/issue1666807\n') + sys.stderr.write('-------------------------------------------------------------------------------\n') + sys.stderr.flush() + + NORM_SEARCH_CACHE = {} + + initial_norm_paths = _NormPaths + def _NormPaths(filename): #Let's redefine _NormPaths to work with paths that may be incorrect + try: + return NORM_SEARCH_CACHE[filename] + except KeyError: + abs_path, real_path = initial_norm_paths(filename) + if not exists(real_path): + #We must actually go on and check if we can find it as if it was a relative path for some of the paths in the pythonpath + for path in sys.path: + abs_path, real_path = initial_norm_paths(join(path, filename)) + if exists(real_path): + break + else: + sys.stderr.write('pydev debugger: Unable to find real location for: %s\n' % (filename,)) + abs_path = filename + real_path = filename + + NORM_SEARCH_CACHE[filename] = abs_path, real_path + return abs_path, real_path + +except: + #Don't fail if there's something not correct here -- but at least print it to the user so that we can correct that + traceback.print_exc() + +norm_file_to_client = _AbsFile +norm_file_to_server = _NormFile + +def setup_client_server_paths(paths): + '''paths is the same format as PATHS_FROM_ECLIPSE_TO_PYTHON''' + + global NORM_FILENAME_TO_SERVER_CONTAINER + global NORM_FILENAME_TO_CLIENT_CONTAINER + global PATHS_FROM_ECLIPSE_TO_PYTHON + global norm_file_to_client + global norm_file_to_server + + NORM_FILENAME_TO_SERVER_CONTAINER = {} + NORM_FILENAME_TO_CLIENT_CONTAINER = {} + PATHS_FROM_ECLIPSE_TO_PYTHON = paths[:] + + if not PATHS_FROM_ECLIPSE_TO_PYTHON: + #no translation step needed (just inline the calls) + norm_file_to_client = _AbsFile + norm_file_to_server = _NormFile + return + + #Work on the client and server slashes. + eclipse_sep = None + python_sep = None + for eclipse_prefix, server_prefix in PATHS_FROM_ECLIPSE_TO_PYTHON: + if eclipse_sep is not None and python_sep is not None: + break + + if eclipse_sep is None: + for c in eclipse_prefix: + if c in ('/', '\\'): + eclipse_sep = c + break + + if python_sep is None: + for c in server_prefix: + if c in ('/', '\\'): + python_sep = c + break + + #If they're the same or one of them cannot be determined, just make it all None. + if eclipse_sep == python_sep or eclipse_sep is None or python_sep is None: + eclipse_sep = python_sep = None + + + #only setup translation functions if absolutely needed! 
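+    # Illustrative mapping (paths made up, mirroring the module docstring example): with
+    # PATHS_FROM_ECLIPSE_TO_PYTHON = [(r'c:\my_project\src', '/user/projects/my_project/src')],
+    # _norm_file_to_server maps r'c:\my_project\src\package\module1.py' to (roughly)
+    # '/user/projects/my_project/src/package/module1.py' via prefix replacement plus separator
+    # conversion, and _norm_file_to_client performs the inverse translation.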
+ def _norm_file_to_server(filename): + #Eclipse will send the passed filename to be translated to the python process + #So, this would be 'NormFileFromEclipseToPython' + try: + return NORM_FILENAME_TO_SERVER_CONTAINER[filename] + except KeyError: + #used to translate a path from the client to the debug server + translated = normcase(filename) + for eclipse_prefix, server_prefix in PATHS_FROM_ECLIPSE_TO_PYTHON: + if translated.startswith(eclipse_prefix): + if DEBUG_CLIENT_SERVER_TRANSLATION: + sys.stderr.write('pydev debugger: replacing to server: %s\n' % (translated,)) + translated = translated.replace(eclipse_prefix, server_prefix) + if DEBUG_CLIENT_SERVER_TRANSLATION: + sys.stderr.write('pydev debugger: sent to server: %s\n' % (translated,)) + break + else: + if DEBUG_CLIENT_SERVER_TRANSLATION: + sys.stderr.write('pydev debugger: to server: unable to find matching prefix for: %s in %s\n' % \ + (translated, [x[0] for x in PATHS_FROM_ECLIPSE_TO_PYTHON])) + + #Note that when going to the server, we do the replace first and only later do the norm file. + if eclipse_sep is not None: + translated = translated.replace(eclipse_sep, python_sep) + translated = _NormFile(translated) + + NORM_FILENAME_TO_SERVER_CONTAINER[filename] = translated + return translated + + def _norm_file_to_client(filename): + #The result of this method will be passed to eclipse + #So, this would be 'NormFileFromPythonToEclipse' + try: + return NORM_FILENAME_TO_CLIENT_CONTAINER[filename] + except KeyError: + #used to translate a path from the debug server to the client + translated = _NormFile(filename) + for eclipse_prefix, python_prefix in PATHS_FROM_ECLIPSE_TO_PYTHON: + if translated.startswith(python_prefix): + if DEBUG_CLIENT_SERVER_TRANSLATION: + sys.stderr.write('pydev debugger: replacing to client: %s\n' % (translated,)) + translated = translated.replace(python_prefix, eclipse_prefix) + if DEBUG_CLIENT_SERVER_TRANSLATION: + sys.stderr.write('pydev debugger: sent to client: %s\n' % (translated,)) + break + else: + if DEBUG_CLIENT_SERVER_TRANSLATION: + sys.stderr.write('pydev debugger: to client: unable to find matching prefix for: %s in %s\n' % \ + (translated, [x[1] for x in PATHS_FROM_ECLIPSE_TO_PYTHON])) + + if eclipse_sep is not None: + translated = translated.replace(python_sep, eclipse_sep) + + #The resulting path is not in the python process, so, we cannot do a _NormFile here, + #only at the beginning of this method. 
+ NORM_FILENAME_TO_CLIENT_CONTAINER[filename] = translated + return translated + + norm_file_to_server = _norm_file_to_server + norm_file_to_client = _norm_file_to_client + +setup_client_server_paths(PATHS_FROM_ECLIPSE_TO_PYTHON) + +# For given file f returns tuple of its absolute path, real path and base name +def get_abs_path_real_path_and_base_from_file(f): + try: + return NORM_PATHS_AND_BASE_CONTAINER[f] + except: + abs_path, real_path = _NormPaths(f) + base = basename(real_path) + ret = abs_path, real_path, base + NORM_PATHS_AND_BASE_CONTAINER[f] = ret + return ret + + +def get_abs_path_real_path_and_base_from_frame(frame): + try: + return NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] + except: + #This one is just internal (so, does not need any kind of client-server translation) + f = frame.f_code.co_filename + if f is not None and f.startswith (('build/bdist.','build\\bdist.')): + # files from eggs in Python 2.7 have paths like build/bdist.linux-x86_64/egg/ + f = frame.f_globals['__file__'] + if f is not None: + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + + ret = get_abs_path_real_path_and_base_from_file(f) + # Also cache based on the frame.f_code.co_filename (if we had it inside build/bdist it can make a difference). + NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename] = ret + return ret diff --git a/ptvsd/pydevd/pydevd_plugins/__init__.py b/ptvsd/pydevd/pydevd_plugins/__init__.py new file mode 100644 index 00000000..afff0c07 --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/__init__.py @@ -0,0 +1,5 @@ +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/ptvsd/pydevd/pydevd_plugins/django_debug.py b/ptvsd/pydevd/pydevd_plugins/django_debug.py new file mode 100644 index 00000000..0df8fc63 --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/django_debug.py @@ -0,0 +1,442 @@ +from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK +import inspect +from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, get_thread_id, dict_iter_items, DJANGO_SUSPEND, IS_PY2 +from pydevd_file_utils import get_abs_path_real_path_and_base_from_file, normcase +from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, get_exception_name +from _pydevd_bundle import pydevd_vars +import traceback +from _pydev_bundle import pydev_log +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode, just_raised + +IS_DJANGO18 = False +IS_DJANGO19 = False +IS_DJANGO19_OR_HIGHER = False +try: + import django + version = django.VERSION + IS_DJANGO18 = version[0] == 1 and version[1] == 8 + IS_DJANGO19 = version[0] == 1 and version[1] == 9 + IS_DJANGO19_OR_HIGHER = ((version[0] == 1 and version[1] >= 9) or version[0] > 1) +except: + pass + + +class DjangoLineBreakpoint(LineBreakpoint): + def __init__(self, file, line, condition, func_name, expression): + self.file = file + LineBreakpoint.__init__(self, line, condition, func_name, expression) + + def is_triggered(self, template_frame_file, template_frame_line): + return self.file == template_frame_file and self.line == template_frame_line + + def __str__(self): + return "DjangoLineBreakpoint: %s-%d" %(self.file, self.line) + + +def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name): + if type == 'django-line': + breakpoint = DjangoLineBreakpoint(file, line, condition, func_name, expression) + if not 
hasattr(pydb, 'django_breakpoints'): + _init_plugin_breaks(pydb) + return breakpoint, pydb.django_breakpoints + return None + +def add_exception_breakpoint(plugin, pydb, type, exception): + if type == 'django': + if not hasattr(pydb, 'django_exception_break'): + _init_plugin_breaks(pydb) + pydb.django_exception_break[exception] = True + pydb.set_tracing_for_untraced_contexts_if_not_frame_eval() + return True + return False + +def _init_plugin_breaks(pydb): + pydb.django_exception_break = {} + pydb.django_breakpoints = {} + +def remove_exception_breakpoint(plugin, pydb, type, exception): + if type == 'django': + try: + del pydb.django_exception_break[exception] + return True + except: + pass + return False + +def get_breakpoints(plugin, pydb, type): + if type == 'django-line': + return pydb.django_breakpoints + return None + +def _inherits(cls, *names): + if cls.__name__ in names: + return True + inherits_node = False + for base in inspect.getmro(cls): + if base.__name__ in names: + inherits_node = True + break + return inherits_node + + +def _is_django_render_call(frame): + try: + name = frame.f_code.co_name + if name != 'render': + return False + + if 'self' not in frame.f_locals: + return False + + cls = frame.f_locals['self'].__class__ + + inherits_node = _inherits(cls, 'Node') + + if not inherits_node: + return False + + clsname = cls.__name__ + if IS_DJANGO19: + # in Django 1.9 we need to save the flag that there is included template + if clsname == 'IncludeNode': + if 'context' in frame.f_locals: + context = frame.f_locals['context'] + context._has_included_template = True + + return clsname != 'TextNode' and clsname != 'NodeList' + except: + traceback.print_exc() + return False + + +def _is_django_context_get_call(frame): + try: + if 'self' not in frame.f_locals: + return False + + cls = frame.f_locals['self'].__class__ + + return _inherits(cls, 'BaseContext') + except: + traceback.print_exc() + return False + + +def _is_django_resolve_call(frame): + try: + name = frame.f_code.co_name + if name != '_resolve_lookup': + return False + + if 'self' not in frame.f_locals: + return False + + cls = frame.f_locals['self'].__class__ + + clsname = cls.__name__ + return clsname == 'Variable' + except: + traceback.print_exc() + return False + + +def _is_django_suspended(thread): + return thread.additional_info.suspend_type == DJANGO_SUSPEND + + +def suspend_django(main_debugger, thread, frame, cmd=CMD_SET_BREAK): + frame = DjangoTemplateFrame(frame) + + if frame.f_lineno is None: + return None + + pydevd_vars.add_additional_frame_by_id(get_thread_id(thread), {id(frame): frame}) + + main_debugger.set_suspend(thread, cmd) + thread.additional_info.suspend_type = DJANGO_SUSPEND + + return frame + + +def _find_django_render_frame(frame): + while frame is not None and not _is_django_render_call(frame): + frame = frame.f_back + + return frame + +#======================================================================================================================= +# Django Frame +#======================================================================================================================= + +def _read_file(filename): + # type: (str) -> str + if IS_PY2: + f = open(filename, 'r') + else: + f = open(filename, 'r', encoding='utf-8', errors='replace') + s = f.read() + f.close() + return s + + +def _offset_to_line_number(text, offset): + curLine = 1 + curOffset = 0 + while curOffset < offset: + if curOffset == len(text): + return -1 + c = text[curOffset] + if c == '\n': + curLine += 1 + elif c == 
'\r': + curLine += 1 + if curOffset < len(text) and text[curOffset + 1] == '\n': + curOffset += 1 + + curOffset += 1 + + return curLine + + +def _get_source_django_18_or_lower(frame): + # This method is usable only for the Django <= 1.8 + try: + node = frame.f_locals['self'] + if hasattr(node, 'source'): + return node.source + else: + if IS_DJANGO18: + # The debug setting was changed since Django 1.8 + pydev_log.error_once("WARNING: Template path is not available. Set the 'debug' option in the OPTIONS of a DjangoTemplates " + "backend.") + else: + # The debug setting for Django < 1.8 + pydev_log.error_once("WARNING: Template path is not available. Please set TEMPLATE_DEBUG=True in your settings.py to make " + "django template breakpoints working") + return None + + except: + pydev_log.debug(traceback.format_exc()) + return None + + +def _get_template_file_name(frame): + try: + if IS_DJANGO19: + # The Node source was removed since Django 1.9 + if 'context' in frame.f_locals: + context = frame.f_locals['context'] + if hasattr(context, '_has_included_template'): + # if there was included template we need to inspect the previous frames and find its name + back = frame.f_back + while back is not None and frame.f_code.co_name in ('render', '_render'): + locals = back.f_locals + if 'self' in locals: + self = locals['self'] + if self.__class__.__name__ == 'Template' and hasattr(self, 'origin') and \ + hasattr(self.origin, 'name'): + return normcase(self.origin.name) + back = back.f_back + else: + if hasattr(context, 'template') and hasattr(context.template, 'origin') and \ + hasattr(context.template.origin, 'name'): + return normcase(context.template.origin.name) + return None + elif IS_DJANGO19_OR_HIGHER: + # For Django 1.10 and later there is much simpler way to get template name + if 'self' in frame.f_locals: + self = frame.f_locals['self'] + if hasattr(self, 'origin') and hasattr(self.origin, 'name'): + return normcase(self.origin.name) + return None + + source = _get_source_django_18_or_lower(frame) + if source is None: + pydev_log.debug("Source is None\n") + return None + fname = source[0].name + + if fname == '': + pydev_log.debug("Source name is %s\n" % fname) + return None + else: + abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_file(fname) + return abs_path_real_path_and_base[1] + except: + pydev_log.debug(traceback.format_exc()) + return None + + +def _get_template_line(frame): + if IS_DJANGO19_OR_HIGHER: + # The Node source was removed since Django 1.9 + self = frame.f_locals['self'] + if hasattr(self, 'token') and hasattr(self.token, 'lineno'): + return self.token.lineno + else: + return None + source = _get_source_django_18_or_lower(frame) + file_name = _get_template_file_name(frame) + try: + return _offset_to_line_number(_read_file(file_name), source[1][0]) + except: + return None + + +class DjangoTemplateFrame: + def __init__(self, frame): + file_name = _get_template_file_name(frame) + self.back_context = frame.f_locals['context'] + self.f_code = FCode('Django Template', file_name) + self.f_lineno = _get_template_line(frame) + self.f_back = frame + self.f_globals = {} + self.f_locals = self.collect_context(self.back_context) + self.f_trace = None + + def collect_context(self, context): + res = {} + try: + for d in context.dicts: + for k, v in d.items(): + res[k] = v + except AttributeError: + pass + return res + + def _change_variable(self, name, value): + for d in self.back_context.dicts: + for k, v in d.items(): + if k == name: + d[k] = value + + +def 
change_variable(plugin, frame, attr, expression): + if isinstance(frame, DjangoTemplateFrame): + result = eval(expression, frame.f_globals, frame.f_locals) + frame._change_variable(attr, result) + return result + return False + + +def _is_django_exception_break_context(frame): + try: + name = frame.f_code.co_name + except: + name = None + return name in ['_resolve_lookup', 'find_template'] + + +#======================================================================================================================= +# Django Step Commands +#======================================================================================================================= + +def can_not_skip(plugin, main_debugger, pydb_frame, frame): + return main_debugger.django_breakpoints and _is_django_render_call(frame) + + +def has_exception_breaks(plugin): + if len(plugin.main_debugger.django_exception_break) > 0: + return True + return False + + +def has_line_breaks(plugin): + for file, breakpoints in dict_iter_items(plugin.main_debugger.django_breakpoints): + if len(breakpoints) > 0: + return True + return False + + +def cmd_step_into(plugin, main_debugger, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + if _is_django_suspended(thread): + stop_info['django_stop'] = event == 'call' and _is_django_render_call(frame) + plugin_stop = stop_info['django_stop'] + stop = stop and _is_django_resolve_call(frame.f_back) and not _is_django_context_get_call(frame) + if stop: + info.pydev_django_resolve_frame = True # we remember that we've go into python code from django rendering frame + return stop, plugin_stop + + +def cmd_step_over(plugin, main_debugger, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + if _is_django_suspended(thread): + stop_info['django_stop'] = event == 'call' and _is_django_render_call(frame) + plugin_stop = stop_info['django_stop'] + stop = False + return stop, plugin_stop + else: + if event == 'return' and info.pydev_django_resolve_frame and _is_django_resolve_call(frame.f_back): + #we return to Django suspend mode and should not stop before django rendering frame + info.pydev_step_stop = frame.f_back + info.pydev_django_resolve_frame = False + thread.additional_info.suspend_type = DJANGO_SUSPEND + stop = info.pydev_step_stop is frame and event in ('line', 'return') + return stop, plugin_stop + + +def stop(plugin, main_debugger, frame, event, args, stop_info, arg, step_cmd): + main_debugger = args[0] + thread = args[3] + if 'django_stop' in stop_info and stop_info['django_stop']: + frame = suspend_django(main_debugger, thread, frame, step_cmd) + if frame: + main_debugger.do_wait_suspend(thread, frame, event, arg) + return True + return False + + +def get_breakpoint(plugin, main_debugger, pydb_frame, frame, event, args): + main_debugger = args[0] + filename = args[1] + info = args[2] + flag = False + django_breakpoint = None + new_frame = None + type = 'django' + + if event == 'call' and info.pydev_state != STATE_SUSPEND and \ + main_debugger.django_breakpoints and _is_django_render_call(frame): + filename = _get_template_file_name(frame) + pydev_log.debug("Django is rendering a template: %s\n" % filename) + django_breakpoints_for_file = main_debugger.django_breakpoints.get(filename) + if django_breakpoints_for_file: + pydev_log.debug("Breakpoints for that file: %s\n" % django_breakpoints_for_file) + template_line = _get_template_line(frame) + pydev_log.debug("Tracing template line: %s\n" % 
str(template_line)) + + if template_line in django_breakpoints_for_file: + django_breakpoint = django_breakpoints_for_file[template_line] + flag = True + new_frame = DjangoTemplateFrame(frame) + return flag, django_breakpoint, new_frame, type + + +def suspend(plugin, main_debugger, thread, frame, bp_type): + if bp_type == 'django': + return suspend_django(main_debugger, thread, frame) + return None + +def exception_break(plugin, main_debugger, pydb_frame, frame, args, arg): + main_debugger = args[0] + thread = args[3] + exception, value, trace = arg + if main_debugger.django_exception_break and \ + get_exception_name(exception) in ['VariableDoesNotExist', 'TemplateDoesNotExist', 'TemplateSyntaxError'] and \ + just_raised(trace) and _is_django_exception_break_context(frame): + render_frame = _find_django_render_frame(frame) + if render_frame: + suspend_frame = suspend_django(main_debugger, thread, render_frame, CMD_ADD_EXCEPTION_BREAK) + if suspend_frame: + add_exception_to_frame(suspend_frame, (exception, value, trace)) + flag = True + thread.additional_info.pydev_message = 'VariableDoesNotExist' + suspend_frame.f_back = frame + frame = suspend_frame + return (flag, frame) + return None \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_plugins/extensions/README.md b/ptvsd/pydevd/pydevd_plugins/extensions/README.md new file mode 100644 index 00000000..030e303e --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/extensions/README.md @@ -0,0 +1,30 @@ +Extensions allow extending the debugger without modifying the debugger code. This is implemented with explicit namespace +packages. + +To implement your own extension: + +1. Ensure that the root folder of your extension is in sys.path (add it to PYTHONPATH) +2. Ensure that your module follows the directory structure below +3. The ``__init__.py`` files inside the pydevd_plugin and extension folder must contain the preamble below, +and nothing else. +Preamble: +```python +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) +``` +4. Your plugin name inside the extensions folder must start with `"pydevd_plugin"` +5. Implement one or more of the abstract base classes defined in `_pydevd_bundle.pydevd_extension_api`. This can be done +by either inheriting from them or registering with the abstract base class. 
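+
+For example, a minimal type-resolve plugin might look like the sketch below (the module and
+class names are illustrative; the provider methods mirror the bundled numpy plugin in
+`types/pydevd_plugin_numpy_types.py`):
+```python
+# pydevd_plugins/extensions/types/pydevd_plugin_mytype.py  (hypothetical file name)
+from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider
+
+
+class MyTypeResolveProvider(object):
+    # Called for objects shown in the variables view; claim only the types this plugin handles.
+    def can_provide(self, type_object, type_name):
+        return type_name == 'MyType'  # illustrative check
+
+    # Return the value of a single child attribute previously listed by get_dictionary().
+    def resolve(self, obj, attribute):
+        return getattr(obj, attribute, None)
+
+    # Return the mapping of child names to values the debugger should display for obj.
+    def get_dictionary(self, obj):
+        return dict(getattr(obj, '__dict__', {}))
+
+
+# Registering with (or inheriting from) the abstract base class makes the provider
+# discoverable, provided the path and naming rules above are followed.
+TypeResolveProvider.register(MyTypeResolveProvider)
+```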
+ +* Directory structure: +``` +|-- root_directory-> must be on python path +| |-- pydevd_plugins +| | |-- __init__.py -> must contain preamble +| | |-- extensions +| | | |-- __init__.py -> must contain preamble +| | | |-- pydevd_plugin_plugin_name.py +``` \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_plugins/extensions/__init__.py b/ptvsd/pydevd/pydevd_plugins/extensions/__init__.py new file mode 100644 index 00000000..afff0c07 --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/extensions/__init__.py @@ -0,0 +1,5 @@ +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/ptvsd/pydevd/pydevd_plugins/extensions/types/__init__.py b/ptvsd/pydevd/pydevd_plugins/extensions/types/__init__.py new file mode 100644 index 00000000..afff0c07 --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/extensions/types/__init__.py @@ -0,0 +1,5 @@ +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py b/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py new file mode 100644 index 00000000..7c5a4fee --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_helpers.py @@ -0,0 +1,26 @@ +import sys + + +def find_cached_module(mod_name): + return sys.modules.get(mod_name, None) + +def find_mod_attr(mod_name, attr): + mod = find_cached_module(mod_name) + if mod is None: + return None + return getattr(mod, attr, None) + + +def find_class_name(val): + class_name = str(val.__class__) + if class_name.find('.') != -1: + class_name = class_name.split('.')[-1] + + elif class_name.find("'") != -1: #does not have '.' 
(could be something like ) + class_name = class_name[class_name.index("'") + 1:] + + if class_name.endswith("'>"): + class_name = class_name[:-2] + + return class_name + diff --git a/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py b/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py new file mode 100644 index 00000000..0a4dc8e9 --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_plugin_numpy_types.py @@ -0,0 +1,87 @@ +from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider +from _pydevd_bundle.pydevd_resolver import defaultResolver, MAX_ITEMS_TO_HANDLE, TOO_LARGE_ATTR, TOO_LARGE_MSG +from .pydevd_helpers import find_mod_attr + + +# ======================================================================================================================= +# NdArrayResolver +# ======================================================================================================================= +class NdArrayResolver: pass + + +class NdArrayItemsContainer: pass + + +class NDArrayTypeResolveProvider(object): + def can_provide(self, type_object, type_name): + nd_array = find_mod_attr('numpy', 'ndarray') + return nd_array is not None and issubclass(type_object, nd_array) + + ''' + This resolves a numpy ndarray returning some metadata about the NDArray + ''' + + def is_numeric(self, obj): + if not hasattr(obj, 'dtype'): + return False + return obj.dtype.kind in 'biufc' + + def resolve(self, obj, attribute): + if attribute == '__internals__': + return defaultResolver.get_dictionary(obj) + if attribute == 'min': + if self.is_numeric(obj) and obj.size > 0: + return obj.min() + else: + return None + if attribute == 'max': + if self.is_numeric(obj) and obj.size > 0: + return obj.max() + else: + return None + if attribute == 'shape': + return obj.shape + if attribute == 'dtype': + return obj.dtype + if attribute == 'size': + return obj.size + if attribute.startswith('['): + container = NdArrayItemsContainer() + i = 0 + format_str = '%0' + str(int(len(str(len(obj))))) + 'd' + for item in obj: + setattr(container, format_str % i, item) + i += 1 + if i > MAX_ITEMS_TO_HANDLE: + setattr(container, TOO_LARGE_ATTR, TOO_LARGE_MSG) + break + return container + return None + + def get_dictionary(self, obj): + ret = dict() + ret['__internals__'] = defaultResolver.get_dictionary(obj) + if obj.size > 1024 * 1024: + ret['min'] = 'ndarray too big, calculating min would slow down debugging' + ret['max'] = 'ndarray too big, calculating max would slow down debugging' + elif obj.size == 0: + ret['min'] = 'array is empty' + ret['max'] = 'array is empty' + else: + if self.is_numeric(obj): + ret['min'] = obj.min() + ret['max'] = obj.max() + else: + ret['min'] = 'not a numeric object' + ret['max'] = 'not a numeric object' + ret['shape'] = obj.shape + ret['dtype'] = obj.dtype + ret['size'] = obj.size + ret['[0:%s] ' % (len(obj))] = list(obj[0:MAX_ITEMS_TO_HANDLE]) + return ret + + +import sys + +if not sys.platform.startswith("java"): + TypeResolveProvider.register(NDArrayTypeResolveProvider) diff --git a/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py b/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py new file mode 100644 index 00000000..8d64095e --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/extensions/types/pydevd_plugins_django_form_str.py @@ -0,0 +1,16 @@ +from _pydevd_bundle.pydevd_extension_api import StrPresentationProvider +from .pydevd_helpers import find_mod_attr, 
find_class_name + + +class DjangoFormStr(object): + def can_provide(self, type_object, type_name): + form_class = find_mod_attr('django.forms', 'Form') + return form_class is not None and issubclass(type_object, form_class) + + def get_str(self, val): + return '%s: %r' % (find_class_name(val), val) + +import sys + +if not sys.platform.startswith("java"): + StrPresentationProvider.register(DjangoFormStr) diff --git a/ptvsd/pydevd/pydevd_plugins/jinja2_debug.py b/ptvsd/pydevd/pydevd_plugins/jinja2_debug.py new file mode 100644 index 00000000..c4fa8915 --- /dev/null +++ b/ptvsd/pydevd/pydevd_plugins/jinja2_debug.py @@ -0,0 +1,385 @@ +import traceback +from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, get_exception_name +from _pydevd_bundle.pydevd_constants import get_thread_id, STATE_SUSPEND, dict_iter_items, dict_keys, JINJA2_SUSPEND +from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK +from _pydevd_bundle import pydevd_vars +from pydevd_file_utils import get_abs_path_real_path_and_base_from_file +from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode + +class Jinja2LineBreakpoint(LineBreakpoint): + + def __init__(self, file, line, condition, func_name, expression): + self.file = file + LineBreakpoint.__init__(self, line, condition, func_name, expression) + + def is_triggered(self, template_frame_file, template_frame_line): + return self.file == template_frame_file and self.line == template_frame_line + + def __str__(self): + return "Jinja2LineBreakpoint: %s-%d" %(self.file, self.line) + + +def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name): + result = None + if type == 'jinja2-line': + breakpoint = Jinja2LineBreakpoint(file, line, condition, func_name, expression) + if not hasattr(pydb, 'jinja2_breakpoints'): + _init_plugin_breaks(pydb) + result = breakpoint, pydb.jinja2_breakpoints + return result + return result + +def add_exception_breakpoint(plugin, pydb, type, exception): + if type == 'jinja2': + if not hasattr(pydb, 'jinja2_exception_break'): + _init_plugin_breaks(pydb) + pydb.jinja2_exception_break[exception] = True + pydb.set_tracing_for_untraced_contexts_if_not_frame_eval() + return True + return False + +def _init_plugin_breaks(pydb): + pydb.jinja2_exception_break = {} + pydb.jinja2_breakpoints = {} + +def remove_exception_breakpoint(plugin, pydb, type, exception): + if type == 'jinja2': + try: + del pydb.jinja2_exception_break[exception] + return True + except: + pass + return False + +def get_breakpoints(plugin, pydb, type): + if type == 'jinja2-line': + return pydb.jinja2_breakpoints + return None + + +def _is_jinja2_render_call(frame): + try: + name = frame.f_code.co_name + if "__jinja_template__" in frame.f_globals and name in ("root", "loop", "macro") or name.startswith("block_"): + return True + return False + except: + traceback.print_exc() + return False + + +def _suspend_jinja2(pydb, thread, frame, cmd=CMD_SET_BREAK, message=None): + frame = Jinja2TemplateFrame(frame) + + if frame.f_lineno is None: + return None + + pydevd_vars.add_additional_frame_by_id(get_thread_id(thread), {id(frame): frame}) + pydb.set_suspend(thread, cmd) + + thread.additional_info.suspend_type = JINJA2_SUSPEND + if cmd == CMD_ADD_EXCEPTION_BREAK: + # send exception name as message + if message: + message = str(message) + thread.additional_info.pydev_message = message + + return frame + +def _is_jinja2_suspended(thread): + return thread.additional_info.suspend_type == JINJA2_SUSPEND + +def 
_is_jinja2_context_call(frame): + return "_Context__obj" in frame.f_locals + +def _is_jinja2_internal_function(frame): + return 'self' in frame.f_locals and frame.f_locals['self'].__class__.__name__ in \ + ('LoopContext', 'TemplateReference', 'Macro', 'BlockReference') + +def _find_jinja2_render_frame(frame): + while frame is not None and not _is_jinja2_render_call(frame): + frame = frame.f_back + + return frame + + +#======================================================================================================================= +# Jinja2 Frame +#======================================================================================================================= + +class Jinja2TemplateFrame: + + def __init__(self, frame): + file_name = _get_jinja2_template_filename(frame) + self.back_context = None + if 'context' in frame.f_locals: + #sometimes we don't have 'context', e.g. in macros + self.back_context = frame.f_locals['context'] + self.f_code = FCode('template', file_name) + self.f_lineno = _get_jinja2_template_line(frame) + self.f_back = frame + self.f_globals = {} + self.f_locals = self.collect_context(frame) + self.f_trace = None + + def _get_real_var_name(self, orig_name): + # replace leading number for local variables + parts = orig_name.split('_') + if len(parts) > 1 and parts[0].isdigit(): + return parts[1] + return orig_name + + def collect_context(self, frame): + res = {} + for k, v in frame.f_locals.items(): + if not k.startswith('l_'): + res[k] = v + elif v and not _is_missing(v): + res[self._get_real_var_name(k[2:])] = v + if self.back_context is not None: + for k, v in self.back_context.items(): + res[k] = v + return res + + def _change_variable(self, frame, name, value): + in_vars_or_parents = False + if 'context' in frame.f_locals: + if name in frame.f_locals['context'].parent: + self.back_context.parent[name] = value + in_vars_or_parents = True + if name in frame.f_locals['context'].vars: + self.back_context.vars[name] = value + in_vars_or_parents = True + + l_name = 'l_' + name + if l_name in frame.f_locals: + if in_vars_or_parents: + frame.f_locals[l_name] = self.back_context.resolve(name) + else: + frame.f_locals[l_name] = value + + +def change_variable(plugin, frame, attr, expression): + if isinstance(frame, Jinja2TemplateFrame): + result = eval(expression, frame.f_globals, frame.f_locals) + frame._change_variable(frame.f_back, attr, result) + return result + return False + + +def _is_missing(item): + if item.__class__.__name__ == 'MissingType': + return True + return False + +def _find_render_function_frame(frame): + #in order to hide internal rendering functions + old_frame = frame + try: + while not ('self' in frame.f_locals and frame.f_locals['self'].__class__.__name__ == 'Template' and \ + frame.f_code.co_name == 'render'): + frame = frame.f_back + if frame is None: + return old_frame + return frame + except: + return old_frame + +def _get_jinja2_template_line(frame): + debug_info = None + if '__jinja_template__' in frame.f_globals: + _debug_info = frame.f_globals['__jinja_template__']._debug_info + if _debug_info != '': + #sometimes template contains only plain text + debug_info = frame.f_globals['__jinja_template__'].debug_info + + if debug_info is None: + return None + + lineno = frame.f_lineno + + for pair in debug_info: + if pair[1] == lineno: + return pair[0] + + return None + +def _get_jinja2_template_filename(frame): + if '__jinja_template__' in frame.f_globals: + fname = frame.f_globals['__jinja_template__'].filename + 
abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_file(fname) + return abs_path_real_path_and_base[1] + return None + + +#======================================================================================================================= +# Jinja2 Step Commands +#======================================================================================================================= + + +def has_exception_breaks(plugin): + if len(plugin.main_debugger.jinja2_exception_break) > 0: + return True + return False + +def has_line_breaks(plugin): + for file, breakpoints in dict_iter_items(plugin.main_debugger.jinja2_breakpoints): + if len(breakpoints) > 0: + return True + return False + +def can_not_skip(plugin, pydb, pydb_frame, frame): + if pydb.jinja2_breakpoints and _is_jinja2_render_call(frame): + filename = _get_jinja2_template_filename(frame) + jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename) + if jinja2_breakpoints_for_file: + return True + return False + + +def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + stop_info['jinja2_stop'] = False + if _is_jinja2_suspended(thread): + stop_info['jinja2_stop'] = event in ('call', 'line') and _is_jinja2_render_call(frame) + plugin_stop = stop_info['jinja2_stop'] + stop = False + if info.pydev_call_from_jinja2 is not None: + if _is_jinja2_internal_function(frame): + #if internal Jinja2 function was called, we sould continue debugging inside template + info.pydev_call_from_jinja2 = None + else: + #we go into python code from Jinja2 rendering frame + stop = True + + if event == 'call' and _is_jinja2_context_call(frame.f_back): + #we called function from context, the next step will be in function + info.pydev_call_from_jinja2 = 1 + + if event == 'return' and _is_jinja2_context_call(frame.f_back): + #we return from python code to Jinja2 rendering frame + info.pydev_step_stop = info.pydev_call_from_jinja2 + info.pydev_call_from_jinja2 = None + thread.additional_info.suspend_type = JINJA2_SUSPEND + stop = False + + #print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop_info", stop_info, \ + # "thread.additional_info.suspend_type", thread.additional_info.suspend_type + #print "event", event, "farme.locals", frame.f_locals + return stop, plugin_stop + + +def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop): + info = args[2] + thread = args[3] + plugin_stop = False + stop_info['jinja2_stop'] = False + if _is_jinja2_suspended(thread): + stop = False + + if info.pydev_call_inside_jinja2 is None: + if _is_jinja2_render_call(frame): + if event == 'call': + info.pydev_call_inside_jinja2 = frame.f_back + if event in ('line', 'return'): + info.pydev_call_inside_jinja2 = frame + else: + if event == 'line': + if _is_jinja2_render_call(frame) and info.pydev_call_inside_jinja2 is frame: + stop_info['jinja2_stop'] = True + plugin_stop = stop_info['jinja2_stop'] + if event == 'return': + if frame is info.pydev_call_inside_jinja2 and 'event' not in frame.f_back.f_locals: + info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame.f_back) + return stop, plugin_stop + else: + if event == 'return' and _is_jinja2_context_call(frame.f_back): + #we return from python code to Jinja2 rendering frame + info.pydev_call_from_jinja2 = None + info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame) + thread.additional_info.suspend_type = JINJA2_SUSPEND + stop = False + return stop, plugin_stop + #print 
"info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop", stop, "jinja_stop", jinja2_stop, \ + # "thread.additional_info.suspend_type", thread.additional_info.suspend_type + #print "event", event, "info.pydev_call_inside_jinja2", info.pydev_call_inside_jinja2 + #print "frame", frame, "frame.f_back", frame.f_back, "step_stop", info.pydev_step_stop + #print "is_context_call", _is_jinja2_context_call(frame) + #print "render", _is_jinja2_render_call(frame) + #print "-------------" + return stop, plugin_stop + + +def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd): + pydb = args[0] + thread = args[3] + if 'jinja2_stop' in stop_info and stop_info['jinja2_stop']: + frame = _suspend_jinja2(pydb, thread, frame, step_cmd) + if frame: + pydb.do_wait_suspend(thread, frame, event, arg) + return True + return False + + +def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args): + pydb= args[0] + filename = args[1] + info = args[2] + new_frame = None + jinja2_breakpoint = None + flag = False + type = 'jinja2' + if event == 'line' and info.pydev_state != STATE_SUSPEND and \ + pydb.jinja2_breakpoints and _is_jinja2_render_call(frame): + filename = _get_jinja2_template_filename(frame) + jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename) + new_frame = Jinja2TemplateFrame(frame) + + if jinja2_breakpoints_for_file: + lineno = frame.f_lineno + template_lineno = _get_jinja2_template_line(frame) + if template_lineno is not None and template_lineno in jinja2_breakpoints_for_file: + jinja2_breakpoint = jinja2_breakpoints_for_file[template_lineno] + flag = True + new_frame = Jinja2TemplateFrame(frame) + + return flag, jinja2_breakpoint, new_frame, type + + +def suspend(plugin, pydb, thread, frame, bp_type): + if bp_type == 'jinja2': + return _suspend_jinja2(pydb, thread, frame) + return None + + +def exception_break(plugin, pydb, pydb_frame, frame, args, arg): + pydb = args[0] + thread = args[3] + exception, value, trace = arg + if pydb.jinja2_exception_break: + exception_type = dict_keys(pydb.jinja2_exception_break)[0] + if get_exception_name(exception) in ('UndefinedError', 'TemplateNotFound', 'TemplatesNotFound'): + #errors in rendering + render_frame = _find_jinja2_render_frame(frame) + if render_frame: + suspend_frame = _suspend_jinja2(pydb, thread, render_frame, CMD_ADD_EXCEPTION_BREAK, message=exception_type) + if suspend_frame: + add_exception_to_frame(suspend_frame, (exception, value, trace)) + flag = True + suspend_frame.f_back = frame + frame = suspend_frame + return flag, frame + elif get_exception_name(exception) in ('TemplateSyntaxError', 'TemplateAssertionError'): + #errors in compile time + name = frame.f_code.co_name + if name in ('template', 'top-level template code', '') or name.startswith('block '): + #Jinja2 translates exception info and creates fake frame on his own + pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK) + add_exception_to_frame(frame, (exception, value, trace)) + thread.additional_info.suspend_type = JINJA2_SUSPEND + thread.additional_info.pydev_message = str(exception_type) + flag = True + return flag, frame + return None \ No newline at end of file diff --git a/ptvsd/pydevd/pydevd_tracing.py b/ptvsd/pydevd/pydevd_tracing.py new file mode 100644 index 00000000..94d4c8eb --- /dev/null +++ b/ptvsd/pydevd/pydevd_tracing.py @@ -0,0 +1,128 @@ +from _pydevd_bundle.pydevd_constants import get_frame +from _pydev_imps._pydev_saved_modules import thread, threading + +try: + import cStringIO as StringIO #may not always be available 
@UnusedImport +except: + try: + import StringIO #@Reimport + except: + import io as StringIO + + +import sys #@Reimport +import traceback + +_original_settrace = sys.settrace + +class TracingFunctionHolder: + '''This class exists just to keep some variables (so that we don't keep them in the global namespace). + ''' + _original_tracing = None + _warn = True + _lock = thread.allocate_lock() + _traceback_limit = 1 + _warnings_shown = {} + + +def get_exception_traceback_str(): + exc_info = sys.exc_info() + s = StringIO.StringIO() + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], file=s) + return s.getvalue() + +def _get_stack_str(frame): + + msg = '\nIf this is needed, please check: ' + \ + '\nhttp://pydev.blogspot.com/2007/06/why-cant-pydev-debugger-work-with.html' + \ + '\nto see how to restore the debug tracing back correctly.\n' + + if TracingFunctionHolder._traceback_limit: + s = StringIO.StringIO() + s.write('Call Location:\n') + traceback.print_stack(f=frame, limit=TracingFunctionHolder._traceback_limit, file=s) + msg = msg + s.getvalue() + + return msg + +def _internal_set_trace(tracing_func): + if TracingFunctionHolder._warn: + frame = get_frame() + if frame is not None and frame.f_back is not None: + if not frame.f_back.f_code.co_filename.lower().endswith('threading.py'): + + message = \ + '\nPYDEV DEBUGGER WARNING:' + \ + '\nsys.settrace() should not be used when the debugger is being used.' + \ + '\nThis may cause the debugger to stop working correctly.' + \ + '%s' % _get_stack_str(frame.f_back) + + if message not in TracingFunctionHolder._warnings_shown: + #only warn about each message once... + TracingFunctionHolder._warnings_shown[message] = 1 + sys.stderr.write('%s\n' % (message,)) + sys.stderr.flush() + + if TracingFunctionHolder._original_tracing: + TracingFunctionHolder._original_tracing(tracing_func) + + +def SetTrace(tracing_func, frame_eval_func=None, dummy_tracing_func=None): + if tracing_func is not None and frame_eval_func is not None: + # There is no need to set tracing function if frame evaluation is available + frame_eval_func() + tracing_func = dummy_tracing_func + + if TracingFunctionHolder._original_tracing is None: + #This may happen before replace_sys_set_trace_func is called. 
+ sys.settrace(tracing_func) + return + + TracingFunctionHolder._lock.acquire() + try: + TracingFunctionHolder._warn = False + _internal_set_trace(tracing_func) + TracingFunctionHolder._warn = True + finally: + TracingFunctionHolder._lock.release() + + +def replace_sys_set_trace_func(): + if TracingFunctionHolder._original_tracing is None: + TracingFunctionHolder._original_tracing = sys.settrace + sys.settrace = _internal_set_trace + +def restore_sys_set_trace_func(): + if TracingFunctionHolder._original_tracing is not None: + sys.settrace = TracingFunctionHolder._original_tracing + TracingFunctionHolder._original_tracing = None + +def settrace_while_running_if_frame_eval(py_db, trace_func): + if not py_db.ready_to_run: + # do it if only debug session is started + return + + if py_db.frame_eval_func is None: + return + + threads = threading.enumerate() + try: + for t in threads: + if getattr(t, 'is_pydev_daemon_thread', False): + continue + additional_info = None + try: + additional_info = t.additional_info + except AttributeError: + pass # that's ok, no info currently set + if additional_info is None: + continue + + for frame in additional_info.iter_frames(t): + py_db.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True, dispatch_func=trace_func) + py_db.enable_cache_frames_without_breaks(False) + # sometimes (when script enters new frames too fast), we can't enable tracing only in the appropriate + # frame. So, if breakpoint was added during run, we should disable frame evaluation forever. + py_db.do_not_use_frame_eval = True + except: + traceback.print_exc() diff --git a/ptvsd/pydevd/pytest.ini b/ptvsd/pydevd/pytest.ini new file mode 100644 index 00000000..0796dc6e --- /dev/null +++ b/ptvsd/pydevd/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +norecursedirs=tests_runfiles/samples +addopts=--capture=no -vv +testpaths=test_pydevd_reload tests tests_mainloop tests_python tests_runfiles \ No newline at end of file diff --git a/ptvsd/pydevd/runfiles.py b/ptvsd/pydevd/runfiles.py new file mode 100644 index 00000000..8a2506f3 --- /dev/null +++ b/ptvsd/pydevd/runfiles.py @@ -0,0 +1,303 @@ +''' +Entry point module (keep at root): + +Used to run with tests with unittest/pytest/nose. +''' + + +import os +try: + xrange +except: + xrange = range + +def main(): + import sys + + # Separate the nose params and the pydev params. + pydev_params = [] + other_test_framework_params = [] + found_other_test_framework_param = None + + NOSE_PARAMS = '--nose-params' + PY_TEST_PARAMS = '--py-test-params' + + for arg in sys.argv[1:]: + if not found_other_test_framework_param and arg != NOSE_PARAMS and arg != PY_TEST_PARAMS: + pydev_params.append(arg) + + else: + if not found_other_test_framework_param: + found_other_test_framework_param = arg + else: + other_test_framework_params.append(arg) + + + # Here we'll run either with nose or with the pydev_runfiles. 
+ from _pydev_runfiles import pydev_runfiles + from _pydev_runfiles import pydev_runfiles_xml_rpc + from _pydevd_bundle import pydevd_constants + from pydevd_file_utils import _NormFile + + DEBUG = 0 + if DEBUG: + sys.stdout.write('Received parameters: %s\n' % (sys.argv,)) + sys.stdout.write('Params for pydev: %s\n' % (pydev_params,)) + if found_other_test_framework_param: + sys.stdout.write('Params for test framework: %s, %s\n' % (found_other_test_framework_param, other_test_framework_params)) + + try: + configuration = pydev_runfiles.parse_cmdline([sys.argv[0]] + pydev_params) + except: + sys.stderr.write('Command line received: %s\n' % (sys.argv,)) + raise + pydev_runfiles_xml_rpc.initialize_server(configuration.port) # Note that if the port is None, a Null server will be initialized. + + NOSE_FRAMEWORK = "nose" + PY_TEST_FRAMEWORK = "py.test" + test_framework = None # Default (pydev) + try: + if found_other_test_framework_param: + if found_other_test_framework_param == NOSE_PARAMS: + test_framework = NOSE_FRAMEWORK + import nose + + elif found_other_test_framework_param == PY_TEST_PARAMS: + test_framework = PY_TEST_FRAMEWORK + import pytest + + else: + raise ImportError('Test framework: %s not supported.' % (found_other_test_framework_param,)) + + else: + raise ImportError() + + except ImportError: + if found_other_test_framework_param: + raise + + test_framework = None + + # Clear any exception that may be there so that clients don't see it. + # See: https://sourceforge.net/tracker/?func=detail&aid=3408057&group_id=85796&atid=577329 + if hasattr(sys, 'exc_clear'): + sys.exc_clear() + + if not test_framework: + + return pydev_runfiles.main(configuration) # Note: still doesn't return a proper value. + + else: + # We'll convert the parameters to what nose or py.test expects. + # The supported parameters are: + # runfiles.py --config-file|-t|--tests dirs|files --nose-params xxx yyy zzz + # (all after --nose-params should be passed directly to nose) + + # In java: + # --tests = Constants.ATTR_UNITTEST_TESTS + # --config-file = Constants.ATTR_UNITTEST_CONFIGURATION_FILE + + + # The only thing actually handled here are the tests that we want to run, which we'll + # handle and pass as what the test framework expects. + + py_test_accept_filter = {} + files_to_tests = configuration.files_to_tests + + if files_to_tests: + # Handling through the file contents (file where each line is a test) + files_or_dirs = [] + for file, tests in files_to_tests.items(): + if test_framework == NOSE_FRAMEWORK: + for test in tests: + files_or_dirs.append(file + ':' + test) + + elif test_framework == PY_TEST_FRAMEWORK: + file = _NormFile(file) + py_test_accept_filter[file] = tests + files_or_dirs.append(file) + + else: + raise AssertionError('Cannot handle test framework: %s at this point.' % (test_framework,)) + + else: + if configuration.tests: + # Tests passed (works together with the files_or_dirs) + files_or_dirs = [] + for file in configuration.files_or_dirs: + if test_framework == NOSE_FRAMEWORK: + for t in configuration.tests: + files_or_dirs.append(file + ':' + t) + + elif test_framework == PY_TEST_FRAMEWORK: + file = _NormFile(file) + py_test_accept_filter[file] = configuration.tests + files_or_dirs.append(file) + + else: + raise AssertionError('Cannot handle test framework: %s at this point.' 
% (test_framework,)) + else: + # Only files or dirs passed (let it do the test-loading based on those paths) + files_or_dirs = configuration.files_or_dirs + + argv = other_test_framework_params + files_or_dirs + + + if test_framework == NOSE_FRAMEWORK: + # Nose usage: http://somethingaboutorange.com/mrl/projects/nose/0.11.2/usage.html + # show_stdout_option = ['-s'] + # processes_option = ['--processes=2'] + argv.insert(0, sys.argv[0]) + if DEBUG: + sys.stdout.write('Final test framework args: %s\n' % (argv[1:],)) + + from _pydev_runfiles import pydev_runfiles_nose + PYDEV_NOSE_PLUGIN_SINGLETON = pydev_runfiles_nose.start_pydev_nose_plugin_singleton(configuration) + argv.append('--with-pydevplugin') + # Return 'not' because it will return 'success' (so, exit == 0 if success) + return not nose.run(argv=argv, addplugins=[PYDEV_NOSE_PLUGIN_SINGLETON]) + + elif test_framework == PY_TEST_FRAMEWORK: + + if '--coverage_output_dir' in pydev_params and '--coverage_include' in pydev_params: + coverage_output_dir = pydev_params[pydev_params.index('--coverage_output_dir') + 1] + coverage_include = pydev_params[pydev_params.index('--coverage_include') + 1] + try: + import pytest_cov + except ImportError: + sys.stderr.write('To do a coverage run with pytest the pytest-cov library is needed (i.e.: pip install pytest-cov).\n\n') + raise + + argv.insert(0, '--cov-append') + argv.insert(1, '--cov-report=') + argv.insert(2, '--cov=%s' % (coverage_include,)) + + import time + os.environ['COVERAGE_FILE'] = os.path.join(coverage_output_dir, '.coverage.%s' % (time.time(),)) + + if DEBUG: + sys.stdout.write('Final test framework args: %s\n' % (argv,)) + sys.stdout.write('py_test_accept_filter: %s\n' % (py_test_accept_filter,)) + + def dotted(p): + # Helper to convert path to have dots instead of slashes + return os.path.normpath(p).replace(os.sep, "/").replace('/', '.') + + curr_dir = os.path.realpath('.') + curr_dotted = dotted(curr_dir) + '.' + + # Overcome limitation on py.test: + # When searching conftest if we have a structure as: + # /my_package + # /my_package/conftest.py + # /my_package/tests + # /my_package/tests/test_my_package.py + # The test_my_package won't have access to the conftest contents from the + # test_my_package.py file unless the working dir is set to /my_package. + # + # See related issue (for which we work-around below): + # https://bitbucket.org/hpk42/pytest/issue/639/conftest-being-loaded-twice-giving + + for path in sys.path: + path_dotted = dotted(path) + if curr_dotted.startswith(path_dotted): + os.chdir(path) + break + + for i in xrange(len(argv)): + arg = argv[i] + # Workaround bug in py.test: if we pass the full path it ends up importing conftest + # more than once (so, always work with relative paths). + if os.path.isfile(arg) or os.path.isdir(arg): + from os.path import relpath + try: + # May fail if on different drives + arg = relpath(arg) + except ValueError: + pass + else: + argv[i] = arg + + # To find our runfile helpers (i.e.: plugin)... + d = os.path.dirname(__file__) + if d not in sys.path: + sys.path.insert(0, d) + + import pickle, zlib, base64 + + # Update environment PYTHONPATH so that it finds our plugin if using xdist. + os.environ['PYTHONPATH'] = os.pathsep.join(sys.path) + + # Set what should be skipped in the plugin through an environment variable + s = base64.b64encode(zlib.compress(pickle.dumps(py_test_accept_filter))) + if pydevd_constants.IS_PY3K: + s = s.decode('ascii') # Must be str in py3. 
+ os.environ['PYDEV_PYTEST_SKIP'] = s + + # Identifies the main pid (i.e.: if it's not the main pid it has to connect back to the + # main pid to give xml-rpc notifications). + os.environ['PYDEV_MAIN_PID'] = str(os.getpid()) + os.environ['PYDEV_PYTEST_SERVER'] = str(configuration.port) + + argv.append('-p') + argv.append('_pydev_runfiles.pydev_runfiles_pytest2') + if 'unittest' in sys.modules or 'unittest2' in sys.modules: + sys.stderr.write('pydev test runner error: imported unittest before running pytest.main\n') + return pytest.main(argv) + + else: + raise AssertionError('Cannot handle test framework: %s at this point.' % (test_framework,)) + + +if __name__ == '__main__': + try: + main() + finally: + try: + # The server is not a daemon thread, so, we have to ask for it to be killed! + from _pydev_runfiles import pydev_runfiles_xml_rpc + pydev_runfiles_xml_rpc.force_server_kill() + except: + pass # Ignore any errors here + + import sys + import threading + if hasattr(sys, '_current_frames') and hasattr(threading, 'enumerate'): + import time + import traceback + + class DumpThreads(threading.Thread): + def run(self): + time.sleep(10) + + thread_id_to_name = {} + try: + for t in threading.enumerate(): + thread_id_to_name[t.ident] = '%s (daemon: %s)' % (t.name, t.daemon) + except: + pass + + stack_trace = [ + '===============================================================================', + 'pydev pyunit runner: Threads still found running after tests finished', + '================================= Thread Dump ================================='] + + for thread_id, stack in sys._current_frames().items(): + stack_trace.append('\n-------------------------------------------------------------------------------') + stack_trace.append(" Thread %s" % thread_id_to_name.get(thread_id, thread_id)) + stack_trace.append('') + + if 'self' in stack.f_locals: + sys.stderr.write(str(stack.f_locals['self']) + '\n') + + for filename, lineno, name, line in traceback.extract_stack(stack): + stack_trace.append(' File "%s", line %d, in %s' % (filename, lineno, name)) + if line: + stack_trace.append(" %s" % (line.strip())) + stack_trace.append('\n=============================== END Thread Dump ===============================') + sys.stderr.write('\n'.join(stack_trace)) + + + dump_current_frames_thread = DumpThreads() + dump_current_frames_thread.setDaemon(True) # Daemon so that this thread doesn't halt it! + dump_current_frames_thread.start() diff --git a/ptvsd/pydevd/setup.py b/ptvsd/pydevd/setup.py new file mode 100644 index 00000000..3c60a9e4 --- /dev/null +++ b/ptvsd/pydevd/setup.py @@ -0,0 +1,176 @@ +r''' +Full setup, used to distribute the debugger backend to PyPi. + +Note that this is mostly so that users can do: + +pip install pydevd + +in a machine for doing remote-debugging, as a local installation with the IDE should have +everything already distributed. 
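For reference, the remote-debugging scenario that this PyPI package targets usually comes down to a call like the sketch below. The host and port are placeholders, the snippet assumes the IDE-side debug server is already listening there, and it is an illustration rather than part of this change:

```python
# Rough sketch of attaching a remote process to an IDE debug server.
# '10.0.0.5' and 5678 are placeholders for wherever the IDE is listening.
import pydevd

pydevd.settrace('10.0.0.5', port=5678, suspend=True,
                stdoutToServer=True, stderrToServer=True)

# Execution can be stepped through from the IDE from this point on.
```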
+ +Reference on wheels: +https://hynek.me/articles/sharing-your-labor-of-love-pypi-quick-and-dirty/ +http://lucumr.pocoo.org/2014/1/27/python-on-wheels/ + +Another (no wheels): https://jamie.curle.io/blog/my-first-experience-adding-package-pypi/ + +New version: change version and then: + +rm dist/pydevd* + +C:\tools\Miniconda32\Scripts\activate py27_32 +python setup.py sdist bdist_wheel +deactivate +dir dist + +C:\tools\Miniconda32\Scripts\activate py34_32 +python setup.py sdist bdist_wheel +deactivate +dir dist + +C:\tools\Miniconda32\Scripts\activate py35_32 +python setup.py sdist bdist_wheel +deactivate +dir dist + +C:\tools\Miniconda32\Scripts\activate py36_32 +python setup.py sdist bdist_wheel +deactivate +dir dist + +C:\tools\Miniconda\Scripts\activate py27_64 +python setup.py sdist bdist_wheel +deactivate +dir dist + +C:\tools\Miniconda\Scripts\activate py34_64 +python setup.py sdist bdist_wheel +deactivate +dir dist + +C:\tools\Miniconda\Scripts\activate py35_64 +python setup.py sdist bdist_wheel +deactivate +dir dist + +C:\tools\Miniconda\Scripts\activate py36_64 +python setup.py sdist bdist_wheel +deactivate +dir dist + +twine upload dist/pydevd* + +git tag pydev_debugger_1_1_1 -a -m "PyDev.Debugger 1.1.1" +git push --tags +''' + + +from setuptools import setup +from setuptools.dist import Distribution +from distutils.extension import Extension +import os + +class BinaryDistribution(Distribution): + def is_pure(self): + return False + +data_files = [] + +def accept_file(f): + f = f.lower() + for ext in '.py .dll .so .dylib .txt .cpp .h .bat .c .sh .md .txt'.split(): + if f.endswith(ext): + return True + + return f in ['readme', 'makefile'] + +data_files.append(('pydevd_attach_to_process', [os.path.join('pydevd_attach_to_process', f) for f in os.listdir('pydevd_attach_to_process') if accept_file(f)])) +for root, dirs, files in os.walk("pydevd_attach_to_process"): + for d in dirs: + data_files.append((os.path.join(root, d), [os.path.join(root, d, f) for f in os.listdir(os.path.join(root, d)) if accept_file(f)])) + +import pydevd +version = pydevd.__version__ + +args = dict( + name='pydevd', + version=version, + description = 'PyDev.Debugger (used in PyDev and PyCharm)', + author='Fabio Zadrozny and others', + url='https://github.com/fabioz/PyDev.Debugger/', + license='EPL (Eclipse Public License)', + packages=[ + '_pydev_bundle', + '_pydev_imps', + '_pydev_runfiles', + '_pydevd_bundle', + '_pydevd_frame_eval', + 'pydev_ipython', + + # 'pydev_sitecustomize', -- Not actually a package (not added) + + # 'pydevd_attach_to_process', -- Not actually a package (included in MANIFEST.in) + + 'pydevd_concurrency_analyser', + 'pydevd_plugins', + 'pydevd_plugins.extensions', + ], + py_modules=[ + # 'interpreterInfo', -- Not needed for debugger + # 'pycompletionserver', -- Not needed for debugger + 'pydev_app_engine_debug_startup', + # 'pydev_coverage', -- Not needed for debugger + # 'pydev_pysrc', -- Not needed for debugger + 'pydev_run_in_console', + 'pydevconsole', + 'pydevd_file_utils', + 'pydevd', + 'pydevd_tracing', + # 'runfiles', -- Not needed for debugger + # 'setup_cython', -- Should not be included as a module + # 'setup', -- Should not be included as a module + ], + classifiers=[ + 'Development Status :: 6 - Mature', + 'Environment :: Console', + 'Intended Audience :: Developers', + + # It seems that the license is not recognized by Pypi, so, not categorizing it for now. 
+ # https://bitbucket.org/pypa/pypi/issues/369/the-eclipse-public-license-superseeded + # 'License :: OSI Approved :: Eclipse Public License', + + 'Operating System :: MacOS :: MacOS X', + 'Operating System :: Microsoft :: Windows', + 'Operating System :: POSIX', + 'Programming Language :: Python', + 'Topic :: Software Development :: Debuggers', + ], + entry_points={ + 'console_scripts':[ + 'pydevd = pydevd:main', + ], + }, + data_files=data_files, + keywords=['pydev', 'pydevd', 'pydev.debugger'], + include_package_data=True, + zip_safe=False, +) + + + +import sys +try: + args_with_binaries = args.copy() + args_with_binaries.update(dict( + distclass=BinaryDistribution, + ext_modules=[ + # In this setup, don't even try to compile with cython, just go with the .c file which should've + # been properly generated from a tested version. + Extension('_pydevd_bundle.pydevd_cython', ["_pydevd_bundle/pydevd_cython.c",]) + ] + )) + setup(**args_with_binaries) +except: + # Compile failed: just setup without compiling cython deps. + setup(**args) + sys.stdout.write('Plain-python version of pydevd installed (cython speedups not available).\n') diff --git a/ptvsd/pydevd/setup_cython.py b/ptvsd/pydevd/setup_cython.py new file mode 100644 index 00000000..b7ee7cf9 --- /dev/null +++ b/ptvsd/pydevd/setup_cython.py @@ -0,0 +1,106 @@ +''' +A simpler setup version just to compile the speedup module. + +It should be used as: + +python setup_cython build_ext --inplace + +Note: the .c file and other generated files are regenerated from +the .pyx file by running "python build_tools/build.py" +''' + +import os +from setuptools import setup +import sys +os.chdir(os.path.dirname(os.path.abspath(__file__))) + + +def process_args(): + target_pydevd_name = None + target_frame_eval = None + force_cython = False + for i, arg in enumerate(sys.argv[:]): + if arg.startswith('--target-pyd-name='): + sys.argv.remove(arg) + target_pydevd_name = arg[len('--target-pyd-name='):] + if arg.startswith('--target-pyd-frame-eval='): + sys.argv.remove(arg) + target_frame_eval = arg[len('--target-pyd-frame-eval='):] + if arg == '--force-cython': + sys.argv.remove(arg) + force_cython = True + + return target_pydevd_name, target_frame_eval, force_cython + + +def build_extension(dir_name, extension_name, target_pydevd_name, force_cython, has_pxd=False): + pyx_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pyx" % (extension_name,)) + + if target_pydevd_name != extension_name: + # It MUST be there in this case! + # (otherwise we'll have unresolved externals because the .c file had another name initially). + import shutil + + # We must force cython in this case (but only in this case -- for the regular setup in the user machine, we + # should always compile the .c file). 
+ force_cython = True + + new_pyx_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pyx" % (target_pydevd_name,)) + new_c_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.c" % (target_pydevd_name,)) + shutil.copy(pyx_file, new_pyx_file) + pyx_file = new_pyx_file + if has_pxd: + pxd_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pxd" % (extension_name,)) + new_pxd_file = os.path.join(os.path.dirname(__file__), dir_name, "%s.pxd" % (target_pydevd_name,)) + shutil.copy(pxd_file, new_pxd_file) + assert os.path.exists(pyx_file) + + try: + if force_cython: + from Cython.Build import cythonize # @UnusedImport + ext_modules = cythonize([ + "%s/%s.pyx" % (dir_name, target_pydevd_name,), + ]) + else: + # Always compile the .c (and not the .pyx) file (which we should keep up-to-date by running build_tools/build.py). + from distutils.extension import Extension + ext_modules = [Extension('%s.%s' % (dir_name, target_pydevd_name,), [ + "%s/%s.c" % (dir_name, target_pydevd_name,), + ])] + + setup( + name='Cythonize', + ext_modules=ext_modules + ) + finally: + if target_pydevd_name != extension_name: + try: + os.remove(new_pyx_file) + except: + import traceback + traceback.print_exc() + try: + os.remove(new_c_file) + except: + import traceback + traceback.print_exc() + if has_pxd: + try: + os.remove(new_pxd_file) + except: + import traceback + traceback.print_exc() + + +target_pydevd_name, target_frame_eval, force_cython = process_args() + +extension_name = "pydevd_cython" +if target_pydevd_name is None: + target_pydevd_name = extension_name +build_extension("_pydevd_bundle", extension_name, target_pydevd_name, force_cython) + +if sys.version_info[:2] == (3, 6): + extension_name = "pydevd_frame_evaluator" + if target_frame_eval is None: + target_frame_eval = extension_name + build_extension("_pydevd_frame_eval", extension_name, target_frame_eval, force_cython, True) diff --git a/ptvsd/pydevd/stubs/_django_manager_body.py b/ptvsd/pydevd/stubs/_django_manager_body.py new file mode 100644 index 00000000..2bf47067 --- /dev/null +++ b/ptvsd/pydevd/stubs/_django_manager_body.py @@ -0,0 +1,414 @@ +# This is a dummy for code-completion purposes. + +def __unicode__(self): + """ + Return "app_label.model_label.manager_name". + """ + +def _copy_to_model(self, model): + """ + Makes a copy of the manager and assigns it to 'model', which should be + a child of the existing model (used when inheriting a manager from an + abstract base class). + """ + + +def _db(self): + """ + + """ + + +def _get_queryset_methods(cls, queryset_class): + """ + + """ + + +def _hints(self): + """ + dict() -> new empty dictionary + dict(mapping) -> new dictionary initialized from a mapping object's + (key, value) pairs + dict(iterable) -> new dictionary initialized as if via: + d = {} + for k, v in iterable: + d[k] = v + dict(**kwargs) -> new dictionary initialized with the name=value pairs + in the keyword argument list. For example: dict(one=1, two=2) + """ + + +def _inherited(self): + """ + + """ + + +def _insert(self, *args, **kwargs): + """ + Inserts a new record for the given model. This provides an interface to + the InsertQuery class and is how Model.save() is implemented. + """ + + +def _queryset_class(self): + """ + Represents a lazy database lookup for a set of objects. + """ + + +def _set_creation_counter(self): + """ + Sets the creation counter value for this instance and increments the + class-level copy. 
+ """ + + +def _update(self, *args, **kwargs): + """ + A version of update that accepts field objects instead of field names. + Used primarily for model saving and not intended for use by general + code (it requires too much poking around at model internals to be + useful at that level). + """ + + +def aggregate(self, *args, **kwargs): + """ + Returns a dictionary containing the calculations (aggregation) + over the current queryset + + If args is present the expression is passed as a kwarg using + the Aggregate object's default alias. + """ + + +def all(self): + """ + @rtype: django.db.models.query.QuerySet + """ + + +def annotate(self, *args, **kwargs): + """ + Return a query set in which the returned objects have been annotated + with data aggregated from related fields. + """ + + +def bulk_create(self, *args, **kwargs): + """ + Inserts each of the instances into the database. This does *not* call + save() on each of the instances, does not send any pre/post save + signals, and does not set the primary key attribute if it is an + autoincrement field. + """ + + +def check(self, **kwargs): + """ + + """ + + +def complex_filter(self, *args, **kwargs): + """ + Returns a new QuerySet instance with filter_obj added to the filters. + + filter_obj can be a Q object (or anything with an add_to_query() + method) or a dictionary of keyword lookup arguments. + + This exists to support framework features such as 'limit_choices_to', + and usually it will be more natural to use other methods. + + @rtype: django.db.models.query.QuerySet + """ + + +def contribute_to_class(self, model, name): + """ + + """ + + +def count(self, *args, **kwargs): + """ + Performs a SELECT COUNT() and returns the number of records as an + integer. + + If the QuerySet is already fully cached this simply returns the length + of the cached results set to avoid multiple SELECT COUNT(*) calls. + """ + + +def create(self, *args, **kwargs): + """ + Creates a new object with the given kwargs, saving it to the database + and returning the created object. + """ + + +def creation_counter(self): + """ + + """ + + +def dates(self, *args, **kwargs): + """ + Returns a list of date objects representing all available dates for + the given field_name, scoped to 'kind'. + """ + + +def datetimes(self, *args, **kwargs): + """ + Returns a list of datetime objects representing all available + datetimes for the given field_name, scoped to 'kind'. + """ + + +def db(self): + """ + + """ + + +def db_manager(self, using=None, hints=None): + """ + + """ + + +def defer(self, *args, **kwargs): + """ + Defers the loading of data for certain fields until they are accessed. + The set of fields to defer is added to any existing set of deferred + fields. The only exception to this is if None is passed in as the only + parameter, in which case all deferrals are removed (None acts as a + reset option). + """ + + +def distinct(self, *args, **kwargs): + """ + Returns a new QuerySet instance that will select only distinct results. + + @rtype: django.db.models.query.QuerySet + """ + + +def earliest(self, *args, **kwargs): + """ + + """ + + +def exclude(self, *args, **kwargs): + """ + Returns a new QuerySet instance with NOT (args) ANDed to the existing + set. + + @rtype: django.db.models.query.QuerySet + """ + + +def exists(self, *args, **kwargs): + """ + + """ + + +def extra(self, *args, **kwargs): + """ + Adds extra SQL fragments to the query. 
+ """ + + +def filter(self, *args, **kwargs): + """ + Returns a new QuerySet instance with the args ANDed to the existing + set. + + @rtype: django.db.models.query.QuerySet + """ + + +def first(self, *args, **kwargs): + """ + Returns the first object of a query, returns None if no match is found. + """ + + +def from_queryset(cls, queryset_class, class_name=None): + """ + + """ + + +def get(self, *args, **kwargs): + """ + Performs the query and returns a single object matching the given + keyword arguments. + """ + + +def get_or_create(self, *args, **kwargs): + """ + Looks up an object with the given kwargs, creating one if necessary. + Returns a tuple of (object, created), where created is a boolean + specifying whether an object was created. + """ + + +def get_queryset(self): + """ + Returns a new QuerySet object. Subclasses can override this method to + easily customize the behavior of the Manager. + + @rtype: django.db.models.query.QuerySet + """ + + +def in_bulk(self, *args, **kwargs): + """ + Returns a dictionary mapping each of the given IDs to the object with + that ID. + """ + + +def iterator(self, *args, **kwargs): + """ + An iterator over the results from applying this QuerySet to the + database. + """ + + +def last(self, *args, **kwargs): + """ + Returns the last object of a query, returns None if no match is found. + """ + + +def latest(self, *args, **kwargs): + """ + + """ + + +def model(self): + """ + MyModel(id) + """ + + +def none(self, *args, **kwargs): + """ + Returns an empty QuerySet. + + @rtype: django.db.models.query.QuerySet + """ + + +def only(self, *args, **kwargs): + """ + Essentially, the opposite of defer. Only the fields passed into this + method and that are not already specified as deferred are loaded + immediately when the queryset is evaluated. + """ + + +def order_by(self, *args, **kwargs): + """ + Returns a new QuerySet instance with the ordering changed. + + @rtype: django.db.models.query.QuerySet + """ + + +def prefetch_related(self, *args, **kwargs): + """ + Returns a new QuerySet instance that will prefetch the specified + Many-To-One and Many-To-Many related objects when the QuerySet is + evaluated. + + When prefetch_related() is called more than once, the list of lookups to + prefetch is appended to. If prefetch_related(None) is called, the list + is cleared. + + @rtype: django.db.models.query.QuerySet + """ + + +def raw(self, *args, **kwargs): + """ + + """ + + +def reverse(self, *args, **kwargs): + """ + Reverses the ordering of the QuerySet. + + @rtype: django.db.models.query.QuerySet + """ + + +def select_for_update(self, *args, **kwargs): + """ + Returns a new QuerySet instance that will select objects with a + FOR UPDATE lock. + + @rtype: django.db.models.query.QuerySet + """ + + +def select_related(self, *args, **kwargs): + """ + Returns a new QuerySet instance that will select related objects. + + If fields are specified, they must be ForeignKey fields and only those + related objects are included in the selection. + + If select_related(None) is called, the list is cleared. + + @rtype: django.db.models.query.QuerySet + """ + + +def update(self, *args, **kwargs): + """ + Updates all elements in the current QuerySet, setting all the given + fields to the appropriate values. + """ + + +def update_or_create(self, *args, **kwargs): + """ + Looks up an object with the given kwargs, updating one with defaults + if it exists, otherwise creates a new one. 
+ Returns a tuple (object, created), where created is a boolean + specifying whether an object was created. + """ + + +def using(self, *args, **kwargs): + """ + Selects which database this QuerySet should execute its query against. + + @rtype: django.db.models.query.QuerySet + """ + + +def values(self, *args, **kwargs): + """ + + """ + + +def values_list(self, *args, **kwargs): + """ + + """ + diff --git a/ptvsd/pydevd/stubs/_get_tips.py b/ptvsd/pydevd/stubs/_get_tips.py new file mode 100644 index 00000000..b98e1c53 --- /dev/null +++ b/ptvsd/pydevd/stubs/_get_tips.py @@ -0,0 +1,280 @@ +import os.path +import inspect +import sys + +# completion types. +TYPE_IMPORT = '0' +TYPE_CLASS = '1' +TYPE_FUNCTION = '2' +TYPE_ATTR = '3' +TYPE_BUILTIN = '4' +TYPE_PARAM = '5' + +def _imp(name, log=None): + try: + return __import__(name) + except: + if '.' in name: + sub = name[0:name.rfind('.')] + + if log is not None: + log.AddContent('Unable to import', name, 'trying with', sub) + # log.AddContent('PYTHONPATH:') + # log.AddContent('\n'.join(sorted(sys.path))) + log.AddException() + + return _imp(sub, log) + else: + s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path) + if log is not None: + log.AddContent(s) + log.AddException() + + raise ImportError(s) + + +IS_IPY = False +if sys.platform == 'cli': + IS_IPY = True + _old_imp = _imp + def _imp(name, log=None): + # We must add a reference in clr for .Net + import clr # @UnresolvedImport + initial_name = name + while '.' in name: + try: + clr.AddReference(name) + break # If it worked, that's OK. + except: + name = name[0:name.rfind('.')] + else: + try: + clr.AddReference(name) + except: + pass # That's OK (not dot net module). + + return _old_imp(initial_name, log) + + + +def GetFile(mod): + f = None + try: + f = inspect.getsourcefile(mod) or inspect.getfile(mod) + except: + if hasattr(mod, '__file__'): + f = mod.__file__ + if f.lower(f[-4:]) in ['.pyc', '.pyo']: + filename = f[:-4] + '.py' + if os.path.exists(filename): + f = filename + + return f + +def Find(name, log=None): + f = None + + mod = _imp(name, log) + parent = mod + foundAs = '' + + if inspect.ismodule(mod): + f = GetFile(mod) + + components = name.split('.') + + old_comp = None + for comp in components[1:]: + try: + # this happens in the following case: + # we have mx.DateTime.mxDateTime.mxDateTime.pyd + # but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd + mod = getattr(mod, comp) + except AttributeError: + if old_comp != comp: + raise + + if inspect.ismodule(mod): + f = GetFile(mod) + else: + if len(foundAs) > 0: + foundAs = foundAs + '.' 
+ foundAs = foundAs + comp + + old_comp = comp + + return f, mod, parent, foundAs + + +def GenerateTip(data, log=None): + data = data.replace('\n', '') + if data.endswith('.'): + data = data.rstrip('.') + + f, mod, parent, foundAs = Find(data, log) + # print_ >> open('temp.txt', 'w'), f + tips = GenerateImportsTipForModule(mod) + return f, tips + + +def CheckChar(c): + if c == '-' or c == '.': + return '_' + return c + +def GenerateImportsTipForModule(obj_to_complete, dirComps=None, getattr=getattr, filter=lambda name:True): + ''' + @param obj_to_complete: the object from where we should get the completions + @param dirComps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter + @param getattr: the way to get a given object from the obj_to_complete (used for the completer) + @param filter: a callable that receives the name and decides if it should be appended or not to the results + @return: list of tuples, so that each tuple represents a completion with: + name, doc, args, type (from the TYPE_* constants) + ''' + ret = [] + + if dirComps is None: + dirComps = dir(obj_to_complete) + if hasattr(obj_to_complete, '__dict__'): + dirComps.append('__dict__') + if hasattr(obj_to_complete, '__class__'): + dirComps.append('__class__') + + getCompleteInfo = True + + if len(dirComps) > 1000: + # ok, we don't want to let our users wait forever... + # no complete info for you... + + getCompleteInfo = False + + dontGetDocsOn = (float, int, str, tuple, list) + for d in dirComps: + + if d is None: + continue + + if not filter(d): + continue + + args = '' + + try: + obj = getattr(obj_to_complete, d) + except: # just ignore and get it without aditional info + ret.append((d, '', args, TYPE_BUILTIN)) + else: + + if getCompleteInfo: + retType = TYPE_BUILTIN + + # check if we have to get docs + getDoc = True + for class_ in dontGetDocsOn: + + if isinstance(obj, class_): + getDoc = False + break + + doc = '' + if getDoc: + # no need to get this info... too many constants are defined and + # makes things much slower (passing all that through sockets takes quite some time) + try: + doc = inspect.getdoc(obj) + if doc is None: + doc = '' + except: # may happen on jython when checking java classes (so, just ignore it) + doc = '' + + + if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj): + try: + args, vargs, kwargs, defaults = inspect.getargspec(obj) + except: + args, vargs, kwargs, defaults = (('self',), None, None, None) + if defaults is not None: + start_defaults_at = len(args) - len(defaults) + + + r = '' + for i, a in enumerate(args): + + if len(r) > 0: + r = r + ', ' + + r = r + str(a) + + if defaults is not None and i >= start_defaults_at: + default = defaults[i - start_defaults_at] + r += '=' +str(default) + + + others = '' + if vargs: + others += '*' + vargs + + if kwargs: + if others: + others+= ', ' + others += '**' + kwargs + + if others: + r+= ', ' + + + args = '(%s%s)' % (r, others) + retType = TYPE_FUNCTION + + elif inspect.isclass(obj): + retType = TYPE_CLASS + + elif inspect.ismodule(obj): + retType = TYPE_IMPORT + + else: + retType = TYPE_ATTR + + + # add token and doc to return - assure only strings. 
+ ret.append((d, doc, args, retType)) + + + else: # getCompleteInfo == False + if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj): + retType = TYPE_FUNCTION + + elif inspect.isclass(obj): + retType = TYPE_CLASS + + elif inspect.ismodule(obj): + retType = TYPE_IMPORT + + else: + retType = TYPE_ATTR + # ok, no complete info, let's try to do this as fast and clean as possible + # so, no docs for this kind of information, only the signatures + ret.append((d, '', str(args), retType)) + + return ret + + + + +if __name__ == '__main__': + # To use when we have some object: i.e.: obj_to_complete=MyModel.objects + temp = ''' +def %(method_name)s%(args)s: + """ +%(doc)s + """ +''' + + for entry in GenerateImportsTipForModule(obj_to_complete): + import textwrap + doc = textwrap.dedent(entry[1]) + lines = [] + for line in doc.splitlines(): + lines.append(' ' + line) + doc = '\n'.join(lines) + print temp % dict(method_name=entry[0], args=entry[2] or '(self)', doc=doc) diff --git a/ptvsd/pydevd/stubs/pycompletion.py b/ptvsd/pydevd/stubs/pycompletion.py new file mode 100644 index 00000000..f9fb7733 --- /dev/null +++ b/ptvsd/pydevd/stubs/pycompletion.py @@ -0,0 +1,39 @@ +#!/usr/bin/python +''' +@author Radim Kubacki +''' +from _pydev_bundle import _pydev_imports_tipper +import traceback +import StringIO +import sys +import urllib +import pycompletionserver + + +#======================================================================================================================= +# GetImports +#======================================================================================================================= +def GetImports(module_name): + try: + processor = pycompletionserver.Processor() + data = urllib.unquote_plus(module_name) + def_file, completions = _pydev_imports_tipper.GenerateTip(data) + return processor.formatCompletionMessage(def_file, completions) + except: + s = StringIO.StringIO() + exc_info = sys.exc_info() + + traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s) + err = s.getvalue() + pycompletionserver.dbg('Received error: ' + str(err), pycompletionserver.ERROR) + raise + + +#======================================================================================================================= +# main +#======================================================================================================================= +if __name__ == '__main__': + mod_name = sys.argv[1] + + print(GetImports(mod_name)) + diff --git a/ptvsd/pydevd/test_pydevd_reload/__init__.py b/ptvsd/pydevd/test_pydevd_reload/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/test_pydevd_reload/test_pydevd_reload.py b/ptvsd/pydevd/test_pydevd_reload/test_pydevd_reload.py new file mode 100644 index 00000000..38a19196 --- /dev/null +++ b/ptvsd/pydevd/test_pydevd_reload/test_pydevd_reload.py @@ -0,0 +1,512 @@ +import os # @NoMove +import sys # @NoMove +import pytest + +from _pydevd_bundle import pydevd_reload +import tempfile +import unittest + + +SAMPLE_CODE = """ +class C: + def foo(self): + return 0 + + @classmethod + def bar(cls): + return (0, 0) + + @staticmethod + def stomp(): + return (0, 0, 0) + + def unchanged(self): + return 'unchanged' +""" + +from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_IRONPYTHON + +@pytest.mark.skipif(IS_JYTHON or IS_IRONPYTHON, reason='CPython related test') +class Test(unittest.TestCase): + + + def setUp(self): + unittest.TestCase.setUp(self) + self.tempdir 
= None + self.save_path = None + self.tempdir = tempfile.mkdtemp() + self.save_path = list(sys.path) + sys.path.append(self.tempdir) + try: + del sys.modules['x'] + except: + pass + + + def tearDown(self): + unittest.TestCase.tearDown(self) + sys.path = self.save_path + try: + del sys.modules['x'] + except: + pass + + def make_mod(self, name="x", repl=None, subst=None, sample=SAMPLE_CODE): + fn = os.path.join(self.tempdir, name + ".py") + f = open(fn, "w") + if repl is not None and subst is not None: + sample = sample.replace(repl, subst) + try: + f.write(sample) + finally: + f.close() + + + def test_pydevd_reload(self): + + self.make_mod() + import x # @UnresolvedImport + + C = x.C + COut = C + Cfoo = C.foo + Cbar = C.bar + Cstomp = C.stomp + + def check2(expected): + C = x.C + Cfoo = C.foo + Cbar = C.bar + Cstomp = C.stomp + b = C() + bfoo = b.foo + self.assertEqual(expected, b.foo()) + self.assertEqual(expected, bfoo()) + self.assertEqual(expected, Cfoo(b)) + + def check(expected): + b = COut() + bfoo = b.foo + self.assertEqual(expected, b.foo()) + self.assertEqual(expected, bfoo()) + self.assertEqual(expected, Cfoo(b)) + self.assertEqual((expected, expected), Cbar()) + self.assertEqual((expected, expected, expected), Cstomp()) + check2(expected) + + check(0) + + # modify mod and reload + count = 0 + while count < 1: + count += 1 + self.make_mod(repl="0", subst=str(count)) + pydevd_reload.xreload(x) + check(count) + + + def test_pydevd_reload2(self): + + self.make_mod() + import x # @UnresolvedImport + + c = x.C() + cfoo = c.foo + self.assertEqual(0, c.foo()) + self.assertEqual(0, cfoo()) + + self.make_mod(repl="0", subst='1') + pydevd_reload.xreload(x) + self.assertEqual(1, c.foo()) + self.assertEqual(1, cfoo()) + + def test_pydevd_reload3(self): + class F: + def m1(self): + return 1 + class G: + def m1(self): + return 2 + + self.assertEqual(F().m1(), 1) + pydevd_reload.Reload(None)._update(None, None, F, G) + self.assertEqual(F().m1(), 2) + + + def test_pydevd_reload4(self): + class F: + pass + F.m1 = lambda a:None + class G: + pass + G.m1 = lambda a:10 + + self.assertEqual(F().m1(), None) + pydevd_reload.Reload(None)._update(None, None, F, G) + self.assertEqual(F().m1(), 10) + + + + def test_if_code_obj_equals(self): + class F: + def m1(self): + return 1 + class G: + def m1(self): + return 1 + class H: + def m1(self): + return 2 + + if hasattr(F.m1, 'func_code'): + self.assertTrue(pydevd_reload.code_objects_equal(F.m1.func_code, G.m1.func_code)) + self.assertFalse(pydevd_reload.code_objects_equal(F.m1.func_code, H.m1.func_code)) + else: + self.assertTrue(pydevd_reload.code_objects_equal(F.m1.__code__, G.m1.__code__)) + self.assertFalse(pydevd_reload.code_objects_equal(F.m1.__code__, H.m1.__code__)) + + + + def test_metaclass(self): + + class Meta(type): + def __init__(cls, name, bases, attrs): + super(Meta, cls).__init__(name, bases, attrs) + + class F: + __metaclass__ = Meta + + def m1(self): + return 1 + + + class G: + __metaclass__ = Meta + + def m1(self): + return 2 + + self.assertEqual(F().m1(), 1) + pydevd_reload.Reload(None)._update(None, None, F, G) + self.assertEqual(F().m1(), 2) + + + + def test_change_hierarchy(self): + + class F(object): + + def m1(self): + return 1 + + + class B(object): + def super_call(self): + return 2 + + class G(B): + + def m1(self): + return self.super_call() + + self.assertEqual(F().m1(), 1) + old = pydevd_reload.notify_error + self._called = False + def on_error(*args): + self._called = True + try: + pydevd_reload.notify_error = on_error + 
pydevd_reload.Reload(None)._update(None, None, F, G) + self.assertTrue(self._called) + finally: + pydevd_reload.notify_error = old + + + def test_change_hierarchy_old_style(self): + + class F: + + def m1(self): + return 1 + + + class B: + def super_call(self): + return 2 + + class G(B): + + def m1(self): + return self.super_call() + + + self.assertEqual(F().m1(), 1) + old = pydevd_reload.notify_error + self._called = False + def on_error(*args): + self._called = True + try: + pydevd_reload.notify_error = on_error + pydevd_reload.Reload(None)._update(None, None, F, G) + self.assertTrue(self._called) + finally: + pydevd_reload.notify_error = old + + + def test_create_class(self): + SAMPLE_CODE1 = """ +class C: + def foo(self): + return 0 +""" + # Creating a new class and using it from old class + SAMPLE_CODE2 = """ +class B: + pass + +class C: + def foo(self): + return B +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + foo = x.C().foo + self.assertEqual(foo(), 0) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(foo().__name__, 'B') + + def test_create_class2(self): + SAMPLE_CODE1 = """ +class C(object): + def foo(self): + return 0 +""" + # Creating a new class and using it from old class + SAMPLE_CODE2 = """ +class B(object): + pass + +class C(object): + def foo(self): + return B +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + foo = x.C().foo + self.assertEqual(foo(), 0) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(foo().__name__, 'B') + + def test_parent_function(self): + SAMPLE_CODE1 = """ +class B(object): + def foo(self): + return 0 + +class C(B): + def call(self): + return self.foo() +""" + # Creating a new class and using it from old class + SAMPLE_CODE2 = """ +class B(object): + def foo(self): + return 0 + def bar(self): + return 'bar' + +class C(B): + def call(self): + return self.bar() +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + call = x.C().call + self.assertEqual(call(), 0) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(call(), 'bar') + + + def test_update_constant(self): + SAMPLE_CODE1 = """ +CONSTANT = 1 + +class B(object): + def foo(self): + return CONSTANT +""" + SAMPLE_CODE2 = """ +CONSTANT = 2 + +class B(object): + def foo(self): + return CONSTANT +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + foo = x.B().foo + self.assertEqual(foo(), 1) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(foo(), 1) #Just making it explicit we don't reload constants. + + + def test_update_constant_with_custom_code(self): + SAMPLE_CODE1 = """ +CONSTANT = 1 + +class B(object): + def foo(self): + return CONSTANT +""" + SAMPLE_CODE2 = """ +CONSTANT = 2 + +def __xreload_old_new__(namespace, name, old, new): + if name == 'CONSTANT': + namespace[name] = new + +class B(object): + def foo(self): + return CONSTANT +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + foo = x.B().foo + self.assertEqual(foo(), 1) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(foo(), 2) #Actually updated it now! 
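A minimal standalone sketch of the `__xreload_old_new__` hook that the constant-reload test above exercises (illustrative only, not part of the vendored files; the names mirror the sample code in the test):

    CONSTANT = 1

    def __xreload_old_new__(namespace, name, old, new):
        # Called by pydevd_reload during xreload with the module namespace,
        # the attribute name and its old/new values. Plain constants are left
        # untouched by default, so assigning the new value here opts this one in.
        if name == 'CONSTANT':
            namespace[name] = new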
+ + + def test_reload_custom_code_after_changes(self): + SAMPLE_CODE1 = """ +CONSTANT = 1 + +class B(object): + def foo(self): + return CONSTANT +""" + SAMPLE_CODE2 = """ +CONSTANT = 1 + +def __xreload_after_reload_update__(namespace): + namespace['CONSTANT'] = 2 + +class B(object): + def foo(self): + return CONSTANT +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + foo = x.B().foo + self.assertEqual(foo(), 1) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(foo(), 2) #Actually updated it now! + + + def test_reload_custom_code_after_changes_in_class(self): + SAMPLE_CODE1 = """ + +class B(object): + CONSTANT = 1 + + def foo(self): + return self.CONSTANT +""" + SAMPLE_CODE2 = """ + + +class B(object): + CONSTANT = 1 + + @classmethod + def __xreload_after_reload_update__(cls): + cls.CONSTANT = 2 + + def foo(self): + return self.CONSTANT +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + foo = x.B().foo + self.assertEqual(foo(), 1) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(foo(), 2) #Actually updated it now! + + + def test_update_constant_with_custom_code2(self): + SAMPLE_CODE1 = """ + +class B(object): + CONSTANT = 1 + + def foo(self): + return self.CONSTANT +""" + SAMPLE_CODE2 = """ + + +class B(object): + + CONSTANT = 2 + + def __xreload_old_new__(cls, name, old, new): + if name == 'CONSTANT': + cls.CONSTANT = new + __xreload_old_new__ = classmethod(__xreload_old_new__) + + def foo(self): + return self.CONSTANT +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + foo = x.B().foo + self.assertEqual(foo(), 1) + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + self.assertEqual(foo(), 2) #Actually updated it now! 
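The preceding tests also exercise the companion `__xreload_after_reload_update__` hook, which the samples define either at module level (receiving the module namespace) or as a classmethod on the reloaded class. A condensed sketch of both forms as they appear in the sample code (illustrative only):

    # Module-level form: adjust the namespace once the reload has finished.
    def __xreload_after_reload_update__(namespace):
        namespace['CONSTANT'] = 2

    class B(object):
        CONSTANT = 1

        # Class-level form: defined as a classmethod on the class being reloaded.
        @classmethod
        def __xreload_after_reload_update__(cls):
            cls.CONSTANT = 2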
+ + + def test_update_with_slots(self): + SAMPLE_CODE1 = """ +class B(object): + + __slots__ = ['bar'] + +""" + SAMPLE_CODE2 = """ +class B(object): + + __slots__ = ['bar', 'foo'] + + def m1(self): + self.bar = 10 + return 1 + +""" + + self.make_mod(sample=SAMPLE_CODE1) + import x # @UnresolvedImport + B = x.B + self.make_mod(sample=SAMPLE_CODE2) + pydevd_reload.xreload(x) + b = B() + self.assertEqual(1, b.m1()) + self.assertEqual(10, b.bar) + self.assertRaises(Exception, setattr, b, 'foo', 20) #__slots__ can't be updated + + diff --git a/ptvsd/pydevd/tests/__init__.py b/ptvsd/pydevd/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/tests/test_check_pydevconsole.py b/ptvsd/pydevd/tests/test_check_pydevconsole.py new file mode 100644 index 00000000..33d02567 --- /dev/null +++ b/ptvsd/pydevd/tests/test_check_pydevconsole.py @@ -0,0 +1,113 @@ +import threading +import unittest +import os +import pytest +import pydevconsole + +from _pydev_bundle.pydev_imports import xmlrpclib, SimpleXMLRPCServer +from _pydev_bundle.pydev_localhost import get_localhost + +try: + raw_input + raw_input_name = 'raw_input' +except NameError: + raw_input_name = 'input' + +try: + from IPython import core # @UnusedImport + has_ipython = True +except: + has_ipython = False + + +#======================================================================================================================= +# Test +#======================================================================================================================= +@pytest.mark.skipif(os.environ.get('TRAVIS') == 'true' or not has_ipython, reason='Too flaky on Travis (and requires IPython).') +class Test(unittest.TestCase): + + def start_client_thread(self, client_port): + class ClientThread(threading.Thread): + def __init__(self, client_port): + threading.Thread.__init__(self) + self.client_port = client_port + + def run(self): + class HandleRequestInput: + def RequestInput(self): + client_thread.requested_input = True + return 'RequestInput: OK' + + def NotifyFinished(self, *args, **kwargs): + client_thread.notified_finished += 1 + return 1 + + handle_request_input = HandleRequestInput() + + from _pydev_bundle import pydev_localhost + self.client_server = client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False) + client_server.register_function(handle_request_input.RequestInput) + client_server.register_function(handle_request_input.NotifyFinished) + client_server.serve_forever() + + def shutdown(self): + return + self.client_server.shutdown() + + client_thread = ClientThread(client_port) + client_thread.requested_input = False + client_thread.notified_finished = 0 + client_thread.setDaemon(True) + client_thread.start() + return client_thread + + + def get_free_addresses(self): + from _pydev_bundle.pydev_localhost import get_socket_names + socket_names = get_socket_names(2, close=True) + return [socket_name[1] for socket_name in socket_names] + + def test_server(self): + # Just making sure that the singleton is created in this thread. 
+ from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend + get_pydev_frontend(get_localhost(), 0) + + client_port, server_port = self.get_free_addresses() + class ServerThread(threading.Thread): + def __init__(self, client_port, server_port): + threading.Thread.__init__(self) + self.client_port = client_port + self.server_port = server_port + + def run(self): + from _pydev_bundle import pydev_localhost + print('Starting server with:', pydev_localhost.get_localhost(), self.server_port, self.client_port) + pydevconsole.start_server(pydev_localhost.get_localhost(), self.server_port, self.client_port) + server_thread = ServerThread(client_port, server_port) + server_thread.setDaemon(True) + server_thread.start() + + client_thread = self.start_client_thread(client_port) #@UnusedVariable + + try: + import time + time.sleep(.3) #let's give it some time to start the threads + + from _pydev_bundle import pydev_localhost + server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port)) + server.execLine("import sys; print('Running with: %s %s' % (sys.executable or sys.platform, sys.version))") + server.execLine('class Foo:') + server.execLine(' pass') + server.execLine('') + server.execLine('foo = Foo()') + server.execLine('a = %s()' % raw_input_name) + initial = time.time() + while not client_thread.requested_input: + if time.time() - initial > 2: + raise AssertionError('Did not get the return asked before the timeout.') + time.sleep(.1) + frame_xml = server.getFrame() + self.assertTrue('RequestInput' in frame_xml, 'Did not fid RequestInput in:\n%s' % (frame_xml,)) + finally: + client_thread.shutdown() + diff --git a/ptvsd/pydevd/tests/test_get_referrers.py b/ptvsd/pydevd/tests/test_get_referrers.py new file mode 100644 index 00000000..b895ed2f --- /dev/null +++ b/ptvsd/pydevd/tests/test_get_referrers.py @@ -0,0 +1,120 @@ +import sys +import threading +import time +import unittest +import pytest +from _pydevd_bundle import pydevd_referrers +from _pydev_bundle.pydev_imports import StringIO + +try: + import gc + gc.get_referrers(unittest) + has_referrers = True +except NotImplementedError: + has_referrers = False + +# Only do get referrers tests if it's actually available. +@pytest.mark.skipif(not has_referrers, reason='gc.get_referrers not implemented') +class Test(unittest.TestCase): + + + def test_get_referrers1(self): + + container = [] + contained = [1, 2] + container.append(0) + container.append(contained) + + # Ok, we have the contained in this frame and inside the given list (which on turn is in this frame too). + # we should skip temporary references inside the get_referrer_info. + result = pydevd_referrers.get_referrer_info(contained) + assert 'list[1]' in result + pydevd_referrers.print_referrers(contained, stream=StringIO()) + + def test_get_referrers2(self): + + class MyClass(object): + def __init__(self): + pass + + contained = [1, 2] + obj = MyClass() + obj.contained = contained + del contained + + # Ok, we have the contained in this frame and inside the given list (which on turn is in this frame too). + # we should skip temporary references inside the get_referrer_info. 
+ result = pydevd_referrers.get_referrer_info(obj.contained) + assert 'found_as="contained"' in result + assert 'MyClass' in result + + + def test_get_referrers3(self): + + class MyClass(object): + def __init__(self): + pass + + contained = [1, 2] + obj = MyClass() + obj.contained = contained + del contained + + # Ok, we have the contained in this frame and inside the given list (which on turn is in this frame too). + # we should skip temporary references inside the get_referrer_info. + result = pydevd_referrers.get_referrer_info(obj.contained) + assert 'found_as="contained"' in result + assert 'MyClass' in result + + + def test_get_referrers4(self): + + class MyClass(object): + def __init__(self): + pass + + obj = MyClass() + obj.me = obj + + # Let's see if we detect the cycle... + result = pydevd_referrers.get_referrer_info(obj) + assert 'found_as="me"' in result #Cyclic ref + + + def test_get_referrers5(self): + container = dict(a=[1]) + + # Let's see if we detect the cycle... + result = pydevd_referrers.get_referrer_info(container['a']) + assert 'test_get_referrers5' not in result #I.e.: NOT in the current method + assert 'found_as="a"' in result + assert 'dict' in result + assert str(id(container)) in result + + + def test_get_referrers6(self): + container = dict(a=[1]) + + def should_appear(obj): + # Let's see if we detect the cycle... + return pydevd_referrers.get_referrer_info(obj) + + result = should_appear(container['a']) + assert 'should_appear' in result + + + def test_get_referrers7(self): + + class MyThread(threading.Thread): + def run(self): + #Note: we do that because if we do + self.frame = sys._getframe() + + t = MyThread() + t.start() + while not hasattr(t, 'frame'): + time.sleep(0.01) + + result = pydevd_referrers.get_referrer_info(t.frame) + assert 'MyThread' in result + diff --git a/ptvsd/pydevd/tests/test_jyserver.py b/ptvsd/pydevd/tests/test_jyserver.py new file mode 100644 index 00000000..49309578 --- /dev/null +++ b/ptvsd/pydevd/tests/test_jyserver.py @@ -0,0 +1,140 @@ +''' +@author Fabio Zadrozny +''' +import sys +import unittest +import socket +import urllib +import pytest +import pycompletionserver + + +IS_JYTHON = sys.platform.find('java') != -1 +DEBUG = 0 + +def dbg(s): + if DEBUG: + sys.stdout.write('TEST %s\n' % s) + +@pytest.mark.skipif(not IS_JYTHON, reason='Jython related test') +class TestJython(unittest.TestCase): + + def test_it(self): + dbg('ok') + + + def test_message(self): + t = pycompletionserver.CompletionServer(0) + t.exit_process_on_kill = False + + l = [] + l.append(('Def', 'description' , 'args')) + l.append(('Def1', 'description1', 'args1')) + l.append(('Def2', 'description2', 'args2')) + + msg = t.processor.format_completion_message('test_jyserver.py', l) + + self.assertEqual('@@COMPLETIONS(test_jyserver.py,(Def,description,args),(Def1,description1,args1),(Def2,description2,args2))END@@', msg) + + l = [] + l.append(('Def', 'desc,,r,,i()ption', '')) + l.append(('Def(1', 'descriptio(n1', '')) + l.append(('De,f)2', 'de,s,c,ription2', '')) + msg = t.processor.format_completion_message(None, l) + expected = '@@COMPLETIONS(None,(Def,desc%2C%2Cr%2C%2Ci%28%29ption, ),(Def%281,descriptio%28n1, ),(De%2Cf%292,de%2Cs%2Cc%2Cription2, ))END@@' + + self.assertEqual(expected, msg) + + + def test_completion_sockets_and_messages(self): + dbg('test_completion_sockets_and_messages') + t, socket = self.create_connections() + self.socket = socket + dbg('connections created') + + try: + #now that we have the connections all set up, check the code completion 
messages. + msg = urllib.quote_plus('math') + + toWrite = '@@IMPORTS:%sEND@@' % msg + dbg('writing' + str(toWrite)) + socket.send(toWrite) #math completions + completions = self.read_msg() + dbg(urllib.unquote_plus(completions)) + + start = '@@COMPLETIONS(' + self.assertTrue(completions.startswith(start), '%s DOESNT START WITH %s' % (completions, start)) + self.assertTrue(completions.find('@@COMPLETIONS') != -1) + self.assertTrue(completions.find('END@@') != -1) + + + msg = urllib.quote_plus('__builtin__.str') + toWrite = '@@IMPORTS:%sEND@@' % msg + dbg('writing' + str(toWrite)) + socket.send(toWrite) #math completions + completions = self.read_msg() + dbg(urllib.unquote_plus(completions)) + + start = '@@COMPLETIONS(' + self.assertTrue(completions.startswith(start), '%s DOESNT START WITH %s' % (completions, start)) + self.assertTrue(completions.find('@@COMPLETIONS') != -1) + self.assertTrue(completions.find('END@@') != -1) + + + + finally: + try: + self.send_kill_msg(socket) + + + while not t.ended: + pass #wait until it receives the message and quits. + + + socket.close() + except: + pass + + def get_free_port(self): + from _pydev_bundle.pydev_localhost import get_socket_name + return get_socket_name(close=True)[1] + + def create_connections(self): + ''' + Creates the connections needed for testing. + ''' + p1 = self.get_free_port() + from thread import start_new_thread + t = pycompletionserver.CompletionServer(p1) + t.exit_process_on_kill = False + + start_new_thread(t.run, ()) + + server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + server.bind((pycompletionserver.HOST, p1)) + server.listen(1) + + sock, _addr = server.accept() + + return t, sock + + def read_msg(self): + msg = '@@PROCESSING_END@@' + while msg.startswith('@@PROCESSING'): + msg = self.socket.recv(1024) + if msg.startswith('@@PROCESSING:'): + dbg('Status msg:' + str(msg)) + + while msg.find('END@@') == -1: + msg += self.socket.recv(1024) + + return msg + + def send_kill_msg(self, socket): + socket.send(pycompletionserver.MSG_KILL_SERVER) + + + + +# Run for jython in command line: +# c:\bin\jython2.7.0\bin\jython.exe -m py.test tests\test_jyserver.py diff --git a/ptvsd/pydevd/tests/test_jysimpleTipper.py b/ptvsd/pydevd/tests/test_jysimpleTipper.py new file mode 100644 index 00000000..7d5df76d --- /dev/null +++ b/ptvsd/pydevd/tests/test_jysimpleTipper.py @@ -0,0 +1,236 @@ +import unittest +import os +import sys +import pytest + +# Note: ant.jar and junit.jar must be in the PYTHONPATH (see jython_test_deps) + +IS_JYTHON = False +if sys.platform.find('java') != -1: + IS_JYTHON = True + from _pydev_bundle._pydev_jy_imports_tipper import ismethod + from _pydev_bundle._pydev_jy_imports_tipper import isclass + from _pydev_bundle._pydev_jy_imports_tipper import dir_obj + from _pydev_bundle import _pydev_jy_imports_tipper + from java.lang.reflect import Method #@UnresolvedImport + from java.lang import System #@UnresolvedImport + from java.lang import String #@UnresolvedImport + from java.lang.System import arraycopy #@UnresolvedImport + from java.lang.System import out #@UnresolvedImport + import java.lang.String #@UnresolvedImport + import org.python.core.PyDictionary #@UnresolvedImport + + +__DBG = 0 +def dbg(s): + if __DBG: + sys.stdout.write('%s\n' % (s,)) + + + +@pytest.mark.skipif(not IS_JYTHON, reason='Jython related test') +class TestMod(unittest.TestCase): + + def assert_args(self, tok, args, tips): + for a in tips: + if tok == a[0]: + self.assertEqual(args, a[2]) + return + raise AssertionError('%s not in %s', 
tok, tips) + + def assert_in(self, tok, tips): + self.assertEqual(4, len(tips[0])) + for a in tips: + if tok == a[0]: + return a + s = '' + for a in tips: + s += str(a) + s += '\n' + raise AssertionError('%s not in %s' % (tok, s)) + + def test_imports1a(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('java.util.HashMap') + assert f.endswith('rt.jar') + + def test_imports1c(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('java.lang.Class') + assert f.endswith('rt.jar') + + def test_imports1b(self): + try: + f, tip = _pydev_jy_imports_tipper.generate_tip('__builtin__.m') + self.fail('err') + except: + pass + + def test_imports1(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('junit.framework.TestCase') + assert f.endswith('junit.jar') + ret = self.assert_in('assertEqual', tip) +# self.assertEqual('', ret[2]) + + def test_imports2(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('junit.framework') + assert f.endswith('junit.jar') + ret = self.assert_in('TestCase', tip) + self.assertEqual('', ret[2]) + + def test_imports2a(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('org.apache.tools.ant') + assert f.endswith('ant.jar') + ret = self.assert_in('Task', tip) + self.assertEqual('', ret[2]) + + def test_imports3(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('os') + assert f.endswith('os.py') + ret = self.assert_in('path', tip) + self.assertEqual('', ret[2]) + + def test_tip_on_string(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('string') + self.assert_in('join', tip) + self.assert_in('uppercase', tip) + + def test_imports(self): + tip = _pydev_jy_imports_tipper.generate_tip('__builtin__')[1] + self.assert_in('tuple' , tip) + self.assert_in('RuntimeError' , tip) + self.assert_in('RuntimeWarning' , tip) + + def test_imports5(self): + f, tip = _pydev_jy_imports_tipper.generate_tip('java.lang') + assert f.endswith('rt.jar') + tup = self.assert_in('String' , tip) + self.assertEqual(str(_pydev_jy_imports_tipper.TYPE_CLASS), tup[3]) + + tip = _pydev_jy_imports_tipper.generate_tip('java')[1] + tup = self.assert_in('lang' , tip) + self.assertEqual(str(_pydev_jy_imports_tipper.TYPE_IMPORT), tup[3]) + + tip = _pydev_jy_imports_tipper.generate_tip('java.lang.String')[1] + tup = self.assert_in('indexOf' , tip) + self.assertEqual(str(_pydev_jy_imports_tipper.TYPE_FUNCTION), tup[3]) + + tip = _pydev_jy_imports_tipper.generate_tip('java.lang.String')[1] + tup = self.assert_in('charAt' , tip) + self.assertEqual(str(_pydev_jy_imports_tipper.TYPE_FUNCTION), tup[3]) + self.assertEqual('(int)', tup[2]) + + tup = self.assert_in('format' , tip) + self.assertEqual(str(_pydev_jy_imports_tipper.TYPE_FUNCTION), tup[3]) + self.assertEqual('(string, objectArray)', tup[2]) + self.assertTrue(tup[1].find('[Ljava.lang.Object;') == -1) + + tup = self.assert_in('getBytes', tip) + self.assertEqual(str(_pydev_jy_imports_tipper.TYPE_FUNCTION), tup[3]) + assert '[B' not in tup[1] + assert 'byte[]' in tup[1] + + f, tip = _pydev_jy_imports_tipper.generate_tip('__builtin__.str') + assert f.endswith('jython.jar') + self.assert_in('find' , tip) + + f, tip = _pydev_jy_imports_tipper.generate_tip('__builtin__.dict') + assert f.endswith('jython.jar') + self.assert_in('get' , tip) + + +@pytest.mark.skipif(not IS_JYTHON, reason='Jython related test') +class TestSearch(unittest.TestCase): + + def test_search_on_jython(self): + assert _pydev_jy_imports_tipper.search_definition('os')[0][0].split(os.sep)[-1] in ('javaos.py', 'os.py') + self.assertEqual(0, 
_pydev_jy_imports_tipper.search_definition('os')[0][1]) + + assert _pydev_jy_imports_tipper.search_definition('os.makedirs')[0][0].split(os.sep)[-1] in ('javaos.py', 'os.py') + self.assertNotEqual(0, _pydev_jy_imports_tipper.search_definition('os.makedirs')[0][1]) + + #print _pydev_jy_imports_tipper.search_definition('os.makedirs') + +@pytest.mark.skipif(not IS_JYTHON, reason='Jython related test') +class TestCompl(unittest.TestCase): + + def test_getting_info_on_jython(self): + + dbg('\n\n--------------------------- java') + assert not ismethod(java)[0] + assert not isclass(java) + assert _pydev_jy_imports_tipper.ismodule(java) + + dbg('\n\n--------------------------- java.lang') + assert not ismethod(java.lang)[0] + assert not isclass(java.lang) + assert _pydev_jy_imports_tipper.ismodule(java.lang) + + dbg('\n\n--------------------------- Method') + assert not ismethod(Method)[0] + assert isclass(Method) + + dbg('\n\n--------------------------- System') + assert not ismethod(System)[0] + assert isclass(System) + + dbg('\n\n--------------------------- String') + assert not ismethod(System)[0] + assert isclass(String) + assert len(dir_obj(String)) > 10 + + dbg('\n\n--------------------------- arraycopy') + isMet = ismethod(arraycopy) + assert isMet[0] + assert isMet[1][0].basic_as_str() == "function:arraycopy args=['java.lang.Object', 'int', 'java.lang.Object', 'int', 'int'], varargs=None, kwargs=None, docs:None" + assert not isclass(arraycopy) + + dbg('\n\n--------------------------- out') + isMet = ismethod(out) + assert not isMet[0] + assert not isclass(out) + + dbg('\n\n--------------------------- out.println') + isMet = ismethod(out.println) #@UndefinedVariable + assert isMet[0] + assert len(isMet[1]) == 10 + self.assertEqual(isMet[1][0].basic_as_str(), "function:println args=[], varargs=None, kwargs=None, docs:None") + assert isMet[1][1].basic_as_str() == "function:println args=['long'], varargs=None, kwargs=None, docs:None" + assert not isclass(out.println) #@UndefinedVariable + + dbg('\n\n--------------------------- str') + isMet = ismethod(str) + #the code below should work, but is failing on jython 22a1 + #assert isMet[0] + #assert isMet[1][0].basic_as_str() == "function:str args=['org.python.core.PyObject'], varargs=None, kwargs=None, docs:None" + assert not isclass(str) + + + def met1(): + a = 3 + return a + + dbg('\n\n--------------------------- met1') + isMet = ismethod(met1) + assert isMet[0] + assert isMet[1][0].basic_as_str() == "function:met1 args=[], varargs=None, kwargs=None, docs:None" + assert not isclass(met1) + + def met2(arg1, arg2, *vararg, **kwarg): + '''docmet2''' + + a = 1 + return a + + dbg('\n\n--------------------------- met2') + isMet = ismethod(met2) + assert isMet[0] + assert isMet[1][0].basic_as_str() == "function:met2 args=['arg1', 'arg2'], varargs=vararg, kwargs=kwarg, docs:docmet2" + assert not isclass(met2) + + +# Run for jython in command line: + +# On Windows: +# c:/bin/jython2.7.0/bin/jython.exe -Dpython.path=jython_test_deps/ant.jar;jython_test_deps/junit.jar -m py.test tests/test_jysimpleTipper.py + +# On Linux (different path separator for jars) +# jython -Dpython.path=jython_test_deps/ant.jar:jython_test_deps/junit.jar -m py.test tests/test_jysimpleTipper.py diff --git a/ptvsd/pydevd/tests/test_pydev_ipython_011.py b/ptvsd/pydevd/tests/test_pydev_ipython_011.py new file mode 100644 index 00000000..dca346cc --- /dev/null +++ b/ptvsd/pydevd/tests/test_pydev_ipython_011.py @@ -0,0 +1,313 @@ +import sys +import unittest +import threading +import 
os +from _pydev_bundle.pydev_imports import SimpleXMLRPCServer +from _pydev_bundle.pydev_localhost import get_localhost +from _pydev_bundle.pydev_console_utils import StdIn +import socket +import time +from _pydevd_bundle import pydevd_io +import pytest + +try: + xrange +except: + xrange = range + +def eq_(a, b): + if a != b: + raise AssertionError('%s != %s' % (a, b)) + +try: + from IPython import core + has_ipython = True +except: + has_ipython = False + +@pytest.mark.skipif(not has_ipython, reason='IPython not available') +class TestBase(unittest.TestCase): + + + def setUp(self): + from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend + + # PyDevFrontEnd depends on singleton in IPython, so you + # can't make multiple versions. So we reuse self.front_end for + # all the tests + self.front_end = get_pydev_frontend(get_localhost(), 0) + + from pydev_ipython.inputhook import set_return_control_callback + set_return_control_callback(lambda:True) + self.front_end.clear_buffer() + + def tearDown(self): + pass + + def add_exec(self, code, expected_more=False): + more = self.front_end.add_exec(code) + eq_(expected_more, more) + + def redirect_stdout(self): + from IPython.utils import io + + self.original_stdout = sys.stdout + sys.stdout = io.stdout = pydevd_io.IOBuf() + + def restore_stdout(self): + from IPython.utils import io + io.stdout = sys.stdout = self.original_stdout + + +@pytest.mark.skipif(not has_ipython, reason='IPython not available') +class TestPyDevFrontEnd(TestBase): + + def testAddExec_1(self): + self.add_exec('if True:', True) + + + def testAddExec_2(self): + #Change: 'more' must now be controlled in the client side after the initial 'True' returned. + self.add_exec('if True:\n testAddExec_a = 10\n', False) + assert 'testAddExec_a' in self.front_end.get_namespace() + + + def testAddExec_3(self): + assert 'testAddExec_x' not in self.front_end.get_namespace() + self.add_exec('if True:\n testAddExec_x = 10\n\n') + assert 'testAddExec_x' in self.front_end.get_namespace() + eq_(self.front_end.get_namespace()['testAddExec_x'], 10) + + + def test_get_namespace(self): + assert 'testGetNamespace_a' not in self.front_end.get_namespace() + self.add_exec('testGetNamespace_a = 10') + assert 'testGetNamespace_a' in self.front_end.get_namespace() + eq_(self.front_end.get_namespace()['testGetNamespace_a'], 10) + + + def test_complete(self): + unused_text, matches = self.front_end.complete('%') + assert len(matches) > 1, 'at least one magic should appear in completions' + + + def test_complete_does_not_do_python_matches(self): + # Test that IPython's completions do not do the things that + # PyDev's completions will handle + self.add_exec('testComplete_a = 5') + self.add_exec('testComplete_b = 10') + self.add_exec('testComplete_c = 15') + unused_text, matches = self.front_end.complete('testComplete_') + assert len(matches) == 0 + + + def testGetCompletions_1(self): + # Test the merged completions include the standard completions + self.add_exec('testComplete_a = 5') + self.add_exec('testComplete_b = 10') + self.add_exec('testComplete_c = 15') + res = self.front_end.getCompletions('testComplete_', 'testComplete_') + matches = [f[0] for f in res] + assert len(matches) == 3 + eq_(set(['testComplete_a', 'testComplete_b', 'testComplete_c']), set(matches)) + + + def testGetCompletions_2(self): + # Test that we get IPython completions in results + # we do this by checking kw completion which PyDev does + # not do by default + self.add_exec('def ccc(ABC=123): pass') + res = 
self.front_end.getCompletions('ccc(', '') + matches = [f[0] for f in res] + assert 'ABC=' in matches + + + def testGetCompletions_3(self): + # Test that magics return IPYTHON magic as type + res = self.front_end.getCompletions('%cd', '%cd') + assert len(res) == 1 + eq_(res[0][3], '12') # '12' == IToken.TYPE_IPYTHON_MAGIC + assert len(res[0][1]) > 100, 'docstring for %cd should be a reasonably long string' + +@pytest.mark.skipif(not has_ipython, reason='IPython not available') +class TestRunningCode(TestBase): + + def test_print(self): + self.redirect_stdout() + try: + self.add_exec('print("output")') + eq_(sys.stdout.getvalue(), 'output\n') + finally: + self.restore_stdout() + + + def testQuestionMark_1(self): + self.redirect_stdout() + try: + self.add_exec('?') + assert len(sys.stdout.getvalue()) > 1000, 'IPython help should be pretty big' + finally: + self.restore_stdout() + + + def testQuestionMark_2(self): + self.redirect_stdout() + try: + self.add_exec('int?') + assert sys.stdout.getvalue().find('Convert') != -1 + finally: + self.restore_stdout() + + + + def test_gui(self): + try: + import Tkinter + except: + return + else: + from pydev_ipython.inputhook import get_inputhook + assert get_inputhook() is None + self.add_exec('%gui tk') + # we can't test the GUI works here because we aren't connected to XML-RPC so + # nowhere for hook to run + assert get_inputhook() is not None + self.add_exec('%gui none') + assert get_inputhook() is None + + + def test_history(self): + ''' Make sure commands are added to IPython's history ''' + self.redirect_stdout() + try: + self.add_exec('a=1') + self.add_exec('b=2') + _ih = self.front_end.get_namespace()['_ih'] + eq_(_ih[-1], 'b=2') + eq_(_ih[-2], 'a=1') + + self.add_exec('history') + hist = sys.stdout.getvalue().split('\n') + eq_(hist[-1], '') + eq_(hist[-2], 'history') + eq_(hist[-3], 'b=2') + eq_(hist[-4], 'a=1') + finally: + self.restore_stdout() + + + def test_edit(self): + ''' Make sure we can issue an edit command''' + if os.environ.get('TRAVIS') == 'true': + # This test is too flaky on travis. + return + + from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend + + called_RequestInput = [False] + called_IPythonEditor = [False] + def start_client_thread(client_port): + class ClientThread(threading.Thread): + def __init__(self, client_port): + threading.Thread.__init__(self) + self.client_port = client_port + def run(self): + class HandleRequestInput: + def RequestInput(self): + called_RequestInput[0] = True + return '\n' + def IPythonEditor(self, name, line): + called_IPythonEditor[0] = (name, line) + return True + + handle_request_input = HandleRequestInput() + + from _pydev_bundle import pydev_localhost + self.client_server = client_server = SimpleXMLRPCServer( + (pydev_localhost.get_localhost(), self.client_port), logRequests=False) + client_server.register_function(handle_request_input.RequestInput) + client_server.register_function(handle_request_input.IPythonEditor) + client_server.serve_forever() + + def shutdown(self): + return + self.client_server.shutdown() + + client_thread = ClientThread(client_port) + client_thread.setDaemon(True) + client_thread.start() + return client_thread + + # PyDevFrontEnd depends on singleton in IPython, so you + # can't make multiple versions. 
So we reuse self.front_end for + # all the tests + s = socket.socket() + s.bind(('', 0)) + self.client_port = client_port = s.getsockname()[1] + s.close() + self.front_end = get_pydev_frontend(get_localhost(), client_port) + + client_thread = start_client_thread(self.client_port) + orig_stdin = sys.stdin + sys.stdin = StdIn(self, get_localhost(), self.client_port) + try: + filename = 'made_up_file.py' + self.add_exec('%edit ' + filename) + + for i in xrange(10): + if called_IPythonEditor[0] == (os.path.abspath(filename), '0'): + break + time.sleep(.1) + + if not called_IPythonEditor[0]: + # File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/interactiveshell.py", line 2883, in run_code + # exec(code_obj, self.user_global_ns, self.user_ns) + # File "", line 1, in + # get_ipython().magic('edit made_up_file.py') + # File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/interactiveshell.py", line 2205, in magic + # return self.run_line_magic(magic_name, magic_arg_s) + # File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/interactiveshell.py", line 2126, in run_line_magic + # result = fn(*args,**kwargs) + # File "", line 2, in edit + # File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/magic.py", line 193, in + # call = lambda f, *a, **k: f(*a, **k) + # File "/home/travis/miniconda/lib/python3.3/site-packages/IPython/core/magics/code.py", line 662, in edit + # self.shell.hooks.editor(filename,lineno) + # File "/home/travis/build/fabioz/PyDev.Debugger/pydev_ipython_console_011.py", line 70, in call_editor + # server.IPythonEditor(filename, str(line)) + # File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1090, in __call__ + # return self.__send(self.__name, args) + # File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1419, in __request + # verbose=self.__verbose + # File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1132, in request + # return self.single_request(host, handler, request_body, verbose) + # File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1143, in single_request + # http_conn = self.send_request(host, handler, request_body, verbose) + # File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1255, in send_request + # self.send_content(connection, request_body) + # File "/home/travis/miniconda/lib/python3.3/xmlrpc/client.py", line 1285, in send_content + # connection.endheaders(request_body) + # File "/home/travis/miniconda/lib/python3.3/http/client.py", line 1061, in endheaders + # self._send_output(message_body) + # File "/home/travis/miniconda/lib/python3.3/http/client.py", line 906, in _send_output + # self.send(msg) + # File "/home/travis/miniconda/lib/python3.3/http/client.py", line 844, in send + # self.connect() + # File "/home/travis/miniconda/lib/python3.3/http/client.py", line 822, in connect + # self.timeout, self.source_address) + # File "/home/travis/miniconda/lib/python3.3/socket.py", line 435, in create_connection + # raise err + # File "/home/travis/miniconda/lib/python3.3/socket.py", line 426, in create_connection + # sock.connect(sa) + # ConnectionRefusedError: [Errno 111] Connection refused + + # I.e.: just warn that the test failing, don't actually fail. 
+ sys.stderr.write('Test failed: this test is brittle in travis because sometimes the connection is refused (as above) and we do not have a callback.\n') + return + + eq_(called_IPythonEditor[0], (os.path.abspath(filename), '0')) + assert called_RequestInput[0], "Make sure the 'wait' parameter has been respected" + finally: + sys.stdin = orig_stdin + client_thread.shutdown() + diff --git a/ptvsd/pydevd/tests/test_pydevconsole.py b/ptvsd/pydevd/tests/test_pydevconsole.py new file mode 100644 index 00000000..4b5069b2 --- /dev/null +++ b/ptvsd/pydevd/tests/test_pydevconsole.py @@ -0,0 +1,247 @@ +import threading +import unittest +import sys +import pydevconsole +from _pydev_bundle.pydev_imports import xmlrpclib, SimpleXMLRPCServer +from _pydevd_bundle import pydevd_io + +try: + raw_input + raw_input_name = 'raw_input' +except NameError: + raw_input_name = 'input' + +#======================================================================================================================= +# Test +#======================================================================================================================= +class Test(unittest.TestCase): + + def test_console_hello(self): + self.original_stdout = sys.stdout + sys.stdout = pydevd_io.IOBuf() + try: + sys.stdout.encoding = sys.stdin.encoding + except AttributeError: + # In Python 3 encoding is not writable (whereas in Python 2 it doesn't exist). + pass + + try: + client_port, _server_port = self.get_free_addresses() + client_thread = self.start_client_thread(client_port) #@UnusedVariable + import time + time.sleep(.3) #let's give it some time to start the threads + + from _pydev_bundle import pydev_localhost + interpreter = pydevconsole.InterpreterInterface(pydev_localhost.get_localhost(), client_port, threading.currentThread()) + + (result,) = interpreter.hello("Hello pydevconsole") + self.assertEqual(result, "Hello eclipse") + finally: + sys.stdout = self.original_stdout + + + def test_console_requests(self): + self.original_stdout = sys.stdout + sys.stdout = pydevd_io.IOBuf() + + try: + client_port, _server_port = self.get_free_addresses() + client_thread = self.start_client_thread(client_port) #@UnusedVariable + import time + time.sleep(.3) #let's give it some time to start the threads + + from _pydev_bundle import pydev_localhost + from _pydev_bundle.pydev_console_utils import CodeFragment + + interpreter = pydevconsole.InterpreterInterface(pydev_localhost.get_localhost(), client_port, threading.currentThread()) + sys.stdout = pydevd_io.IOBuf() + interpreter.add_exec(CodeFragment('class Foo:\n CONSTANT=1\n')) + interpreter.add_exec(CodeFragment('foo=Foo()')) + interpreter.add_exec(CodeFragment('foo.__doc__=None')) + interpreter.add_exec(CodeFragment('val = %s()' % (raw_input_name,))) + interpreter.add_exec(CodeFragment('50')) + interpreter.add_exec(CodeFragment('print (val)')) + found = sys.stdout.getvalue().split() + try: + self.assertEqual(['50', 'input_request'], found) + except: + try: + self.assertEqual(['input_request'], found) #IPython + except: + self.assertEqual([u'50', u'input_request'], found[1:]) # IPython 5.1 + self.assertTrue(found[0].startswith(u'Out')) + + comps = interpreter.getCompletions('foo.', 'foo.') + self.assertTrue( + ('CONSTANT', '', '', '3') in comps or ('CONSTANT', '', '', '4') in comps, \ + 'Found: %s' % comps + ) + + comps = interpreter.getCompletions('"".', '"".') + self.assertTrue( + ('__add__', 'x.__add__(y) <==> x+y', '', '3') in comps or + ('__add__', '', '', '4') in comps or + ('__add__', 
'x.__add__(y) <==> x+y\r\nx.__add__(y) <==> x+y', '()', '2') in comps or + ('__add__', 'x.\n__add__(y) <==> x+yx.\n__add__(y) <==> x+y', '()', '2'), + 'Did not find __add__ in : %s' % (comps,) + ) + + + completions = interpreter.getCompletions('', '') + for c in completions: + if c[0] == 'AssertionError': + break + else: + self.fail('Could not find AssertionError') + + completions = interpreter.getCompletions('Assert', 'Assert') + for c in completions: + if c[0] == 'RuntimeError': + self.fail('Did not expect to find RuntimeError there') + + self.assertTrue(('__doc__', None, '', '3') not in interpreter.getCompletions('foo.CO', 'foo.')) + + comps = interpreter.getCompletions('va', 'va') + self.assertTrue(('val', '', '', '3') in comps or ('val', '', '', '4') in comps) + + interpreter.add_exec(CodeFragment('s = "mystring"')) + + desc = interpreter.getDescription('val') + self.assertTrue(desc.find('str(object) -> string') >= 0 or + desc == "'input_request'" or + desc.find('str(string[, encoding[, errors]]) -> str') >= 0 or + desc.find('str(Char* value)') >= 0 or + desc.find('str(object=\'\') -> string') >= 0 or + desc.find('str(value: Char*)') >= 0 or + desc.find('str(object=\'\') -> str') >= 0 or + desc.find('The most base type') >= 0 # Jython 2.7 is providing this :P + , + 'Could not find what was needed in %s' % desc) + + desc = interpreter.getDescription('val.join') + self.assertTrue(desc.find('S.join(sequence) -> string') >= 0 or + desc.find('S.join(sequence) -> str') >= 0 or + desc.find('S.join(iterable) -> string') >= 0 or + desc == "" or + desc == "" or + desc.find('str join(str self, list sequence)') >= 0 or + desc.find('S.join(iterable) -> str') >= 0 or + desc.find('join(self: str, sequence: list) -> str') >= 0, + "Could not recognize: %s" % (desc,)) + finally: + sys.stdout = self.original_stdout + + + def start_client_thread(self, client_port): + class ClientThread(threading.Thread): + def __init__(self, client_port): + threading.Thread.__init__(self) + self.client_port = client_port + + def run(self): + class HandleRequestInput: + def RequestInput(self): + client_thread.requested_input = True + return 'input_request' + + def NotifyFinished(self, *args, **kwargs): + client_thread.notified_finished += 1 + return 1 + + handle_request_input = HandleRequestInput() + + from _pydev_bundle import pydev_localhost + client_server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), self.client_port), logRequests=False) + client_server.register_function(handle_request_input.RequestInput) + client_server.register_function(handle_request_input.NotifyFinished) + client_server.serve_forever() + + client_thread = ClientThread(client_port) + client_thread.requested_input = False + client_thread.notified_finished = 0 + client_thread.setDaemon(True) + client_thread.start() + return client_thread + + + def start_debugger_server_thread(self, debugger_port, socket_code): + class DebuggerServerThread(threading.Thread): + def __init__(self, debugger_port, socket_code): + threading.Thread.__init__(self) + self.debugger_port = debugger_port + self.socket_code = socket_code + def run(self): + import socket + s = socket.socket() + s.bind(('', debugger_port)) + s.listen(1) + socket, unused_addr = s.accept() + socket_code(socket) + + debugger_thread = DebuggerServerThread(debugger_port, socket_code) + debugger_thread.setDaemon(True) + debugger_thread.start() + return debugger_thread + + + def get_free_addresses(self): + from _pydev_bundle.pydev_localhost import get_socket_names + socket_names = 
get_socket_names(2, True) + port0 = socket_names[0][1] + port1 = socket_names[1][1] + + assert port0 != port1 + assert port0 > 0 + assert port1 > 0 + + return port0, port1 + + + def test_server(self): + self.original_stdout = sys.stdout + sys.stdout = pydevd_io.IOBuf() + try: + client_port, server_port = self.get_free_addresses() + class ServerThread(threading.Thread): + def __init__(self, client_port, server_port): + threading.Thread.__init__(self) + self.client_port = client_port + self.server_port = server_port + + def run(self): + from _pydev_bundle import pydev_localhost + pydevconsole.start_server(pydev_localhost.get_localhost(), self.server_port, self.client_port) + server_thread = ServerThread(client_port, server_port) + server_thread.setDaemon(True) + server_thread.start() + + client_thread = self.start_client_thread(client_port) #@UnusedVariable + + import time + time.sleep(.3) #let's give it some time to start the threads + sys.stdout = pydevd_io.IOBuf() + + from _pydev_bundle import pydev_localhost + server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), server_port)) + server.execLine('class Foo:') + server.execLine(' pass') + server.execLine('') + server.execLine('foo = Foo()') + server.execLine('a = %s()' % (raw_input_name,)) + server.execLine('print (a)') + initial = time.time() + while not client_thread.requested_input: + if time.time() - initial > 2: + raise AssertionError('Did not get the return asked before the timeout.') + time.sleep(.1) + + found = sys.stdout.getvalue() + while ['input_request'] != found.split(): + found += sys.stdout.getvalue() + if time.time() - initial > 2: + break + time.sleep(.1) + self.assertEqual(['input_request'], found.split()) + finally: + sys.stdout = self.original_stdout + diff --git a/ptvsd/pydevd/tests/test_pyserver.py b/ptvsd/pydevd/tests/test_pyserver.py new file mode 100644 index 00000000..ab0b181b --- /dev/null +++ b/ptvsd/pydevd/tests/test_pyserver.py @@ -0,0 +1,170 @@ +''' +@author Fabio Zadrozny +''' +import sys +import pytest +from _pydev_imps._pydev_saved_modules import thread + +start_new_thread = thread.start_new_thread + + +IS_PYTHON_3_ONWARDS = sys.version_info[0] >= 3 +IS_JYTHON = sys.platform.find('java') != -1 + +try: + import __builtin__ #@UnusedImport + BUILTIN_MOD = '__builtin__' +except ImportError: + BUILTIN_MOD = 'builtins' + + +if not IS_JYTHON: + import pycompletionserver + import socket + if not IS_PYTHON_3_ONWARDS: + from urllib import quote_plus, unquote_plus + def send(s, msg): + s.send(msg) + else: + from urllib.parse import quote_plus, unquote_plus #Python 3.0 + def send(s, msg): + s.send(bytearray(msg, 'utf-8')) + +import unittest + +@pytest.mark.skipif(IS_JYTHON, reason='Not applicable to Jython') +class TestCPython(unittest.TestCase): + + def test_message(self): + t = pycompletionserver.CompletionServer(0) + + l = [] + l.append(('Def', 'description' , 'args')) + l.append(('Def1', 'description1', 'args1')) + l.append(('Def2', 'description2', 'args2')) + + msg = t.processor.format_completion_message(None, l) + + self.assertEqual('@@COMPLETIONS(None,(Def,description,args),(Def1,description1,args1),(Def2,description2,args2))END@@', msg) + l = [] + l.append(('Def', 'desc,,r,,i()ption', '')) + l.append(('Def(1', 'descriptio(n1', '')) + l.append(('De,f)2', 'de,s,c,ription2', '')) + msg = t.processor.format_completion_message(None, l) + self.assertEqual('@@COMPLETIONS(None,(Def,desc%2C%2Cr%2C%2Ci%28%29ption, ),(Def%281,descriptio%28n1, ),(De%2Cf%292,de%2Cs%2Cc%2Cription2, ))END@@', msg) + 
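The expected strings in test_message (and the socket tests below) spell out the completion server's wire format; a small sketch of a request/response pair based only on what these tests assert (illustrative; `request` and `response` are just local names):

    try:
        from urllib.parse import quote_plus, unquote_plus  # Python 3
    except ImportError:
        from urllib import quote_plus, unquote_plus        # Python 2

    # Request: the module name is quote_plus-escaped and framed in @@IMPORTS:...END@@
    # (definition searches use @@SEARCH...END@@ in the same way).
    request = '@@IMPORTS:%sEND@@' % quote_plus('math')

    # Response: '@@COMPLETIONS(<file>,(<name>,<doc>,<args>),...)END@@', with each
    # field escaped via quote_plus by format_completion_message, so a client splits
    # the tuples and unescapes them with unquote_plus; '@@PROCESSING' status
    # messages may arrive first and are skipped (see read_msg below).
    response = '@@COMPLETIONS(None,(Def,description,args))END@@'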
def create_connections(self, p1=50002): + ''' + Creates the connections needed for testing. + ''' + server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + server.bind((pycompletionserver.HOST, p1)) + server.listen(1) #socket to receive messages. + + t = pycompletionserver.CompletionServer(p1) + t.exit_process_on_kill = False + start_new_thread(t.run, ()) + + s, addr = server.accept() + + return t, s + + + def read_msg(self): + finish = False + msg = '' + while finish == False: + m = self.socket.recv(1024 * 4) + if IS_PYTHON_3_ONWARDS: + m = m.decode('utf-8') + if m.startswith('@@PROCESSING'): + sys.stdout.write('Status msg: %s\n' % (msg,)) + else: + msg += m + + if msg.find('END@@') != -1: + finish = True + + return msg + + def test_completion_sockets_and_messages(self): + t, socket = self.create_connections() + self.socket = socket + + try: + #now that we have the connections all set up, check the code completion messages. + msg = quote_plus('math') + send(socket, '@@IMPORTS:%sEND@@' % msg) #math completions + completions = self.read_msg() + #print_ unquote_plus(completions) + + #math is a builtin and because of that, it starts with None as a file + start = '@@COMPLETIONS(None,(__doc__,' + start_2 = '@@COMPLETIONS(None,(__name__,' + if '/math.so,' in completions or\ + '/math.cpython-33m.so,' in completions or \ + '/math.cpython-34m.so,' in completions or \ + 'math.cpython-35m' in completions or \ + 'math.cpython-36m' in completions: + return + self.assertTrue(completions.startswith(start) or completions.startswith(start_2), '%s DOESNT START WITH %s' % (completions, (start, start_2))) + + self.assertTrue('@@COMPLETIONS' in completions) + self.assertTrue('END@@' in completions) + + + #now, test i + msg = quote_plus('%s.list' % BUILTIN_MOD) + send(socket, "@@IMPORTS:%s\nEND@@" % msg) + found = self.read_msg() + self.assertTrue('sort' in found, 'Could not find sort in: %s' % (found,)) + + #now, test search + msg = quote_plus('inspect.ismodule') + send(socket, '@@SEARCH%sEND@@' % msg) #math completions + found = self.read_msg() + self.assertTrue('inspect.py' in found) + for i in range(33, 100): + if str(i) in found: + break + else: + self.fail('Could not find the ismodule line in %s' % (found,)) + + #now, test search + msg = quote_plus('inspect.CO_NEWLOCALS') + send(socket, '@@SEARCH%sEND@@' % msg) #math completions + found = self.read_msg() + self.assertTrue('inspect.py' in found) + self.assertTrue('CO_NEWLOCALS' in found) + + #now, test search + msg = quote_plus('inspect.BlockFinder.tokeneater') + send(socket, '@@SEARCH%sEND@@' % msg) + found = self.read_msg() + self.assertTrue('inspect.py' in found) +# self.assertTrue('CO_NEWLOCALS' in found) + + #reload modules test +# send(socket, '@@RELOAD_MODULES_END@@') +# ok = self.read_msg() +# self.assertEqual('@@MSG_OK_END@@' , ok) +# this test is not executed because it breaks our current enviroment. + + finally: + try: + sys.stdout.write('succedded...sending kill msg\n') + self.send_kill_msg(socket) + + +# while not hasattr(t, 'ended'): +# pass #wait until it receives the message and quits. 
+ + + socket.close() + self.socket.close() + except: + pass + + def send_kill_msg(self, socket): + socket.send(pycompletionserver.MSG_KILL_SERVER) + + diff --git a/ptvsd/pydevd/tests/test_simpleTipper.py b/ptvsd/pydevd/tests/test_simpleTipper.py new file mode 100644 index 00000000..8ea86060 --- /dev/null +++ b/ptvsd/pydevd/tests/test_simpleTipper.py @@ -0,0 +1,202 @@ +''' +@author Fabio Zadrozny +''' +from _pydev_bundle import _pydev_imports_tipper +import inspect +import pytest +import sys +import unittest + +try: + import __builtin__ #@UnusedImport + BUILTIN_MOD = '__builtin__' +except ImportError: + BUILTIN_MOD = 'builtins' + + +IS_JYTHON = sys.platform.find('java') != -1 + +HAS_WX = False + + +@pytest.mark.skipif(IS_JYTHON, reason='CPython related test') +class TestCPython(unittest.TestCase): + + def p(self, t): + for a in t: + sys.stdout.write('%s\n' % (a,)) + + def test_imports3(self): + tip = _pydev_imports_tipper.generate_tip('os') + ret = self.assert_in('path', tip) + self.assertEqual('', ret[2]) + + def test_imports2(self): + try: + tip = _pydev_imports_tipper.generate_tip('OpenGL.GLUT') + self.assert_in('glutDisplayFunc', tip) + self.assert_in('glutInitDisplayMode', tip) + except ImportError: + pass + + def test_imports4(self): + try: + tip = _pydev_imports_tipper.generate_tip('mx.DateTime.mxDateTime.mxDateTime') + self.assert_in('now', tip) + except ImportError: + pass + + def test_imports5(self): + tip = _pydev_imports_tipper.generate_tip('%s.list' % BUILTIN_MOD) + s = self.assert_in('sort', tip) + self.check_args( + s, + '(cmp=None, key=None, reverse=False)', + '(self, object cmp, object key, bool reverse)', + '(self, cmp: object, key: object, reverse: bool)', + '(key=None, reverse=False)', + ) + + def test_imports2a(self): + tips = _pydev_imports_tipper.generate_tip('%s.RuntimeError' % BUILTIN_MOD) + self.assert_in('__doc__', tips) + + def test_imports2b(self): + try: + file + except: + pass + else: + tips = _pydev_imports_tipper.generate_tip('%s' % BUILTIN_MOD) + t = self.assert_in('file' , tips) + self.assertTrue('->' in t[1].strip() or 'file' in t[1]) + + def test_imports2c(self): + try: + file # file is not available on py 3 + except: + pass + else: + tips = _pydev_imports_tipper.generate_tip('%s.file' % BUILTIN_MOD) + t = self.assert_in('readlines' , tips) + self.assertTrue('->' in t[1] or 'sizehint' in t[1]) + + def test_imports(self): + ''' + You can print_ the results to check... 
+ ''' + if HAS_WX: + tip = _pydev_imports_tipper.generate_tip('wxPython.wx') + self.assert_in('wxApp' , tip) + + tip = _pydev_imports_tipper.generate_tip('wxPython.wx.wxApp') + + try: + tip = _pydev_imports_tipper.generate_tip('qt') + self.assert_in('QWidget' , tip) + self.assert_in('QDialog' , tip) + + tip = _pydev_imports_tipper.generate_tip('qt.QWidget') + self.assert_in('rect' , tip) + self.assert_in('rect' , tip) + self.assert_in('AltButton' , tip) + + tip = _pydev_imports_tipper.generate_tip('qt.QWidget.AltButton') + self.assert_in('__xor__' , tip) + + tip = _pydev_imports_tipper.generate_tip('qt.QWidget.AltButton.__xor__') + self.assert_in('__class__' , tip) + except ImportError: + pass + + tip = _pydev_imports_tipper.generate_tip(BUILTIN_MOD) +# for t in tip[1]: +# print_ t + self.assert_in('object' , tip) + self.assert_in('tuple' , tip) + self.assert_in('list' , tip) + self.assert_in('RuntimeError' , tip) + self.assert_in('RuntimeWarning' , tip) + + # Remove cmp as it's not available on py 3 + #t = self.assert_in('cmp' , tip) + #self.check_args(t, '(x, y)', '(object x, object y)', '(x: object, y: object)') #args + + t = self.assert_in('isinstance' , tip) + self.check_args(t, '(object, class_or_type_or_tuple)', '(object o, type typeinfo)', '(o: object, typeinfo: type)', '(obj, class_or_tuple)') #args + + t = self.assert_in('compile' , tip) + self.check_args(t, '(source, filename, mode)', '()', '(o: object, name: str, val: object)', '(source, filename, mode, flags, dont_inherit, optimize)') #args + + t = self.assert_in('setattr' , tip) + self.check_args(t, '(object, name, value)', '(object o, str name, object val)', '(o: object, name: str, val: object)', '(obj, name, value)') #args + + try: + import compiler + compiler_module = 'compiler' + except ImportError: + try: + import ast + compiler_module = 'ast' + except ImportError: + compiler_module = None + + if compiler_module is not None: #Not available in iron python + tip = _pydev_imports_tipper.generate_tip(compiler_module) + if compiler_module == 'compiler': + self.assert_args('parse', '(buf, mode)', tip) + self.assert_args('walk', '(tree, visitor, walker, verbose)', tip) + self.assert_in('parseFile' , tip) + else: + self.assert_args('parse', '(source, filename, mode)', tip) + self.assert_args('walk', '(node)', tip) + self.assert_in('parse' , tip) + + + def check_args(self, t, *expected): + for x in expected: + if x == t[2]: + return + self.fail('Found: %s. 
Expected: %s' % (t[2], expected)) + + + def assert_args(self, tok, args, tips): + for a in tips[1]: + if tok == a[0]: + self.assertEqual(args, a[2]) + return + raise AssertionError('%s not in %s', tok, tips) + + def assert_in(self, tok, tips): + for a in tips[1]: + if tok == a[0]: + return a + raise AssertionError('%s not in %s' % (tok, tips)) + + + def test_search(self): + s = _pydev_imports_tipper.search_definition('inspect.ismodule') + (f, line, col), foundAs = s + self.assertTrue(line > 0) + + + def test_dot_net_libraries(self): + if sys.platform == 'cli': + tip = _pydev_imports_tipper.generate_tip('System.Drawing') + self.assert_in('Brushes' , tip) + + tip = _pydev_imports_tipper.generate_tip('System.Drawing.Brushes') + self.assert_in('Aqua' , tip) + + + def test_inspect(self): + + class C(object): + def metA(self, a, b): + pass + + obj = C.metA + if inspect.ismethod (obj): + pass +# print_ obj.im_func +# print_ inspect.getargspec(obj.im_func) diff --git a/ptvsd/pydevd/tests_mainloop/README b/ptvsd/pydevd/tests_mainloop/README new file mode 100644 index 00000000..65e699b9 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/README @@ -0,0 +1,4 @@ +# Parts of IPython, files from: https://github.com/ipython/ipython/tree/rel-1.0.0/examples/lib +# The files in this folder are manual tests for main loop integration + +# These tests have been modified to work in the PyDev Console context diff --git a/ptvsd/pydevd/tests_mainloop/__init__.py b/ptvsd/pydevd/tests_mainloop/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/tests_mainloop/gui-glut.py b/ptvsd/pydevd/tests_mainloop/gui-glut.py new file mode 100644 index 00000000..34a16b45 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/gui-glut.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +"""Simple GLUT example to manually test event loop integration. + +To run this: +1) Enable the PyDev GUI event loop integration for glut +2) do an execfile on this script +3) ensure you have a working GUI simultaneously with an + interactive console +4) run: gl.glClearColor(1,1,1,1) +""" + +if __name__ == '__main__': + + #!/usr/bin/env python + import sys + import OpenGL.GL as gl + import OpenGL.GLUT as glut + + def close(): + glut.glutDestroyWindow(glut.glutGetWindow()) + + def display(): + gl.glClear (gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT) + glut.glutSwapBuffers() + + def resize(width,height): + gl.glViewport(0, 0, width, height+4) + gl.glMatrixMode(gl.GL_PROJECTION) + gl.glLoadIdentity() + gl.glOrtho(0, width, 0, height+4, -1, 1) + gl.glMatrixMode(gl.GL_MODELVIEW) + + if glut.glutGetWindow() > 0: + interactive = True + glut.glutInit(sys.argv) + glut.glutInitDisplayMode(glut.GLUT_DOUBLE | + glut.GLUT_RGBA | + glut.GLUT_DEPTH) + else: + interactive = False + + glut.glutCreateWindow('gui-glut') + glut.glutDisplayFunc(display) + glut.glutReshapeFunc(resize) + # This is necessary on osx to be able to close the window + # (else the close button is disabled) + if sys.platform == 'darwin' and not bool(glut.HAVE_FREEGLUT): + glut.glutWMCloseFunc(close) + gl.glClearColor(0,0,0,1) + + if not interactive: + glut.glutMainLoop() diff --git a/ptvsd/pydevd/tests_mainloop/gui-gtk.py b/ptvsd/pydevd/tests_mainloop/gui-gtk.py new file mode 100644 index 00000000..6df5c782 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/gui-gtk.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python +"""Simple GTK example to manually test event loop integration. 
+ +To run this: +1) Enable the PyDev GUI event loop integration for gtk +2) do an execfile on this script +3) ensure you have a working GUI simultaneously with an + interactive console +""" + +if __name__ == '__main__': + import pygtk + pygtk.require('2.0') + import gtk + + + def hello_world(wigdet, data=None): + print("Hello World") + + def delete_event(widget, event, data=None): + return False + + def destroy(widget, data=None): + gtk.main_quit() + + window = gtk.Window(gtk.WINDOW_TOPLEVEL) + window.connect("delete_event", delete_event) + window.connect("destroy", destroy) + button = gtk.Button("Hello World") + button.connect("clicked", hello_world, None) + + window.add(button) + button.show() + window.show() + diff --git a/ptvsd/pydevd/tests_mainloop/gui-gtk3.py b/ptvsd/pydevd/tests_mainloop/gui-gtk3.py new file mode 100644 index 00000000..6351d523 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/gui-gtk3.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +"""Simple Gtk example to manually test event loop integration. + +To run this: +1) Enable the PyDev GUI event loop integration for gtk3 +2) do an execfile on this script +3) ensure you have a working GUI simultaneously with an + interactive console +""" + +if __name__ == '__main__': + from gi.repository import Gtk + + + def hello_world(wigdet, data=None): + print("Hello World") + + def delete_event(widget, event, data=None): + return False + + def destroy(widget, data=None): + Gtk.main_quit() + + window = Gtk.Window(Gtk.WindowType.TOPLEVEL) + window.connect("delete_event", delete_event) + window.connect("destroy", destroy) + button = Gtk.Button("Hello World") + button.connect("clicked", hello_world, None) + + window.add(button) + button.show() + window.show() + diff --git a/ptvsd/pydevd/tests_mainloop/gui-pyglet.py b/ptvsd/pydevd/tests_mainloop/gui-pyglet.py new file mode 100644 index 00000000..70f1a7f6 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/gui-pyglet.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +"""Simple pyglet example to manually test event loop integration. + +To run this: +1) Enable the PyDev GUI event loop integration for pyglet +2) do an execfile on this script +3) ensure you have a working GUI simultaneously with an + interactive console +""" + +if __name__ == '__main__': + import pyglet + + + window = pyglet.window.Window() + label = pyglet.text.Label('Hello, world', + font_name='Times New Roman', + font_size=36, + x=window.width//2, y=window.height//2, + anchor_x='center', anchor_y='center') + @window.event + def on_close(): + window.close() + + @window.event + def on_draw(): + window.clear() + label.draw() diff --git a/ptvsd/pydevd/tests_mainloop/gui-qt.py b/ptvsd/pydevd/tests_mainloop/gui-qt.py new file mode 100644 index 00000000..30fc48d3 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/gui-qt.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +"""Simple Qt4 example to manually test event loop integration. 
+ +To run this: +1) Enable the PyDev GUI event loop integration for qt +2) do an execfile on this script +3) ensure you have a working GUI simultaneously with an + interactive console + +Ref: Modified from http://zetcode.com/tutorials/pyqt4/firstprograms/ +""" + +if __name__ == '__main__': + import sys + from PyQt4 import QtGui, QtCore + + class SimpleWindow(QtGui.QWidget): + def __init__(self, parent=None): + QtGui.QWidget.__init__(self, parent) + + self.setGeometry(300, 300, 200, 80) + self.setWindowTitle('Hello World') + + quit = QtGui.QPushButton('Close', self) + quit.setGeometry(10, 10, 60, 35) + + self.connect(quit, QtCore.SIGNAL('clicked()'), + self, QtCore.SLOT('close()')) + + if __name__ == '__main__': + app = QtCore.QCoreApplication.instance() + if app is None: + app = QtGui.QApplication([]) + + sw = SimpleWindow() + sw.show() diff --git a/ptvsd/pydevd/tests_mainloop/gui-tk.py b/ptvsd/pydevd/tests_mainloop/gui-tk.py new file mode 100644 index 00000000..4cef45f9 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/gui-tk.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +"""Simple Tk example to manually test event loop integration. + +To run this: +1) Enable the PyDev GUI event loop integration for tk +2) do an execfile on this script +3) ensure you have a working GUI simultaneously with an + interactive console +""" + +if __name__ == '__main__': + + try: + from Tkinter import * + except: + # Python 3 + from tkinter import * + + class MyApp: + + def __init__(self, root): + frame = Frame(root) + frame.pack() + + self.button = Button(frame, text="Hello", command=self.hello_world) + self.button.pack(side=LEFT) + + def hello_world(self): + print("Hello World!") + + root = Tk() + + app = MyApp(root) diff --git a/ptvsd/pydevd/tests_mainloop/gui-wx.py b/ptvsd/pydevd/tests_mainloop/gui-wx.py new file mode 100644 index 00000000..dfd35d84 --- /dev/null +++ b/ptvsd/pydevd/tests_mainloop/gui-wx.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python +""" +A Simple wx example to test PyDev's event loop integration. + +To run this: +1) Enable the PyDev GUI event loop integration for wx +2) do an execfile on this script +3) ensure you have a working GUI simultaneously with an + interactive console + +Ref: Modified from wxPython source code wxPython/samples/simple/simple.py +""" + +if __name__ == '__main__': + + import wx + + + class MyFrame(wx.Frame): + """ + This is MyFrame. It just shows a few controls on a wxPanel, + and has a simple menu. + """ + def __init__(self, parent, title): + wx.Frame.__init__(self, parent, -1, title, + pos=(150, 150), size=(350, 200)) + + # Create the menubar + menuBar = wx.MenuBar() + + # and a menu + menu = wx.Menu() + + # add an item to the menu, using \tKeyName automatically + # creates an accelerator, the third param is some help text + # that will show up in the statusbar + menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit this simple sample") + + # bind the menu event to an event handler + self.Bind(wx.EVT_MENU, self.on_time_to_close, id=wx.ID_EXIT) + + # and put the menu on the menubar + menuBar.Append(menu, "&File") + self.SetMenuBar(menuBar) + + self.CreateStatusBar() + + # Now create the Panel to put the other controls on. 
+ panel = wx.Panel(self) + + # and a few controls + text = wx.StaticText(panel, -1, "Hello World!") + text.SetFont(wx.Font(14, wx.SWISS, wx.NORMAL, wx.BOLD)) + text.SetSize(text.GetBestSize()) + btn = wx.Button(panel, -1, "Close") + funbtn = wx.Button(panel, -1, "Just for fun...") + + # bind the button events to handlers + self.Bind(wx.EVT_BUTTON, self.on_time_to_close, btn) + self.Bind(wx.EVT_BUTTON, self.on_fun_button, funbtn) + + # Use a sizer to layout the controls, stacked vertically and with + # a 10 pixel border around each + sizer = wx.BoxSizer(wx.VERTICAL) + sizer.Add(text, 0, wx.ALL, 10) + sizer.Add(btn, 0, wx.ALL, 10) + sizer.Add(funbtn, 0, wx.ALL, 10) + panel.SetSizer(sizer) + panel.Layout() + + + def on_time_to_close(self, evt): + """Event handler for the button click.""" + print("See ya later!") + self.Close() + + def on_fun_button(self, evt): + """Event handler for the button click.""" + print("Having fun yet?") + + + class MyApp(wx.App): + def OnInit(self): + frame = MyFrame(None, "Simple wxPython App") + self.SetTopWindow(frame) + + print("Print statements go to this stdout window by default.") + + frame.Show(True) + return True + + + if __name__ == '__main__': + + app = wx.GetApp() + if app is None: + app = MyApp(redirect=False, clearSigInt=False) + else: + frame = MyFrame(None, "Simple wxPython App") + app.SetTopWindow(frame) + print("Print statements go to this stdout window by default.") + frame.Show(True) + diff --git a/ptvsd/pydevd/tests_python/__init__.py b/ptvsd/pydevd/tests_python/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/tests_python/_bytecode_many_names_example.py b/ptvsd/pydevd/tests_python/_bytecode_many_names_example.py new file mode 100644 index 00000000..6221725a --- /dev/null +++ b/ptvsd/pydevd/tests_python/_bytecode_many_names_example.py @@ -0,0 +1,268 @@ + + +def foo(): + a0 = 1 + a1 = 1 + a2 = 1 + a3 = 1 + a4 = 1 + a5 = 1 + a6 = 1 + a7 = 1 + a8 = 1 + a9 = 1 + a10 = 1 + a11 = 1 + a12 = 1 + a13 = 1 + a14 = 1 + a15 = 1 + a16 = 1 + a17 = 1 + a18 = 1 + a19 = 1 + a20 = 1 + a21 = 1 + a22 = 1 + a23 = 1 + a24 = 1 + a25 = 1 + a26 = 1 + a27 = 1 + a28 = 1 + a29 = 1 + a30 = 1 + a31 = 1 + a32 = 1 + a33 = 1 + a34 = 1 + a35 = 1 + a36 = 1 + a37 = 1 + a38 = 1 + a39 = 1 + a40 = 1 + a41 = 1 + a42 = 1 + a43 = 1 + a44 = 1 + a45 = 1 + a46 = 1 + a47 = 1 + a48 = 1 + a49 = 1 + a50 = 1 + a51 = 1 + a52 = 1 + a53 = 1 + a54 = 1 + a55 = 1 + a56 = 1 + a57 = 1 + a58 = 1 + a59 = 1 + a60 = 1 + a61 = 1 + a62 = 1 + a63 = 1 + a64 = 1 + a65 = 1 + a66 = 1 + a67 = 1 + a68 = 1 + a69 = 1 + a70 = 1 + a71 = 1 + a72 = 1 + a73 = 1 + a74 = 1 + a75 = 1 + a76 = 1 + a77 = 1 + a78 = 1 + a79 = 1 + a80 = 1 + a81 = 1 + a82 = 1 + a83 = 1 + a84 = 1 + a85 = 1 + a86 = 1 + a87 = 1 + a88 = 1 + a89 = 1 + a90 = 1 + a91 = 1 + a92 = 1 + a93 = 1 + a94 = 1 + a95 = 1 + a96 = 1 + a97 = 1 + a98 = 1 + a99 = 1 + a100 = 1 + a101 = 1 + a102 = 1 + a103 = 1 + a104 = 1 + a105 = 1 + a106 = 1 + a107 = 1 + a108 = 1 + a109 = 1 + a110 = 1 + a111 = 1 + a112 = 1 + a113 = 1 + a114 = 1 + a115 = 1 + a116 = 1 + a117 = 1 + a118 = 1 + a119 = 1 + a120 = 1 + a121 = 1 + a122 = 1 + a123 = 1 + a124 = 1 + a125 = 1 + a126 = 1 + a127 = 1 + a128 = 1 + a129 = 1 + a130 = 1 + a131 = 1 + a132 = 1 + a133 = 1 + a134 = 1 + a135 = 1 + a136 = 1 + a137 = 1 + a138 = 1 + a139 = 1 + a140 = 1 + a141 = 1 + a142 = 1 + a143 = 1 + a144 = 1 + a145 = 1 + a146 = 1 + a147 = 1 + a148 = 1 + a149 = 1 + a150 = 1 + a151 = 1 + a152 = 1 + a153 = 1 + a154 = 1 + a155 = 1 + a156 = 1 + a157 = 1 + a158 = 1 + a159 = 1 + a160 = 1 + a161 = 1 + 
a162 = 1 + a163 = 1 + a164 = 1 + a165 = 1 + a166 = 1 + a167 = 1 + a168 = 1 + a169 = 1 + a170 = 1 + a171 = 1 + a172 = 1 + a173 = 1 + a174 = 1 + a175 = 1 + a176 = 1 + a177 = 1 + a178 = 1 + a179 = 1 + a180 = 1 + a181 = 1 + a182 = 1 + a183 = 1 + a184 = 1 + a185 = 1 + a186 = 1 + a187 = 1 + a188 = 1 + a189 = 1 + a190 = 1 + a191 = 1 + a192 = 1 + a193 = 1 + a194 = 1 + a195 = 1 + a196 = 1 + a197 = 1 + a198 = 1 + a199 = 1 + a200 = 1 + a201 = 1 + a202 = 1 + a203 = 1 + a204 = 1 + a205 = 1 + a206 = 1 + a207 = 1 + a208 = 1 + a209 = 1 + a210 = 1 + a211 = 1 + a212 = 1 + a213 = 1 + a214 = 1 + a215 = 1 + a216 = 1 + a217 = 1 + a218 = 1 + a219 = 1 + a220 = 1 + a221 = 1 + a222 = 1 + a223 = 1 + a224 = 1 + a225 = 1 + a226 = 1 + a227 = 1 + a228 = 1 + a229 = 1 + a230 = 1 + a231 = 1 + a232 = 1 + a233 = 1 + a234 = 1 + a235 = 1 + a236 = 1 + a237 = 1 + a238 = 1 + a239 = 1 + a240 = 1 + a241 = 1 + a242 = 1 + a243 = 1 + a244 = 1 + a245 = 1 + a246 = 1 + a247 = 1 + a248 = 1 + a249 = 1 + a250 = 1 + a251 = 1 + a252 = 1 + a253 = 1 + a254 = 1 + a255 = 1 + a256 = 1 + a257 = 1 + a258 = 1 + a259 = 1 + b = a1 + a2 + a260 = 1 + a261 = 1 + return b + diff --git a/ptvsd/pydevd/tests_python/_bytecode_overflow_example.py b/ptvsd/pydevd/tests_python/_bytecode_overflow_example.py new file mode 100644 index 00000000..84f126df --- /dev/null +++ b/ptvsd/pydevd/tests_python/_bytecode_overflow_example.py @@ -0,0 +1,98 @@ +import re + +en_lang_symbols = r'[^\w!@#$%\^-_+=|\}{][\"\';:?\/><.,&)(*\s`\u2019]' +en_words_basic = [] +en_words = [] + +TRACE_MESSAGE = "Trace called" + + +def tracing(): + print(TRACE_MESSAGE) + + +def call_tracing(): + tracing() + + +class Dummy: + non_en_words_limit = 3 + + @staticmethod + def fun(text): + words = tuple(w[0].lower() for w in re.finditer(r'[a-zA-Z]+', text)) + non_en_pass = [] + for i, word in enumerate(words): + non_en = [] + if not (word in en_words_basic + or (word.endswith('s') and word[:-1] in en_words_basic) + or (word.endswith('ed') and word[:-2] in en_words_basic) + or (word.endswith('ing') and word[:-3] in en_words_basic) + or word in en_words + or (word.endswith('s') and word[:-1] in en_words) + or (word.endswith('ed') and word[:-2] in en_words) + or (word.endswith('ing') and word[:-3] in en_words) + ): + + non_en.append(word) + non_en_pass.append(word) + for j in range(1, Dummy.non_en_words_limit): + if i + j >= len(words): + break + word = words[i + j] + + if (word in en_words_basic + or (word.endswith('s') and word[:-1] in en_words_basic) + or (word.endswith('ed') and word[:-2] in en_words_basic) + or (word.endswith('ing') and word[:-3] in en_words_basic) + or word in en_words + or (word.endswith('s') and word[:-1] in en_words) + or (word.endswith('ed') and word[:-2] in en_words) + or (word.endswith('ing') and word[:-3] in en_words) + ): + break + else: + non_en.append(word) + non_en_pass.append(word) + + +class DummyTracing: + non_en_words_limit = 3 + + @staticmethod + def fun(text): + words = tuple(w[0].lower() for w in re.finditer(r'[a-zA-Z]+', text)) + tracing() + non_en_pass = [] + for i, word in enumerate(words): + non_en = [] + if not (word in en_words_basic + or (word.endswith('s') and word[:-1] in en_words_basic) + or (word.endswith('ed') and word[:-2] in en_words_basic) + or (word.endswith('ing') and word[:-3] in en_words_basic) + or word in en_words + or (word.endswith('s') and word[:-1] in en_words) + or (word.endswith('ed') and word[:-2] in en_words) + or (word.endswith('ing') and word[:-3] in en_words) + ): + + non_en.append(word) + non_en_pass.append(word) + for j in range(1, 
Dummy.non_en_words_limit): + if i + j >= len(words): + break + word = words[i + j] + if (word in en_words_basic + or (word.endswith('s') and word[:-1] in en_words_basic) + or (word.endswith('ed') and word[:-2] in en_words_basic) + or (word.endswith('ing') and word[:-3] in en_words_basic) + or word in en_words + or (word.endswith('s') and word[:-1] in en_words) + or (word.endswith('ed') and word[:-2] in en_words) + or (word.endswith('ing') and word[:-3] in en_words) + ): + break + else: + non_en.append(word) + non_en_pass.append(word) + diff --git a/ptvsd/pydevd/tests_python/_debugger_case1.py b/ptvsd/pydevd/tests_python/_debugger_case1.py new file mode 100644 index 00000000..7ef80626 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case1.py @@ -0,0 +1,61 @@ +import sys +import weakref + +def set_up(): + observable = Observable() + observer = Observer() + observable.add_observer(observer) + return observable + + +class Observable(object): + def __init__(self): + self.observers = [] + + def add_observer(self, observer): + sys.stdout.write( 'observer %s\n' % (observer,)) + ref = weakref.ref(observer) + self.observers.append(ref) + sys.stdout.write('weakref: %s\n' % (ref(),)) + + def Notify(self): + for o in self.observers: + o = o() + + + try: + import gc + except ImportError: + o = None #some jython does not have gc, so, there's no sense testing this in it + else: + try: + gc.get_referrers(o) + except: + o = None #jython and ironpython do not have get_referrers + + if o is not None: + sys.stdout.write('still observing %s\n' % (o,)) + sys.stdout.write('number of referrers: %s\n' % len(gc.get_referrers(o))) + frame = gc.get_referrers(o)[0] + frame_referrers = gc.get_referrers(frame) + sys.stdout.write('frame referrer %s\n' % (frame_referrers,)) + referrers1 = gc.get_referrers(frame_referrers[1]) + sys.stdout.write('%s\n' % (referrers1,)) + sys.stderr.write('TEST FAILED: The observer should have died, even when running in debug\n') + else: + sys.stdout.write('TEST SUCEEDED: observer died\n') + + sys.stdout.flush() + sys.stderr.flush() + +class Observer(object): + pass + + +def main(): + observable = set_up() + observable.Notify() + + +if __name__ == '__main__': + main() diff --git a/ptvsd/pydevd/tests_python/_debugger_case10.py b/ptvsd/pydevd/tests_python/_debugger_case10.py new file mode 100644 index 00000000..323dedaa --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case10.py @@ -0,0 +1,18 @@ +def Method1(): + print('m1') + print('m1') + +def Method1a(): + print('m1a') + print('m1a') + +def Method2(): + print('m2 before') + Method1() + Method1a() + print('m2 after') + + +if __name__ == '__main__': + Method2() + print('TEST SUCEEDED!') diff --git a/ptvsd/pydevd/tests_python/_debugger_case13.py b/ptvsd/pydevd/tests_python/_debugger_case13.py new file mode 100644 index 00000000..dbdbbd4c --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case13.py @@ -0,0 +1,43 @@ + +class TestProperty(object): + def __init__(self, name = "Default"): + self._x = None + self.name = name + + def get_name(self): + return self.__name + + + def set_name(self, value): + self.__name = value + + + def del_name(self): + del self.__name + name = property(get_name, set_name, del_name, "name's docstring") + + @property + def x(self): + return self._x + + @x.setter + def x(self, value): + self._x = value + + @x.deleter + def x(self): + del self._x + +def main(): + """ + """ + testObj = TestProperty() + testObj.x = 10 + val = testObj.x + + testObj.name = "Pydev" + debugType = testObj.name + print('TEST 
SUCEEDED!') + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case14.py b/ptvsd/pydevd/tests_python/_debugger_case14.py new file mode 100644 index 00000000..2a5e181b --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case14.py @@ -0,0 +1,29 @@ + +class Car(object): + """A car class""" + def __init__(self, model, make, color): + self.model = model + self.make = make + self.color = color + self.price = None + + def get_price(self): + return self.price + + def set_price(self, value): + self.price = value + +availableCars = [] +def main(): + global availableCars + + #Create a new car obj + carObj = Car("Maruti SX4", "2011", "Black") + carObj.set_price(950000) # Set price + # Add this to available cars + availableCars.append(carObj) + + print('TEST SUCEEDED') + +if __name__ == '__main__': + main() diff --git a/ptvsd/pydevd/tests_python/_debugger_case15.py b/ptvsd/pydevd/tests_python/_debugger_case15.py new file mode 100644 index 00000000..2a5e181b --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case15.py @@ -0,0 +1,29 @@ + +class Car(object): + """A car class""" + def __init__(self, model, make, color): + self.model = model + self.make = make + self.color = color + self.price = None + + def get_price(self): + return self.price + + def set_price(self, value): + self.price = value + +availableCars = [] +def main(): + global availableCars + + #Create a new car obj + carObj = Car("Maruti SX4", "2011", "Black") + carObj.set_price(950000) # Set price + # Add this to available cars + availableCars.append(carObj) + + print('TEST SUCEEDED') + +if __name__ == '__main__': + main() diff --git a/ptvsd/pydevd/tests_python/_debugger_case15_execfile.py b/ptvsd/pydevd/tests_python/_debugger_case15_execfile.py new file mode 100644 index 00000000..7123209a --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case15_execfile.py @@ -0,0 +1 @@ +f=lambda x: 'val=%s' % x diff --git a/ptvsd/pydevd/tests_python/_debugger_case16.py b/ptvsd/pydevd/tests_python/_debugger_case16.py new file mode 100644 index 00000000..5622813a --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case16.py @@ -0,0 +1,12 @@ +# this test requires numpy to be installed +import numpy + +def main(): + smallarray = numpy.arange(100) * 1 + 1j + bigarray = numpy.arange(100000).reshape((10,10000)) # 100 thousand + hugearray = numpy.arange(10000000) # 10 million + + pass # location of breakpoint after all arrays defined + +main() +print('TEST SUCEEDED') diff --git a/ptvsd/pydevd/tests_python/_debugger_case17.py b/ptvsd/pydevd/tests_python/_debugger_case17.py new file mode 100644 index 00000000..0177683c --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case17.py @@ -0,0 +1,38 @@ +def get_here(): + a = 10 + +def foo(func): + return func + +def m1(): # @DontTrace + get_here() + +# @DontTrace +def m2(): + get_here() + +# @DontTrace +@foo +def m3(): + get_here() + +@foo +@foo +def m4(): # @DontTrace + get_here() + + +def main(): + + m1() + + m2() + + m3() + + m4() + +if __name__ == '__main__': + main() + + print('TEST SUCEEDED') diff --git a/ptvsd/pydevd/tests_python/_debugger_case17a.py b/ptvsd/pydevd/tests_python/_debugger_case17a.py new file mode 100644 index 00000000..fa3ea0ef --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case17a.py @@ -0,0 +1,15 @@ +def m1(): + _a = 'm1' + +def m2(): # @DontTrace + m1() + _a = 'm2' + +def m3(): + m2() + _a = 'm3' + +if __name__ == '__main__': + m3() + + print('TEST SUCEEDED') diff --git 
a/ptvsd/pydevd/tests_python/_debugger_case18.py b/ptvsd/pydevd/tests_python/_debugger_case18.py new file mode 100644 index 00000000..c221039f --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case18.py @@ -0,0 +1,23 @@ +import sys + +def m2(a): + a = 10 + b = 20 #Break here and set a = 40 + c = 30 + + def function2(): + print(a) + + return a + + +def m1(a): + return m2(a) + + +if __name__ == '__main__': + found = m1(10) + if found == 40: + print('TEST SUCEEDED') + else: + raise AssertionError('Expected variable to be changed to 40. Found: %s' % (found,)) diff --git a/ptvsd/pydevd/tests_python/_debugger_case19.py b/ptvsd/pydevd/tests_python/_debugger_case19.py new file mode 100644 index 00000000..07ac951f --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case19.py @@ -0,0 +1,10 @@ +class A: + + def __init__(self): + self.__var = 10 + +if __name__ == '__main__': + a = A() + print(a._A__var) + # Evaluate 'a.__var' should give a._A__var_ + print('TEST SUCEEDED') diff --git a/ptvsd/pydevd/tests_python/_debugger_case2.py b/ptvsd/pydevd/tests_python/_debugger_case2.py new file mode 100644 index 00000000..e47a5e21 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case2.py @@ -0,0 +1,24 @@ + +def Call4(): + print('Start Call4') + print('End Call4') + +def Call3(): + print('Start Call3') + Call4() + print('End Call3') + +def Call2(): + print('Start Call2') + Call3() + print('End Call2 - a') + print('End Call2 - b') + +def Call1(): + print('Start Call1') + Call2() + print('End Call1') + +if __name__ == '__main__': + Call1() + print('TEST SUCEEDED!') diff --git a/ptvsd/pydevd/tests_python/_debugger_case3.py b/ptvsd/pydevd/tests_python/_debugger_case3.py new file mode 100644 index 00000000..aa05032f --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case3.py @@ -0,0 +1,8 @@ +import time +if __name__ == '__main__': + for i in range(15): + print('here') + time.sleep(.2) + + print('TEST SUCEEDED') + diff --git a/ptvsd/pydevd/tests_python/_debugger_case4.py b/ptvsd/pydevd/tests_python/_debugger_case4.py new file mode 100644 index 00000000..009da4a6 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case4.py @@ -0,0 +1,8 @@ +import time +if __name__ == '__main__': + for i in range(10): + print('here %s' % i) + time.sleep(1) + + print('TEST SUCEEDED') + diff --git a/ptvsd/pydevd/tests_python/_debugger_case56.py b/ptvsd/pydevd/tests_python/_debugger_case56.py new file mode 100644 index 00000000..e5de28d9 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case56.py @@ -0,0 +1,9 @@ +def Call2(): + print('Call2') + +def Call1(a): + print('Call1') + +if __name__ == '__main__': + Call1(Call2()) + print('TEST SUCEEDED!') diff --git a/ptvsd/pydevd/tests_python/_debugger_case7.py b/ptvsd/pydevd/tests_python/_debugger_case7.py new file mode 100644 index 00000000..499d8d76 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case7.py @@ -0,0 +1,8 @@ +def Call(): + variable_for_test_1 = 10 + variable_for_test_2 = 20 + variable_for_test_3 = 30 + +if __name__ == '__main__': + Call() + print('TEST SUCEEDED!') diff --git a/ptvsd/pydevd/tests_python/_debugger_case89.py b/ptvsd/pydevd/tests_python/_debugger_case89.py new file mode 100644 index 00000000..e22361d5 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case89.py @@ -0,0 +1,16 @@ +def Method1(): + print('m1') + +def Method2(): + print('m2 before') + Method1() + print('m2 after') + +def Method3(): + print('m3 before') + Method2() + print('m3 after') + +if __name__ == '__main__': + Method3() + print('TEST SUCEEDED!') diff --git 
a/ptvsd/pydevd/tests_python/_debugger_case_event_ext.py b/ptvsd/pydevd/tests_python/_debugger_case_event_ext.py new file mode 100644 index 00000000..66b9ba63 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_event_ext.py @@ -0,0 +1 @@ +# File empty. Output is in the extension itself \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_m_switch.py b/ptvsd/pydevd/tests_python/_debugger_case_m_switch.py new file mode 100644 index 00000000..a5b3706a --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_m_switch.py @@ -0,0 +1,3 @@ +from . import _debugger_case_m_switch_2 +print(_debugger_case_m_switch_2.ClassToBeImported) +print('TEST SUCEEDED!') \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_m_switch_2.py b/ptvsd/pydevd/tests_python/_debugger_case_m_switch_2.py new file mode 100644 index 00000000..99c9f4cd --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_m_switch_2.py @@ -0,0 +1,2 @@ +class ClassToBeImported(object): + pass \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_module_entry_point.py b/ptvsd/pydevd/tests_python/_debugger_case_module_entry_point.py new file mode 100644 index 00000000..a69ce52b --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_module_entry_point.py @@ -0,0 +1,2 @@ +def main(): + print('TEST SUCEEDED!') \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_qthread1.py b/ptvsd/pydevd/tests_python/_debugger_case_qthread1.py new file mode 100644 index 00000000..c0e7eb63 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_qthread1.py @@ -0,0 +1,28 @@ +import time +import sys + +try: + from PySide import QtCore # @UnresolvedImport +except: + try: + from PyQt4 import QtCore + except: + from PyQt5 import QtCore + +# Subclassing QThread +# http://doc.qt.nokia.com/latest/qthread.html +class AThread(QtCore.QThread): + + def run(self): + count = 0 + while count < 5: + time.sleep(.5) + print("Increasing", count);sys.stdout.flush() + count += 1 + +app = QtCore.QCoreApplication([]) +thread = AThread() +thread.finished.connect(app.exit) +thread.start() +app.exec_() +print('TEST SUCEEDED!') \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_qthread2.py b/ptvsd/pydevd/tests_python/_debugger_case_qthread2.py new file mode 100644 index 00000000..d80446ad --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_qthread2.py @@ -0,0 +1,38 @@ +import time +import sys + +try: + from PySide import QtCore # @UnresolvedImport +except: + try: + from PyQt4 import QtCore + except: + from PyQt5 import QtCore + +# Subclassing QObject and using moveToThread +# http://labs.qt.nokia.com/2007/07/05/qthreads-no-longer-abstract/ +class SomeObject(QtCore.QObject): + + try: + finished = QtCore.Signal() # @UndefinedVariable + except: + finished = QtCore.pyqtSignal() # @UndefinedVariable + + def long_running(self): + count = 0 + while count < 5: + time.sleep(.5) + print("Increasing") + count += 1 + self.finished.emit() + +app = QtCore.QCoreApplication([]) +objThread = QtCore.QThread() +obj = SomeObject() +obj.moveToThread(objThread) +obj.finished.connect(objThread.quit) +objThread.started.connect(obj.long_running) +objThread.finished.connect(app.exit) +objThread.start() +app.exec_() +print('TEST SUCEEDED!') \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_qthread3.py b/ptvsd/pydevd/tests_python/_debugger_case_qthread3.py new file mode 100644 index 00000000..4513db84 --- 
/dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_qthread3.py @@ -0,0 +1,33 @@ +import time +import sys + +try: + from PySide import QtCore # @UnresolvedImport +except: + try: + from PyQt4 import QtCore + except: + from PyQt5 import QtCore + +# Using a QRunnable +# http://doc.qt.nokia.com/latest/qthreadpool.html +# Note that a QRunnable isn't a subclass of QObject and therefore does +# not provide signals and slots. +class Runnable(QtCore.QRunnable): + + def run(self): + count = 0 + app = QtCore.QCoreApplication.instance() + while count < 5: + print("Increasing") + time.sleep(.5) + count += 1 + app.quit() + + +app = QtCore.QCoreApplication([]) +runnable = Runnable() +QtCore.QThreadPool.globalInstance().start(runnable) +app.exec_() +QtCore.QThreadPool.globalInstance().waitForDone() +print('TEST SUCEEDED!') \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_qthread4.py b/ptvsd/pydevd/tests_python/_debugger_case_qthread4.py new file mode 100644 index 00000000..f76da7aa --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_qthread4.py @@ -0,0 +1,41 @@ +try: + from PySide import QtCore +except: + try: + from PyQt4 import QtCore + except: + from PyQt5 import QtCore + +class TestObject(QtCore.QObject): + """ + Test class providing some non-argument signal + """ + + try: + testSignal = QtCore.Signal() # @UndefinedVariable + except: + testSignal = QtCore.pyqtSignal() # @UndefinedVariable + + +class TestThread(QtCore.QThread): + + def run(self): + QtCore.QThread.sleep(4) + print('Done sleeping') + +def on_start(): + print('On start called1') + print('On start called2') + +app = QtCore.QCoreApplication([]) +some_thread = TestThread() +some_object = TestObject() + +# connect QThread.started to the signal +some_thread.started.connect(some_object.testSignal) +some_object.testSignal.connect(on_start) +some_thread.finished.connect(app.quit) + +some_thread.start() +app.exec_() +print('TEST SUCEEDED!') diff --git a/ptvsd/pydevd/tests_python/_debugger_case_remote.py b/ptvsd/pydevd/tests_python/_debugger_case_remote.py new file mode 100644 index 00000000..dd35330b --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_remote.py @@ -0,0 +1,14 @@ +if __name__ == '__main__': + import os + import sys + root_dirname = os.path.dirname(os.path.dirname(__file__)) + + if root_dirname not in sys.path: + sys.path.append(root_dirname) + + import pydevd + print('before pydevd.settrace') + pydevd.settrace(port=8787) + print('after pydevd.settrace') + print('TEST SUCEEDED!') + \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_remote_1.py b/ptvsd/pydevd/tests_python/_debugger_case_remote_1.py new file mode 100644 index 00000000..3478b76c --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_remote_1.py @@ -0,0 +1,27 @@ +if __name__ == '__main__': + import subprocess + import sys + import os + import _debugger_case_remote_2 + root_dirname = os.path.dirname(os.path.dirname(__file__)) + + if root_dirname not in sys.path: + sys.path.append(root_dirname) + + import pydevd + + print('before pydevd.settrace') + sys.stdout.flush() + pydevd.settrace(port=8787, patch_multiprocessing=True) + print('after pydevd.settrace') + sys.stdout.flush() + f = _debugger_case_remote_2.__file__ + if f.endswith('.pyc'): + f = f[:-1] + elif f.endswith('$py.class'): + f = f[:-len('$py.class')] + '.py' + print('before call') + sys.stdout.flush() + subprocess.check_call([sys.executable, '-u', f]) + print('after call') + sys.stdout.flush() diff --git 
a/ptvsd/pydevd/tests_python/_debugger_case_remote_2.py b/ptvsd/pydevd/tests_python/_debugger_case_remote_2.py new file mode 100644 index 00000000..d25bb771 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_remote_2.py @@ -0,0 +1,11 @@ +if __name__ == '__main__': + print('Run as main: %s' % (__file__,)) + import sys + sys.stdout.flush() + import pydevd + # Just check that we're already connected + assert pydevd.GetGlobalDebugger() is not None + print('finish') + sys.stdout.flush() + print('TEST SUCEEDED!') + sys.stdout.flush() \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_debugger_case_set_next_statement.py b/ptvsd/pydevd/tests_python/_debugger_case_set_next_statement.py new file mode 100644 index 00000000..145f36d5 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_set_next_statement.py @@ -0,0 +1,10 @@ +def method(): + a = 1 + print('call %s' % (a,)) + a = 2 + print('call %s' % (a,)) + a = 3 + +if __name__ == '__main__': + method() + print('TEST SUCEEDED!') diff --git a/ptvsd/pydevd/tests_python/_debugger_case_type_ext.py b/ptvsd/pydevd/tests_python/_debugger_case_type_ext.py new file mode 100644 index 00000000..4a9b6949 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_debugger_case_type_ext.py @@ -0,0 +1,8 @@ +class Rect(object): + def __init__(self, l, w): + super(Rect, self).__init__() + self.length = l + self.width = w +my_rect=Rect(5, 10) +print('TEST SUCEEDED!') + diff --git a/ptvsd/pydevd/tests_python/_performance_1.py b/ptvsd/pydevd/tests_python/_performance_1.py new file mode 100644 index 00000000..7665064a --- /dev/null +++ b/ptvsd/pydevd/tests_python/_performance_1.py @@ -0,0 +1,33 @@ +import time + +try: + xrange +except: + xrange = range + +def method2(): + i = 1 + +def method(): + + for i in xrange(200000): + method2() + + if False: + # Unreachable breakpoint here + pass + +def caller(): + start_time = time.time() + method() + print('TotalTime>>%s<<' % (time.time()-start_time,)) + +if __name__ == '__main__': + import sys + if '--regular-trace' in sys.argv: + def trace_dispatch(frame, event, arg): + return trace_dispatch + sys.settrace(trace_dispatch) + + caller() # Initial breakpoint for a step-over here + print('TEST SUCEEDED') diff --git a/ptvsd/pydevd/tests_python/_performance_2.py b/ptvsd/pydevd/tests_python/_performance_2.py new file mode 100644 index 00000000..6e689177 --- /dev/null +++ b/ptvsd/pydevd/tests_python/_performance_2.py @@ -0,0 +1,26 @@ +import time +start_time = time.time() + +try: + xrange # @UndefinedVariable +except NameError: + xrange = range + +from itertools import groupby +from random import randrange + +letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + +# create an array of random strings of 40 characters each +l = sorted([''.join([letters[randrange(0, 26)] for _ in range(40)]) for _ in xrange(10000)]) + +# group by the first two characters +g = {k: list(v) for k, v in groupby(l, lambda x: x[:2])} + +print(len(g.get('AA'))) + +if False: + pass # Breakpoint here + +print('TotalTime>>%s<<' % (time.time()-start_time,)) +print('TEST SUCEEDED') \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/_performance_3.py b/ptvsd/pydevd/tests_python/_performance_3.py new file mode 100644 index 00000000..4ac1eced --- /dev/null +++ b/ptvsd/pydevd/tests_python/_performance_3.py @@ -0,0 +1,20 @@ +import time +start_time = time.time() + +try: + xrange # @UndefinedVariable +except NameError: + xrange = range + +# do some busy work in parallel +print("Started main task") +x = 0 +for i in xrange(1000000): + x += 1 
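+# Note: the performance check (see performance_check.py further below) sets its breakpoint
+# on the unreachable 'pass' under 'if False' near the end of this script, so the run
+# measures pure tracing overhead without ever actually stopping.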
+print("Completed main task") + +if False: + pass # Breakpoint here + +print('TotalTime>>%s<<' % (time.time()-start_time,)) +print('TEST SUCEEDED') \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/debugger_unittest.py b/ptvsd/pydevd/tests_python/debugger_unittest.py new file mode 100644 index 00000000..f27f3e5b --- /dev/null +++ b/ptvsd/pydevd/tests_python/debugger_unittest.py @@ -0,0 +1,605 @@ +try: + from urllib import quote, quote_plus, unquote_plus +except ImportError: + from urllib.parse import quote, quote_plus, unquote_plus #@UnresolvedImport + + +import os +import socket +import subprocess +import sys +import threading +import time + +from _pydev_bundle import pydev_localhost + + +IS_PY3K = sys.version_info[0] >= 3 + +# Note: copied (don't import because we want it to be independent on the actual code because of backward compatibility). +CMD_RUN = 101 +CMD_LIST_THREADS = 102 +CMD_THREAD_CREATE = 103 +CMD_THREAD_KILL = 104 +CMD_THREAD_SUSPEND = 105 +CMD_THREAD_RUN = 106 +CMD_STEP_INTO = 107 +CMD_STEP_OVER = 108 +CMD_STEP_RETURN = 109 +CMD_GET_VARIABLE = 110 +CMD_SET_BREAK = 111 +CMD_REMOVE_BREAK = 112 +CMD_EVALUATE_EXPRESSION = 113 +CMD_GET_FRAME = 114 +CMD_EXEC_EXPRESSION = 115 +CMD_WRITE_TO_CONSOLE = 116 +CMD_CHANGE_VARIABLE = 117 +CMD_RUN_TO_LINE = 118 +CMD_RELOAD_CODE = 119 +CMD_GET_COMPLETIONS = 120 + +# Note: renumbered (conflicted on merge) +CMD_CONSOLE_EXEC = 121 +CMD_ADD_EXCEPTION_BREAK = 122 +CMD_REMOVE_EXCEPTION_BREAK = 123 +CMD_LOAD_SOURCE = 124 +CMD_ADD_DJANGO_EXCEPTION_BREAK = 125 +CMD_REMOVE_DJANGO_EXCEPTION_BREAK = 126 +CMD_SET_NEXT_STATEMENT = 127 +CMD_SMART_STEP_INTO = 128 +CMD_EXIT = 129 +CMD_SIGNATURE_CALL_TRACE = 130 + + + +CMD_SET_PY_EXCEPTION = 131 +CMD_GET_FILE_CONTENTS = 132 +CMD_SET_PROPERTY_TRACE = 133 +# Pydev debug console commands +CMD_EVALUATE_CONSOLE_EXPRESSION = 134 +CMD_RUN_CUSTOM_OPERATION = 135 +CMD_GET_BREAKPOINT_EXCEPTION = 136 +CMD_STEP_CAUGHT_EXCEPTION = 137 +CMD_SEND_CURR_EXCEPTION_TRACE = 138 +CMD_SEND_CURR_EXCEPTION_TRACE_PROCEEDED = 139 +CMD_IGNORE_THROWN_EXCEPTION_AT = 140 +CMD_ENABLE_DONT_TRACE = 141 +CMD_SHOW_CONSOLE = 142 + +CMD_GET_ARRAY = 143 +CMD_STEP_INTO_MY_CODE = 144 +CMD_GET_CONCURRENCY_EVENT = 145 + +CMD_VERSION = 501 +CMD_RETURN = 502 +CMD_ERROR = 901 + + + +# Always True (because otherwise when we do have an error, it's hard to diagnose). +SHOW_WRITES_AND_READS = True +SHOW_OTHER_DEBUG_INFO = True +SHOW_STDOUT = True + + +try: + from thread import start_new_thread +except ImportError: + from _thread import start_new_thread # @UnresolvedImport + +try: + xrange +except: + xrange = range + + +#======================================================================================================================= +# ReaderThread +#======================================================================================================================= +class ReaderThread(threading.Thread): + + def __init__(self, sock): + threading.Thread.__init__(self) + try: + from queue import Queue + except ImportError: + from Queue import Queue + + self.setDaemon(True) + self.sock = sock + self._queue = Queue() + self.all_received = [] + self._kill = False + + def get_next_message(self, context_messag): + try: + msg = self._queue.get(block=True, timeout=15) + except: + raise AssertionError('No message was written in 15 seconds. 
Error message:\n%s' % (context_messag,)) + else: + frame = sys._getframe().f_back + frame_info = ' -- File "%s", line %s, in %s\n' % (frame.f_code.co_filename, frame.f_lineno, frame.f_code.co_name) + frame_info += ' -- File "%s", line %s, in %s\n' % (frame.f_back.f_code.co_filename, frame.f_back.f_lineno, frame.f_back.f_code.co_name) + frame = None + sys.stdout.write('Message returned in get_next_message(): %s -- ctx: %s, returned to:\n%s\n' % (msg, context_messag, frame_info)) + return msg + + def run(self): + try: + buf = '' + while not self._kill: + l = self.sock.recv(1024) + if IS_PY3K: + l = l.decode('utf-8') + self.all_received.append(l) + buf += l + + while '\n' in buf: + # Print each part... + i = buf.index('\n')+1 + last_received = buf[:i] + buf = buf[i:] + + if SHOW_WRITES_AND_READS: + print('Test Reader Thread Received %s' % (last_received, )) + + self._queue.put(last_received) + except: + pass # ok, finished it + finally: + del self.all_received[:] + + def do_kill(self): + self._kill = True + if hasattr(self, 'sock'): + self.sock.close() + + +class DebuggerRunner(object): + + def get_command_line(self): + ''' + Returns the base command line (i.e.: ['python.exe', '-u']) + ''' + raise NotImplementedError + + def add_command_line_args(self, args): + writer_thread = self.writer_thread + port = int(writer_thread.port) + + localhost = pydev_localhost.get_localhost() + ret = args + [ + writer_thread.get_pydevd_file(), + '--DEBUG_RECORD_SOCKET_READS', + '--qt-support', + '--client', + localhost, + '--port', + str(port), + ] + + if writer_thread.IS_MODULE: + ret += ['--module'] + + ret = ret + ['--file'] + writer_thread.get_command_line_args() + return ret + + def check_case(self, writer_thread_class): + writer_thread = writer_thread_class() + try: + writer_thread.start() + for _i in xrange(40000): + if hasattr(writer_thread, 'port'): + break + time.sleep(.01) + self.writer_thread = writer_thread + + args = self.get_command_line() + + args = self.add_command_line_args(args) + + if SHOW_OTHER_DEBUG_INFO: + print('executing', ' '.join(args)) + + ret = self.run_process(args, writer_thread) + finally: + writer_thread.do_kill() + writer_thread.log = [] + + stdout = ret['stdout'] + stderr = ret['stderr'] + writer_thread.additional_output_checks(''.join(stdout), ''.join(stderr)) + return ret + + def create_process(self, args, writer_thread): + process = subprocess.Popen( + args, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + cwd=writer_thread.get_cwd() if writer_thread is not None else '.', + env=writer_thread.get_environ() if writer_thread is not None else None, + ) + return process + + def run_process(self, args, writer_thread): + process = self.create_process(args, writer_thread) + stdout = [] + stderr = [] + finish = [False] + + try: + def read(stream, buffer): + for line in stream.readlines(): + if finish[0]: + return + if IS_PY3K: + line = line.decode('utf-8') + + if SHOW_STDOUT: + sys.stdout.write('stdout: %s' % (line,)) + buffer.append(line) + + start_new_thread(read, (process.stdout, stdout)) + + + if SHOW_OTHER_DEBUG_INFO: + print('Both processes started') + + # polls can fail (because the process may finish and the thread still not -- so, we give it some more chances to + # finish successfully). 
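+            # The loop below polls the debugged process until it exits; if the writer
+            # thread finished first, the process may be killed explicitly, and after a
+            # ~20 second timeout the test is failed.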
+ initial_time = time.time() + shown_intermediate = False + while True: + if process.poll() is not None: + break + else: + if writer_thread is not None: + if not writer_thread.isAlive(): + if writer_thread.FORCE_KILL_PROCESS_WHEN_FINISHED_OK: + process.kill() + continue + + if not shown_intermediate and (time.time() - initial_time > 10): + print('Warning: writer thread exited and process still did not (%.2fs seconds elapsed).' % (time.time() - initial_time,)) + shown_intermediate = True + + if time.time() - initial_time > 20: + process.kill() + time.sleep(.2) + self.fail_with_message( + "The other process should've exited but still didn't (%.2fs seconds timeout for process to exit)." % (time.time() - initial_time,), + stdout, stderr, writer_thread + ) + time.sleep(.2) + + + if writer_thread is not None: + if not writer_thread.FORCE_KILL_PROCESS_WHEN_FINISHED_OK: + poll = process.poll() + if poll < 0: + self.fail_with_message( + "The other process exited with error code: " + str(poll), stdout, stderr, writer_thread) + + + if stdout is None: + self.fail_with_message( + "The other process may still be running -- and didn't give any output.", stdout, stderr, writer_thread) + + check = 0 + while 'TEST SUCEEDED' not in ''.join(stdout): + check += 1 + if check == 50: + self.fail_with_message("TEST SUCEEDED not found in stdout.", stdout, stderr, writer_thread) + time.sleep(.1) + + for _i in xrange(100): + if not writer_thread.finished_ok: + time.sleep(.1) + + if not writer_thread.finished_ok: + self.fail_with_message( + "The thread that was doing the tests didn't finish successfully.", stdout, stderr, writer_thread) + finally: + finish[0] = True + + return {'stdout':stdout, 'stderr':stderr} + + def fail_with_message(self, msg, stdout, stderr, writerThread): + raise AssertionError(msg+ + "\n\n===========================\nStdout: \n"+''.join(stdout)+ + "\n\n===========================\nStderr:"+''.join(stderr)+ + "\n\n===========================\nLog:\n"+'\n'.join(getattr(writerThread, 'log', []))) + + + +#======================================================================================================================= +# AbstractWriterThread +#======================================================================================================================= +class AbstractWriterThread(threading.Thread): + + FORCE_KILL_PROCESS_WHEN_FINISHED_OK = False + IS_MODULE = False + + def __init__(self): + threading.Thread.__init__(self) + self.setDaemon(True) + self.finished_ok = False + self._next_breakpoint_id = 0 + self.log = [] + + def additional_output_checks(self, stdout, stderr): + pass + + def get_environ(self): + return None + + def get_pydevd_file(self): + dirname = os.path.dirname(__file__) + dirname = os.path.dirname(dirname) + return os.path.abspath(os.path.join(dirname, 'pydevd.py')) + + def get_cwd(self): + return os.path.dirname(self.get_pydevd_file()) + + def get_command_line_args(self): + return [self.TEST_FILE] + + def do_kill(self): + if hasattr(self, 'server_socket'): + self.server_socket.close() + + if hasattr(self, 'reader_thread'): + # if it's not created, it's not there... 
+ self.reader_thread.do_kill() + if hasattr(self, 'sock'): + self.sock.close() + + def write(self, s): + self.log.append('write: %s' % (s,)) + + if SHOW_WRITES_AND_READS: + print('Test Writer Thread Written %s' % (s,)) + msg = s + '\n' + if IS_PY3K: + msg = msg.encode('utf-8') + self.sock.send(msg) + + + def start_socket(self, port=None): + from _pydev_bundle.pydev_localhost import get_socket_name + if SHOW_WRITES_AND_READS: + print('start_socket') + + if port is None: + socket_name = get_socket_name(close=True) + else: + socket_name = (pydev_localhost.get_localhost(), port) + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.bind(socket_name) + self.port = socket_name[1] + s.listen(1) + if SHOW_WRITES_AND_READS: + print('Waiting in socket.accept()') + self.server_socket = s + newSock, addr = s.accept() + if SHOW_WRITES_AND_READS: + print('Test Writer Thread Socket:', newSock, addr) + + reader_thread = self.reader_thread = ReaderThread(newSock) + reader_thread.start() + self.sock = newSock + + self._sequence = -1 + # initial command is always the version + self.write_version() + self.log.append('start_socket') + + def next_breakpoint_id(self): + self._next_breakpoint_id += 1 + return self._next_breakpoint_id + + def next_seq(self): + self._sequence += 2 + return self._sequence + + + def wait_for_new_thread(self): + # wait for hit breakpoint + last = '' + while not ' + splitted = last.split('"') + thread_id = splitted[3] + return thread_id + + def wait_for_breakpoint_hit(self, *args, **kwargs): + return self.wait_for_breakpoint_hit_with_suspend_type(*args, **kwargs)[:-1] + + def wait_for_breakpoint_hit_with_suspend_type(self, reason='111', get_line=False, get_name=False): + ''' + 108 is over + 109 is return + 111 is breakpoint + ''' + self.log.append('Start: wait_for_breakpoint_hit') + # wait for hit breakpoint + last = '' + while not ('stop_reason="%s"' % reason) in last: + last = self.reader_thread.get_next_message('wait_for_breakpoint_hit. 
reason=%s' % (reason,)) + + # we have something like + + my_django_proj_17 + + + + + + org.python.pydev.PyDevBuilder + + + + + + org.python.pydev.pythonNature + org.python.pydev.django.djangoNature + + diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/.pydevproject b/ptvsd/pydevd/tests_python/my_django_proj_17/.pydevproject new file mode 100644 index 00000000..6e842d01 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/.pydevproject @@ -0,0 +1,12 @@ + + + +DJANGO_MANAGE_LOCATION +manage.py + + +/${PROJECT_DIR_NAME} + +python 2.7 +Default + diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/manage.py b/ptvsd/pydevd/tests_python/my_django_proj_17/manage.py new file mode 100644 index 00000000..c29c377d --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/manage.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +import os +import sys + +if __name__ == "__main__": + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "my_django_proj_17.settings") + + from django.core.management import execute_from_command_line + + execute_from_command_line(sys.argv) diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/__init__.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/admin.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/admin.py new file mode 100644 index 00000000..8c38f3f3 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/forms.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/forms.py new file mode 100644 index 00000000..fe030860 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/forms.py @@ -0,0 +1,4 @@ +from django import forms + +class NameForm(forms.Form): + your_name = forms.CharField(label='Your name', max_length=100) \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/models.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/models.py new file mode 100644 index 00000000..71a83623 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/templates/my_app/index.html b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/templates/my_app/index.html new file mode 100644 index 00000000..5cad374c --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/templates/my_app/index.html @@ -0,0 +1,13 @@ +{% if entries %} +
    <ul>
+    {% for entry in entries %}
+        <li>
+            {{ entry.key }}
+            :
+            {{ entry.val }}
+        </li>
+    {% endfor %}
+    </ul>
+{% else %}
+    <p>No entries are available.</p>
+{% endif %} \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/templates/my_app/name.html b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/templates/my_app/name.html new file mode 100644 index 00000000..d47a2e12 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/templates/my_app/name.html @@ -0,0 +1,7 @@ +
+ {% csrf_token %} + {{ form }} + It is {% now "jS F Y H:i" %} + +
+

End of form

\ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/tests.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/tests.py new file mode 100644 index 00000000..7ce503c2 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/urls.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/urls.py new file mode 100644 index 00000000..32ce6af3 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/urls.py @@ -0,0 +1,8 @@ +from django.conf.urls import url + +from . import views + +urlpatterns = [ + url(r'^$', views.index, name='index'), + url(r'^name$', views.get_name, name='name'), +] \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/views.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/views.py new file mode 100644 index 00000000..0e17f44f --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_app/views.py @@ -0,0 +1,43 @@ +from django.shortcuts import render + +# Create your views here. +from django.http import HttpResponse, HttpResponseRedirect +import sys +from .forms import NameForm + +class Entry(object): + + def __init__(self, key, val): + self.key = key + self.val = val + + def __unicode__(self): + return u'%s:%s' % (self.key, self.val) + + def __str__(self): + return u'%s:%s' % (self.key, self.val) + +def index(request): + context = { + 'entries': [Entry('v1', 'v1'), Entry('v2', 'v2')] + } + ret = render(request, 'my_app/index.html', context) + return ret + +def get_name(request): + # if this is a POST request we need to process the form data + if request.method == 'POST': + # create a form instance and populate it with data from the request: + form = NameForm(request.POST) + # check whether it's valid: + if form.is_valid(): + # process the data in form.cleaned_data as required + # ... + # redirect to a new URL: + return HttpResponseRedirect('/thanks/') + + # if a GET (or any other method) we'll create a blank form + else: + form = NameForm(data= {'your_name': 'unknown name'}) + + return render(request, 'my_app/name.html', {'form': form}) \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/__init__.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/settings.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/settings.py new file mode 100644 index 00000000..ec3fb3bb --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/settings.py @@ -0,0 +1,86 @@ +""" +Django settings for my_django_proj_17 project. + +For more information on this file, see +https://docs.djangoproject.com/en/1.7/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/1.7/ref/settings/ +""" + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +import os +BASE_DIR = os.path.dirname(os.path.dirname(__file__)) + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! 
+SECRET_KEY = '5_sue9bp&j=45#%_hcx3f34k!qnt$mxfd&7zq@7c7t@sn4_l)b' + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = True + +TEMPLATE_DEBUG = True + +ALLOWED_HOSTS = [] + + +# Application definition + +INSTALLED_APPS = ( + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + 'my_app', +) + +MIDDLEWARE_CLASSES = ( + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +) + +ROOT_URLCONF = 'my_django_proj_17.urls' + +WSGI_APPLICATION = 'my_django_proj_17.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/1.7/ref/settings/#databases + +# No database for our test. + +# DATABASES = { +# 'default': { +# 'ENGINE': 'django.db.backends.sqlite3', +# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), +# } +# } + +# Internationalization +# https://docs.djangoproject.com/en/1.7/topics/i18n/ + +LANGUAGE_CODE = 'en-us' + +TIME_ZONE = 'UTC' + +USE_I18N = True + +USE_L10N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/1.7/howto/static-files/ + +STATIC_URL = '/static/' diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/urls.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/urls.py new file mode 100644 index 00000000..fc5c5877 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/urls.py @@ -0,0 +1,11 @@ +from django.conf.urls import patterns, include, url +from django.contrib import admin + +urlpatterns = patterns('', + # Examples: + # url(r'^$', 'my_django_proj_17.views.home', name='home'), + # url(r'^blog/', include('blog.urls')), + + url(r'^admin/', include(admin.site.urls)), + url(r'^my_app/', include('my_app.urls')), +) diff --git a/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/wsgi.py b/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/wsgi.py new file mode 100644 index 00000000..c410e8d2 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_django_proj_17/my_django_proj_17/wsgi.py @@ -0,0 +1,14 @@ +""" +WSGI config for my_django_proj_17 project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ +""" + +import os +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "my_django_proj_17.settings") + +from django.core.wsgi import get_wsgi_application +application = get_wsgi_application() diff --git a/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/__init__.py b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/__init__.py new file mode 100644 index 00000000..afff0c07 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/__init__.py @@ -0,0 +1,5 @@ +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/__init__.py b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/__init__.py new file mode 100644 index 00000000..afff0c07 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/__init__.py @@ -0,0 +1,5 @@ +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/pydevd_plugin_test_events.py b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/pydevd_plugin_test_events.py new file mode 100644 index 00000000..71c1ef9c --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/pydevd_plugin_test_events.py @@ -0,0 +1,18 @@ +from _pydevd_bundle.pydevd_extension_api import DebuggerEventHandler +import os +import sys + + +class VerifyEvent(object): + def on_debugger_modules_loaded(self, **kwargs): + print ("INITIALIZE EVENT RECEIVED") + # check that some core modules are loaded before this callback is invoked + modules_loaded = all(mod in sys.modules for mod in ('pydevd_file_utils', '_pydevd_bundle.pydevd_constants')) + if modules_loaded: + print ("TEST SUCEEDED") # incorrect spelling on purpose + else: + print ("TEST FAILED") + + +if os.environ.get("VERIFY_EVENT_TEST"): + DebuggerEventHandler.register(VerifyEvent) diff --git a/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/pydevd_plugin_test_exttype.py b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/pydevd_plugin_test_exttype.py new file mode 100644 index 00000000..c3a7f783 --- /dev/null +++ b/ptvsd/pydevd/tests_python/my_extensions/pydevd_plugins/extensions/pydevd_plugin_test_exttype.py @@ -0,0 +1,20 @@ +from _pydevd_bundle.pydevd_extension_api import StrPresentationProvider, TypeResolveProvider + + +class RectResolver(TypeResolveProvider): + def get_dictionary(self, var): + return {'length': var.length, 'width': var.width, 'area': var.length * var.width} + + def resolve(self, var, attribute): + return getattr(var, attribute, None) if attribute != 'area' else var.length * var.width + + def can_provide(self, type_object, type_name): + return type_name.endswith('Rect') + + +class RectToString(StrPresentationProvider): + def get_str(self, val): + return "Rectangle[Length: %s, Width: %s , Area: %s]" % (val.length, val.width, val.length * val.width) + + def can_provide(self, type_object, type_name): + return type_name.endswith('Rect') diff --git a/ptvsd/pydevd/tests_python/performance_check.py b/ptvsd/pydevd/tests_python/performance_check.py new file mode 100644 index 00000000..3cd093fe --- /dev/null +++ 
b/ptvsd/pydevd/tests_python/performance_check.py @@ -0,0 +1,225 @@ +import debugger_unittest +import sys +import re +import os +import math + +CHECK_BASELINE, CHECK_REGULAR, CHECK_CYTHON = 'baseline', 'regular', 'cython' + +class PerformanceWriterThread(debugger_unittest.AbstractWriterThread): + + CHECK = None + + debugger_unittest.AbstractWriterThread.get_environ # overrides + def get_environ(self): + env = os.environ.copy() + if self.CHECK == CHECK_BASELINE: + env['PYTHONPATH'] = r'X:\PyDev.Debugger.baseline' + elif self.CHECK == CHECK_CYTHON: + env['PYDEVD_USE_CYTHON'] = 'YES' + elif self.CHECK == CHECK_REGULAR: + env['PYDEVD_USE_CYTHON'] = 'NO' + else: + raise AssertionError("Don't know what to check.") + return env + + debugger_unittest.AbstractWriterThread.get_pydevd_file # overrides + def get_pydevd_file(self): + if self.CHECK == CHECK_BASELINE: + return os.path.abspath(os.path.join(r'X:\PyDev.Debugger.baseline', 'pydevd.py')) + dirname = os.path.dirname(__file__) + dirname = os.path.dirname(dirname) + return os.path.abspath(os.path.join(dirname, 'pydevd.py')) + + +class WriterThreadPerformance1(PerformanceWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_performance_1.py') + BENCHMARK_NAME = 'method_calls_with_breakpoint' + + def run(self): + self.start_socket() + self.write_add_breakpoint(17, 'method') + self.write_make_initial_run() + self.finished_ok = True + +class WriterThreadPerformance2(PerformanceWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_performance_1.py') + BENCHMARK_NAME = 'method_calls_without_breakpoint' + + def run(self): + self.start_socket() + self.write_make_initial_run() + self.finished_ok = True + +class WriterThreadPerformance3(PerformanceWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_performance_1.py') + BENCHMARK_NAME = 'method_calls_with_step_over' + + def run(self): + self.start_socket() + self.write_add_breakpoint(26, None) + + self.write_make_initial_run() + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + + self.write_step_over(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('108', True) + + self.write_run_thread(thread_id) + self.finished_ok = True + +class WriterThreadPerformance4(PerformanceWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_performance_1.py') + BENCHMARK_NAME = 'method_calls_with_exception_breakpoint' + + def run(self): + self.start_socket() + self.write_add_exception_breakpoint('ValueError') + + self.write_make_initial_run() + self.finished_ok = True + +class WriterThreadPerformance5(PerformanceWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_performance_2.py') + BENCHMARK_NAME = 'global_scope_1_with_breakpoint' + + def run(self): + self.start_socket() + self.write_add_breakpoint(23, None) + + self.write_make_initial_run() + self.finished_ok = True + +class WriterThreadPerformance6(PerformanceWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_performance_3.py') + BENCHMARK_NAME = 'global_scope_2_with_breakpoint' + + def run(self): + self.start_socket() + self.write_add_breakpoint(17, None) + + self.write_make_initial_run() + self.finished_ok = True + + +class CheckDebuggerPerformance(debugger_unittest.DebuggerRunner): + + def get_command_line(self): + return [sys.executable] + + def _get_time_from_result(self, result): + stdout = ''.join(result['stdout']) + match = re.search(r'TotalTime>>((\d|\.)+)<<', stdout) + time_taken = 
match.group(1) + return float(time_taken) + + def obtain_results(self, writer_thread_class): + runs = 5 + all_times = [] + for _ in range(runs): + all_times.append(self._get_time_from_result(self.check_case(writer_thread_class))) + print('partial for: %s: %.3fs' % (writer_thread_class.BENCHMARK_NAME, all_times[-1])) + all_times.remove(min(all_times)) + all_times.remove(max(all_times)) + time_when_debugged = sum(all_times) / float(len(all_times)) + + args = self.get_command_line() + args.append(writer_thread_class.TEST_FILE) + # regular_time = self._get_time_from_result(self.run_process(args, writer_thread=None)) + # simple_trace_time = self._get_time_from_result(self.run_process(args+['--regular-trace'], writer_thread=None)) + + if 'SPEEDTIN_AUTHORIZATION_KEY' in os.environ: + + SPEEDTIN_AUTHORIZATION_KEY = os.environ['SPEEDTIN_AUTHORIZATION_KEY'] + + # sys.path.append(r'X:\speedtin\pyspeedtin') + import pyspeedtin # If the authorization key is there, pyspeedtin must be available + import pydevd + pydevd_cython_project_id, pydevd_pure_python_project_id = 6, 7 + if writer_thread_class.CHECK == CHECK_BASELINE: + project_ids = (pydevd_cython_project_id, pydevd_pure_python_project_id) + elif writer_thread_class.CHECK == CHECK_REGULAR: + project_ids = (pydevd_pure_python_project_id,) + elif writer_thread_class.CHECK == CHECK_CYTHON: + project_ids = (pydevd_cython_project_id,) + else: + raise AssertionError('Wrong check: %s' % (writer_thread_class.CHECK)) + for project_id in project_ids: + api = pyspeedtin.PySpeedTinApi(authorization_key=SPEEDTIN_AUTHORIZATION_KEY, project_id=project_id) + + benchmark_name = writer_thread_class.BENCHMARK_NAME + + if writer_thread_class.CHECK == CHECK_BASELINE: + version = '0.0.1_baseline' + return # No longer commit the baseline (it's immutable right now). + else: + version=pydevd.__version__, + + commit_id, branch, commit_date = api.git_commit_id_branch_and_date_from_path(pydevd.__file__) + api.add_benchmark(benchmark_name) + api.add_measurement( + benchmark_name, + value=time_when_debugged, + version=version, + released=False, + branch=branch, + commit_id=commit_id, + commit_date=commit_date, + ) + api.commit() + + return '%s: %.3fs ' % (writer_thread_class.BENCHMARK_NAME, time_when_debugged) + + + def check_performance1(self): + return self.obtain_results(WriterThreadPerformance1) + + def check_performance2(self): + return self.obtain_results(WriterThreadPerformance2) + + def check_performance3(self): + return self.obtain_results(WriterThreadPerformance3) + + def check_performance4(self): + return self.obtain_results(WriterThreadPerformance4) + + def check_performance5(self): + return self.obtain_results(WriterThreadPerformance5) + + def check_performance6(self): + return self.obtain_results(WriterThreadPerformance6) + +if __name__ == '__main__': + debugger_unittest.SHOW_WRITES_AND_READS = False + debugger_unittest.SHOW_OTHER_DEBUG_INFO = False + debugger_unittest.SHOW_STDOUT = False + + import time + start_time = time.time() + + msgs = [] + for check in ( + # CHECK_BASELINE, -- Checks against the version checked out at X:\PyDev.Debugger.baseline. 
+ CHECK_REGULAR, + CHECK_CYTHON + ): + PerformanceWriterThread.CHECK = check + msgs.append('Checking: %s' % (check,)) + check_debugger_performance = CheckDebuggerPerformance() + msgs.append(check_debugger_performance.check_performance1()) + msgs.append(check_debugger_performance.check_performance2()) + msgs.append(check_debugger_performance.check_performance3()) + msgs.append(check_debugger_performance.check_performance4()) + msgs.append(check_debugger_performance.check_performance5()) + msgs.append(check_debugger_performance.check_performance6()) + + for msg in msgs: + print(msg) + + print('TotalTime for profile: %.2fs' % (time.time()-start_time,)) diff --git a/ptvsd/pydevd/tests_python/test_additional_thread_info.py b/ptvsd/pydevd/tests_python/test_additional_thread_info.py new file mode 100644 index 00000000..f161216f --- /dev/null +++ b/ptvsd/pydevd/tests_python/test_additional_thread_info.py @@ -0,0 +1,86 @@ +import sys +import os +from _pydev_bundle import pydev_monkey +sys.path.insert(0, os.path.split(os.path.split(__file__)[0])[0]) + +from _pydevd_bundle.pydevd_constants import Null +import unittest + +try: + import thread +except: + import _thread as thread # @UnresolvedImport + +try: + xrange +except: + xrange = range + +#======================================================================================================================= +# TestCase +#======================================================================================================================= +class TestCase(unittest.TestCase): + + def test_start_new_thread(self): + pydev_monkey.patch_thread_modules() + try: + found = {} + def function(a, b, *args, **kwargs): + found['a'] = a + found['b'] = b + found['args'] = args + found['kwargs'] = kwargs + thread.start_new_thread(function, (1,2,3,4), {'d':1, 'e':2}) + import time + for _i in xrange(20): + if len(found) == 4: + break + time.sleep(.1) + else: + raise AssertionError('Could not get to condition before 2 seconds') + + self.assertEqual({'a': 1, 'b': 2, 'args': (3, 4), 'kwargs': {'e': 2, 'd': 1}}, found) + finally: + pydev_monkey.undo_patch_thread_modules() + + + def test_start_new_thread2(self): + pydev_monkey.patch_thread_modules() + try: + found = {} + + class F(object): + start_new_thread = thread.start_new_thread + + def start_it(self): + try: + self.start_new_thread(self.function, (1,2,3,4), {'d':1, 'e':2}) + except: + import traceback;traceback.print_exc() + + def function(self, a, b, *args, **kwargs): + found['a'] = a + found['b'] = b + found['args'] = args + found['kwargs'] = kwargs + + f = F() + f.start_it() + import time + for _i in xrange(20): + if len(found) == 4: + break + time.sleep(.1) + else: + raise AssertionError('Could not get to condition before 2 seconds') + + self.assertEqual({'a': 1, 'b': 2, 'args': (3, 4), 'kwargs': {'e': 2, 'd': 1}}, found) + finally: + pydev_monkey.undo_patch_thread_modules() + + +#======================================================================================================================= +# main +#======================================================================================================================= +if __name__ == '__main__': + unittest.main() diff --git a/ptvsd/pydevd/tests_python/test_bytecode_modification.py b/ptvsd/pydevd/tests_python/test_bytecode_modification.py new file mode 100644 index 00000000..bb5786d7 --- /dev/null +++ b/ptvsd/pydevd/tests_python/test_bytecode_modification.py @@ -0,0 +1,500 @@ +import dis +import sys +import unittest +from io import StringIO +import 
pytest + +from _pydevd_frame_eval.pydevd_modify_bytecode import insert_code + +TRACE_MESSAGE = "Trace called" + +def tracing(): + print(TRACE_MESSAGE) + + +def call_tracing(): + tracing() + + +def bar(a, b): + return a + b + +IS_PY36 = sys.version_info[0] == 3 and sys.version_info[1] == 6 + + +@pytest.mark.skipif(not IS_PY36, reason='Test requires Python 3.6') +class TestInsertCode(unittest.TestCase): + lines_separator = "---Line tested---" + + def check_insert_every_line(self, func_to_modify, func_to_insert, number_of_lines): + first_line = func_to_modify.__code__.co_firstlineno + 1 + last_line = first_line + number_of_lines + for i in range(first_line, last_line): + self.check_insert_to_line_with_exec(func_to_modify, func_to_insert, i) + print(self.lines_separator) + + def check_insert_to_line_with_exec(self, func_to_modify, func_to_insert, line_number): + code_orig = func_to_modify.__code__ + code_to_insert = func_to_insert.__code__ + success, result = insert_code(code_orig, code_to_insert, line_number) + exec(result) + output = sys.stdout.getvalue().strip().split(self.lines_separator)[-1] + self.assertTrue(TRACE_MESSAGE in output) + + def check_insert_to_line_by_symbols(self, func_to_modify, func_to_insert, line_number, code_for_check): + code_orig = func_to_modify.__code__ + code_to_insert = func_to_insert.__code__ + success, result = insert_code(code_orig, code_to_insert, line_number) + self.compare_bytes_sequence(list(result.co_code), list(code_for_check.co_code)) + + def compare_bytes_sequence(self, code1, code2): + seq1 = [(offset, op, arg) for offset, op, arg in dis._unpack_opargs(code1)] + seq2 = [(offset, op, arg) for offset, op, arg in dis._unpack_opargs(code2)] + self.assertTrue(len(seq1) == len(seq2), "Bytes sequences have different lengths") + for i in range(len(seq1)): + of, op1, arg1 = seq1[i] + _, op2, arg2 = seq2[i] + self.assertEqual(op1, op2, "Different operators at offset {}".format(of)) + if arg1 != arg2: + if op1 in (100, 101, 106, 116): + # Sometimes indexes of variable names and consts may be different, when we insert them, it's ok + continue + else: + self.assertEquals(arg1, arg2, "Different arguments at offset {}".format(of)) + + def test_assignment(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def original(): + a = 1 + b = 2 + c = 3 + + self.check_insert_every_line(original, tracing, 3) + + finally: + sys.stdout = self.original_stdout + + def test_for_loop(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def original(): + n = 3 + sum = 0 + for i in range(n): + sum += i + return sum + + self.check_insert_every_line(original, tracing, 5) + + finally: + sys.stdout = self.original_stdout + + def test_if(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def original(): + if True: + a = 1 + else: + a = 0 + print(a) + + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 2) + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 5) + + finally: + sys.stdout = self.original_stdout + + def test_else(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def original(): + if False: + a = 1 + else: + a = 0 + print(a) + + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 4) + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 5) + + finally: + sys.stdout = self.original_stdout + + def 
test_for_else(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def original(): + sum = 0 + for i in range(3): + sum += i + else: + print(sum) + + def check_line_1(): + tracing() + sum = 0 + for i in range(3): + sum += i + else: + print(sum) + + def check_line_3(): + sum = 0 + for i in range(3): + tracing() + sum += i + else: + print(sum) + + def check_line_5(): + sum = 0 + for i in range(3): + sum += i + else: + tracing() + print(sum) + + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 1) + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 3) + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 5) + + sys.stdout = self.original_stdout + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 1, + check_line_1.__code__) + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 3, + check_line_3.__code__) + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 5, + check_line_5.__code__) + + finally: + sys.stdout = self.original_stdout + + def test_elif(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def original(): + a = 5 + b = 0 + if a < 0: + print("a < 0") + elif a < 3: + print("a < 3") + else: + print("a >= 3") + b = a + return b + + def check_line_1(): + tracing() + a = 5 + b = 0 + if a < 0: + print("a < 0") + elif a < 3: + print("a < 3") + else: + print("a >= 3") + b = a + return b + + def check_line_8(): + a = 5 + b = 0 + if a < 0: + print("a < 0") + elif a < 3: + print("a < 3") + else: + tracing() + print("a >= 3") + b = a + return b + + def check_line_9(): + a = 5 + b = 0 + if a < 0: + print("a < 0") + elif a < 3: + print("a < 3") + else: + print("a >= 3") + tracing() + b = a + return b + + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 1) + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 2) + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 8) + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 9) + + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 1, + check_line_1.__code__) + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 8, + check_line_8.__code__) + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 9, + check_line_9.__code__) + + finally: + sys.stdout = self.original_stdout + + def test_call_other_function(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def original(): + a = 1 + b = 3 + c = bar(a, b) + return c + + def check_line_3(): + a = 1 + b = 3 + tracing() + c = bar(a, b) + return c + + def check_line_4(): + a = 1 + b = 3 + c = bar(a, b) + tracing() + return c + + self.check_insert_every_line(original, tracing, 4) + sys.stdout = self.original_stdout + + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 3, + check_line_3.__code__) + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 4, + check_line_4.__code__) + + finally: + sys.stdout = self.original_stdout + + def test_class_method(self): + self.original_stdout = sys.stdout + sys.stdout = 
StringIO() + + try: + class A(object): + @staticmethod + def foo(): + print("i'm in foo") + + @staticmethod + def check_line_2(): + tracing() + print("i'm in foo") + + original = A.foo + self.check_insert_to_line_with_exec(original, tracing, original.__code__.co_firstlineno + 2) + + self.check_insert_to_line_by_symbols(original, call_tracing, original.__code__.co_firstlineno + 2, + A.check_line_2.__code__) + + finally: + sys.stdout = self.original_stdout + + def test_offset_overflow(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def foo(): + a = 1 + b = 2 # breakpoint + c = 3 + a1 = 1 if a > 1 else 2 + a2 = 1 if a > 1 else 2 + a3 = 1 if a > 1 else 2 + a4 = 1 if a > 1 else 2 + a5 = 1 if a > 1 else 2 + a6 = 1 if a > 1 else 2 + a7 = 1 if a > 1 else 2 + a8 = 1 if a > 1 else 2 + a9 = 1 if a > 1 else 2 + a10 = 1 if a > 1 else 2 + a11 = 1 if a > 1 else 2 + a12 = 1 if a > 1 else 2 + a13 = 1 if a > 1 else 2 + + for i in range(1): + if a > 0: + print("111") + # a = 1 + else: + print("222") + return b + + def check_line_2(): + a = 1 + tracing() + b = 2 + c = 3 + a1 = 1 if a > 1 else 2 + a2 = 1 if a > 1 else 2 + a3 = 1 if a > 1 else 2 + a4 = 1 if a > 1 else 2 + a5 = 1 if a > 1 else 2 + a6 = 1 if a > 1 else 2 + a7 = 1 if a > 1 else 2 + a8 = 1 if a > 1 else 2 + a9 = 1 if a > 1 else 2 + a10 = 1 if a > 1 else 2 + a11 = 1 if a > 1 else 2 + a12 = 1 if a > 1 else 2 + a13 = 1 if a > 1 else 2 + + for i in range(1): + if a > 0: + print("111") + # a = 1 + else: + print("222") + return b + + self.check_insert_to_line_with_exec(foo, tracing, foo.__code__.co_firstlineno + 2) + + self.check_insert_to_line_by_symbols(foo, call_tracing, foo.__code__.co_firstlineno + 2, + check_line_2.__code__) + + finally: + sys.stdout = self.original_stdout + + def test_long_lines(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def foo(): + a = 1 + b = 1 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 + c = 1 if b > 1 else 2 if b > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 + d = 1 if c > 1 else 2 if c > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 + e = d + 1 + return e + + def check_line_2(): + a = 1 + tracing() + b = 1 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a 
> 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 + c = 1 if b > 1 else 2 if b > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 + d = 1 if c > 1 else 2 if c > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 if a > 1 else 2 if a > 0 else 3 if a > 4 else 23 + e = d + 1 + return e + + self.check_insert_to_line_with_exec(foo, tracing, foo.__code__.co_firstlineno + 2) + sys.stdout = self.original_stdout + + self.check_insert_to_line_by_symbols(foo, call_tracing, foo.__code__.co_firstlineno + 2, + check_line_2.__code__) + + + finally: + sys.stdout = self.original_stdout + + def test_many_names(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + from tests_python._bytecode_many_names_example import foo + self.check_insert_to_line_with_exec(foo, tracing, foo.__code__.co_firstlineno + 2) + + finally: + sys.stdout = self.original_stdout + + def test_extended_arg_overflow(self): + + from tests_python._bytecode_overflow_example import Dummy, DummyTracing + self.check_insert_to_line_by_symbols(Dummy.fun, call_tracing, Dummy.fun.__code__.co_firstlineno + 3, + DummyTracing.fun.__code__) + + def test_double_extended_arg(self): + self.original_stdout = sys.stdout + sys.stdout = StringIO() + + try: + def foo(): + a = 1 + b = 2 + if b > 0: + d = a + b + d += 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + a = a + 1 + return a + + def foo_check(): + a = 1 + b = 2 + tracing() + if b > 0: + d = a + b + d += 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 
else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + b = b - 1 if a > 0 else b + 1 + a = a + 1 + return a + + self.check_insert_to_line_with_exec(foo, tracing, foo.__code__.co_firstlineno + 2) + sys.stdout = self.original_stdout + + self.check_insert_to_line_by_symbols(foo, call_tracing, foo.__code__.co_firstlineno + 3, + foo_check.__code__) + + + finally: + sys.stdout = self.original_stdout \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/test_debugger.py b/ptvsd/pydevd/tests_python/test_debugger.py new file mode 100644 index 00000000..6247284b --- /dev/null +++ b/ptvsd/pydevd/tests_python/test_debugger.py @@ -0,0 +1,1472 @@ +''' + The idea is that we record the commands sent to the debugger and reproduce them from this script + (so, this works as the client, which spawns the debugger as a separate process and communicates + to it as if it was run from the outside) + + Note that it's a python script but it'll spawn a process to run as jython, ironpython and as python. +''' +import os +import platform +import sys +import threading +import time +import unittest + +import pytest + +from tests_python import debugger_unittest +from tests_python.debugger_unittest import get_free_port + + +CMD_SET_PROPERTY_TRACE, CMD_EVALUATE_CONSOLE_EXPRESSION, CMD_RUN_CUSTOM_OPERATION, CMD_ENABLE_DONT_TRACE = 133, 134, 135, 141 + +IS_CPYTHON = platform.python_implementation() == 'CPython' +IS_IRONPYTHON = platform.python_implementation() == 'IronPython' +IS_JYTHON = platform.python_implementation() == 'Jython' + +try: + xrange +except: + xrange = range + + +TEST_DJANGO = False +if sys.version_info[:2] == (2, 7): + # Only test on python 2.7 for now + try: + import django + TEST_DJANGO = True + except: + pass + +IS_PY2 = False +if sys.version_info[0] == 2: + IS_PY2 = True + +if IS_PY2: + builtin_qualifier = "__builtin__" +else: + builtin_qualifier = "builtins" + +IS_PY36 = False +if sys.version_info[0] == 3 and sys.version_info[1] == 6: + IS_PY36 = True + +TEST_CYTHON = os.getenv('PYDEVD_USE_CYTHON', None) == 'YES' +TEST_JYTHON = os.getenv('TEST_JYTHON', None) == 'YES' + +#======================================================================================================================= +# WriterThreadCaseSetNextStatement +#====================================================================================================================== +class WriterThreadCaseSetNextStatement(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_set_next_statement.py') + + def run(self): + self.start_socket() + breakpoint_id = self.write_add_breakpoint(6, None) + self.write_make_initial_run() + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + + assert line == 6, 'Expected return to be in line 6, was: %s' % line + + self.write_evaluate_expression('%s\t%s\t%s' % (thread_id, frame_id, 'LOCAL'), 'a') + self.wait_for_evaluation('
  <li>v1:v1</li>
  <li>v2:v2</li>
  • ' % (contents,)) + + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCaseDjango2 +#====================================================================================================================== +class WriterThreadCaseDjango2(AbstractWriterThreadCaseDjango): + + def run(self): + self.start_socket() + self.write_add_breakpoint_django(4, None, 'name.html') + self.write_make_initial_run() + + t = self.create_request_thread('my_app/name') + time.sleep(2) # Give django some time to get to startup before requesting the page + t.start() + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + assert line == 4, 'Expected return to be in line 4, was: %s' % line + + self.write_get_frame(thread_id, frame_id) + self.wait_for_var('%0A
    '.format(builtin_qualifier,)) + self.write_run_thread(thread_id) + + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCase17 - [Test Case]: dont trace +#====================================================================================================================== +class WriterThreadCase17(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case17.py') + + def run(self): + self.start_socket() + self.write_enable_dont_trace(True) + self.write_add_breakpoint(27, 'main') + self.write_add_breakpoint(29, 'main') + self.write_add_breakpoint(31, 'main') + self.write_add_breakpoint(33, 'main') + self.write_make_initial_run() + + for i in range(4): + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + + self.write_step_in(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + # Should Skip step into properties setter + assert line == 2, 'Expected return to be in line 2, was: %s' % line + self.write_run_thread(thread_id) + + + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCase17a - [Test Case]: dont trace return +#====================================================================================================================== +class WriterThreadCase17a(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case17a.py') + + def run(self): + self.start_socket() + self.write_enable_dont_trace(True) + self.write_add_breakpoint(2, 'm1') + self.write_make_initial_run() + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + assert line == 2, 'Expected return to be in line 2, was: %s' % line + + self.write_step_in(thread_id) + thread_id, frame_id, line, name = self.wait_for_breakpoint_hit('107', get_line=True, get_name=True) + + # Should Skip step into properties setter + assert name == 'm3' + assert line == 10, 'Expected return to be in line 10, was: %s' % line + self.write_run_thread(thread_id) + + + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCase16 - [Test Case]: numpy.ndarray resolver +#====================================================================================================================== +class WriterThreadCase16(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case16.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(9, 'main') + self.write_make_initial_run() + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + + # In this test we check that the three arrays of different shapes, sizes and types + # are all resolved properly as ndarrays. + + # First pass check is that we have all three expected variables defined + self.write_get_frame(thread_id, frame_id) + self.wait_for_multiple_vars(( + '', + '', + '', + )) + + # For each variable, check each of the resolved (meta data) attributes... 
+ self.write_get_variable(thread_id, frame_id, 'smallarray') + self.wait_for_multiple_vars(( + ''.format(builtin_qualifier), + ''.format(builtin_qualifier), + ''.format(builtin_qualifier), + ''.format(builtin_qualifier), + ], + [ + ''.format(builtin_qualifier), + ''.format(builtin_qualifier), + ''.format(builtin_qualifier), + ''.format(builtin_qualifier), + ], + 'False', '%27Black%27']) + assert 7 == self._sequence, 'Expected 9. Had: %s' % self._sequence + + # Change some variable + self.write_debug_console_expression("%s\t%s\tEVALUATE\tcarObj.color='Red'" % (thread_id, frame_id)) + self.write_debug_console_expression("%s\t%s\tEVALUATE\tcarObj.color" % (thread_id, frame_id)) + self.wait_for_var(['False', '%27Red%27']) + assert 11 == self._sequence, 'Expected 13. Had: %s' % self._sequence + + # Iterate some loop + self.write_debug_console_expression("%s\t%s\tEVALUATE\tfor i in range(3):" % (thread_id, frame_id)) + self.wait_for_var(['True']) + self.write_debug_console_expression("%s\t%s\tEVALUATE\t print(i)" % (thread_id, frame_id)) + self.wait_for_var(['True']) + self.write_debug_console_expression("%s\t%s\tEVALUATE\t" % (thread_id, frame_id)) + self.wait_for_var( + [ + 'False' ] + ) + assert 17 == self._sequence, 'Expected 19. Had: %s' % self._sequence + + self.write_run_thread(thread_id) + self.finished_ok = True + + +#======================================================================================================================= +# WriterThreadCase13 +#====================================================================================================================== +class WriterThreadCase13(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case13.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(35, 'main') + self.write("%s\t%s\t%s" % (CMD_SET_PROPERTY_TRACE, self.next_seq(), "true;false;false;true")) + self.write_make_initial_run() + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + + self.write_get_frame(thread_id, frame_id) + + self.write_step_in(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + # Should go inside setter method + assert line == 25, 'Expected return to be in line 25, was: %s' % line + + self.write_step_in(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + + self.write_step_in(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + # Should go inside getter method + assert line == 21, 'Expected return to be in line 21, was: %s' % line + + self.write_step_in(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + + # Disable property tracing + self.write("%s\t%s\t%s" % (CMD_SET_PROPERTY_TRACE, self.next_seq(), "true;true;true;true")) + self.write_step_in(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + # Should Skip step into properties setter + assert line == 39, 'Expected return to be in line 39, was: %s' % line + + # Enable property tracing + self.write("%s\t%s\t%s" % (CMD_SET_PROPERTY_TRACE, self.next_seq(), "true;false;false;true")) + self.write_step_in(thread_id) + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + # Should go inside getter method + assert line == 8, 'Expected return to be in line 8, was: %s' % line + + self.write_run_thread(thread_id) + + self.finished_ok = True + 
+#======================================================================================================================= +# WriterThreadCase12 +#====================================================================================================================== +class WriterThreadCase12(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(2, '') # Should not be hit: setting empty function (not None) should only hit global. + self.write_add_breakpoint(6, 'Method1a') + self.write_add_breakpoint(11, 'Method2') + self.write_make_initial_run() + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + + assert line == 11, 'Expected return to be in line 11, was: %s' % line + + self.write_step_return(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) # not a return (it stopped in the other breakpoint) + + assert line == 6, 'Expected return to be in line 6, was: %s' % line + + self.write_run_thread(thread_id) + + assert 13 == self._sequence, 'Expected 13. Had: %s' % self._sequence + + self.finished_ok = True + + + +#======================================================================================================================= +# WriterThreadCase11 +#====================================================================================================================== +class WriterThreadCase11(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(2, 'Method1') + self.write_make_initial_run() + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + + assert line == 2, 'Expected return to be in line 2, was: %s' % line + + self.write_step_over(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('108', True) + + assert line == 3, 'Expected return to be in line 3, was: %s' % line + + self.write_step_over(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('108', True) + + assert line == 11, 'Expected return to be in line 11, was: %s' % line + + self.write_step_over(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('108', True) + + assert line == 12, 'Expected return to be in line 12, was: %s' % line + + self.write_run_thread(thread_id) + + assert 13 == self._sequence, 'Expected 13. Had: %s' % self._sequence + + self.finished_ok = True + + + +#======================================================================================================================= +# WriterThreadCase10 +#====================================================================================================================== +class WriterThreadCase10(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(2, 'None') # None or Method should make hit. 
+ self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit('111') + + self.write_step_return(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('109', True) + + assert line == 11, 'Expected return to be in line 11, was: %s' % line + + self.write_step_over(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('108', True) + + assert line == 12, 'Expected return to be in line 12, was: %s' % line + + self.write_run_thread(thread_id) + + assert 11 == self._sequence, 'Expected 11. Had: %s' % self._sequence + + self.finished_ok = True + + + +#======================================================================================================================= +# WriterThreadCase9 +#====================================================================================================================== +class WriterThreadCase9(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case89.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(10, 'Method3') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit('111') + + self.write_step_over(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('108', True) + + assert line == 11, 'Expected return to be in line 11, was: %s' % line + + self.write_step_over(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('108', True) + + assert line == 12, 'Expected return to be in line 12, was: %s' % line + + self.write_run_thread(thread_id) + + assert 11 == self._sequence, 'Expected 11. Had: %s' % self._sequence + + self.finished_ok = True + + +#======================================================================================================================= +# WriterThreadCase8 +#====================================================================================================================== +class WriterThreadCase8(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case89.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(10, 'Method3') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit('111') + + self.write_step_return(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('109', True) + + assert line == 15, 'Expected return to be in line 15, was: %s' % line + + self.write_run_thread(thread_id) + + assert 9 == self._sequence, 'Expected 9. 
Had: %s' % self._sequence + + self.finished_ok = True + + + + +#======================================================================================================================= +# WriterThreadCase7 +#====================================================================================================================== +class WriterThreadCase7(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case7.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(2, 'Call') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit('111') + + self.write_get_frame(thread_id, frame_id) + + self.wait_for_vars('') # no vars at this point + + self.write_step_over(thread_id) + + self.wait_for_breakpoint_hit('108') + + self.write_get_frame(thread_id, frame_id) + + self.wait_for_vars('%0A'.format(builtin_qualifier)) + + self.write_step_over(thread_id) + + self.wait_for_breakpoint_hit('108') + + + self.write_get_frame(thread_id, frame_id) + + self.wait_for_vars('%0A%0A'.format(builtin_qualifier)) + + self.write_run_thread(thread_id) + + assert 17 == self._sequence, 'Expected 17. Had: %s' % self._sequence + + self.finished_ok = True + + + +#======================================================================================================================= +# WriterThreadCase6 +#======================================================================================================================= +class WriterThreadCase6(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case56.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(2, 'Call2') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_get_frame(thread_id, frame_id) + + self.write_step_return(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('109', True) + + assert line == 8, 'Expecting it to go to line 8. Went to: %s' % line + + self.write_step_in(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + + # goes to line 4 in jython (function declaration line) + assert line in (4, 5), 'Expecting it to go to line 4 or 5. Went to: %s' % line + + self.write_run_thread(thread_id) + + assert 13 == self._sequence, 'Expected 15. Had: %s' % self._sequence + + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCase5 +#======================================================================================================================= +class WriterThreadCase5(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case56.py') + + def run(self): + self.start_socket() + breakpoint_id = self.write_add_breakpoint(2, 'Call2') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_get_frame(thread_id, frame_id) + + self.write_remove_breakpoint(breakpoint_id) + + self.write_step_return(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('109', True) + + assert line == 8, 'Expecting it to go to line 8. Went to: %s' % line + + self.write_step_in(thread_id) + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('107', True) + + # goes to line 4 in jython (function declaration line) + assert line in (4, 5), 'Expecting it to go to line 4 or 5. 
Went to: %s' % line + + self.write_run_thread(thread_id) + + assert 15 == self._sequence, 'Expected 15. Had: %s' % self._sequence + + self.finished_ok = True + + +#======================================================================================================================= +# WriterThreadCase4 +#======================================================================================================================= +class WriterThreadCase4(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case4.py') + + def run(self): + self.start_socket() + self.write_make_initial_run() + + thread_id = self.wait_for_new_thread() + + self.write_suspend_thread(thread_id) + + time.sleep(4) # wait for time enough for the test to finish if it wasn't suspended + + self.write_run_thread(thread_id) + + self.finished_ok = True + + +#======================================================================================================================= +# WriterThreadCase3 +#======================================================================================================================= +class WriterThreadCase3(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case3.py') + + def run(self): + self.start_socket() + self.write_make_initial_run() + time.sleep(.5) + breakpoint_id = self.write_add_breakpoint(4, '') + self.write_add_breakpoint(5, 'FuncNotAvailable') # Check that it doesn't get hit in the global when a function is available + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_get_frame(thread_id, frame_id) + + self.write_run_thread(thread_id) + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_get_frame(thread_id, frame_id) + + self.write_remove_breakpoint(breakpoint_id) + + self.write_run_thread(thread_id) + + assert 17 == self._sequence, 'Expected 17. Had: %s' % self._sequence + + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCase2 +#======================================================================================================================= +class WriterThreadCase2(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case2.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(3, 'Call4') # seq = 3 + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_get_frame(thread_id, frame_id) + + self.write_add_breakpoint(14, 'Call2') + + self.write_run_thread(thread_id) + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_get_frame(thread_id, frame_id) + + self.write_run_thread(thread_id) + + self.log.append('Checking sequence. Found: %s' % (self._sequence)) + assert 15 == self._sequence, 'Expected 15. 
Had: %s' % self._sequence + + self.log.append('Marking finished ok.') + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCaseQThread1 +#======================================================================================================================= +class WriterThreadCaseQThread1(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_qthread1.py') + + def run(self): + self.start_socket() + breakpoint_id = self.write_add_breakpoint(19, 'run') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_remove_breakpoint(breakpoint_id) + self.write_run_thread(thread_id) + + self.log.append('Checking sequence. Found: %s' % (self._sequence)) + assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence + + self.log.append('Marking finished ok.') + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCaseQThread2 +#======================================================================================================================= +class WriterThreadCaseQThread2(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_qthread2.py') + + def run(self): + self.start_socket() + breakpoint_id = self.write_add_breakpoint(24, 'long_running') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_remove_breakpoint(breakpoint_id) + self.write_run_thread(thread_id) + + self.log.append('Checking sequence. Found: %s' % (self._sequence)) + assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence + + self.log.append('Marking finished ok.') + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCaseQThread3 +#======================================================================================================================= +class WriterThreadCaseQThread3(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_qthread3.py') + + def run(self): + self.start_socket() + breakpoint_id = self.write_add_breakpoint(22, 'run') + self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_remove_breakpoint(breakpoint_id) + self.write_run_thread(thread_id) + + self.log.append('Checking sequence. Found: %s' % (self._sequence)) + assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence + + self.log.append('Marking finished ok.') + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCaseQThread4 +#======================================================================================================================= +class WriterThreadCaseQThread4(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_qthread4.py') + + def run(self): + self.start_socket() + breakpoint_id = self.write_add_breakpoint(28, 'on_start') # breakpoint on print('On start called2'). 
+ self.write_make_initial_run() + + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_remove_breakpoint(breakpoint_id) + self.write_run_thread(thread_id) + + self.log.append('Checking sequence. Found: %s' % (self._sequence)) + assert 9 == self._sequence, 'Expected 9. Had: %s' % self._sequence + + self.log.append('Marking finished ok.') + self.finished_ok = True + + def additional_output_checks(self, stdout, stderr): + if 'On start called' not in stdout: + raise AssertionError('Expected "On start called" to be in stdout:\n%s' % (stdout,)) + if 'Done sleeping' not in stdout: + raise AssertionError('Expected "Done sleeping" to be in stdout:\n%s' % (stdout,)) + if 'native Qt signal is not callable' in stderr: + raise AssertionError('Did not expect "native Qt signal is not callable" to be in stderr:\n%s' % (stderr,)) + +#======================================================================================================================= +# WriterThreadCase1 +#======================================================================================================================= +class WriterThreadCase1(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case1.py') + + def run(self): + self.start_socket() + + self.log.append('writing add breakpoint') + self.write_add_breakpoint(6, 'set_up') + + self.log.append('making initial run') + self.write_make_initial_run() + + self.log.append('waiting for breakpoint hit') + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.log.append('get frame') + self.write_get_frame(thread_id, frame_id) + + self.log.append('step over') + self.write_step_over(thread_id) + + self.log.append('get frame') + self.write_get_frame(thread_id, frame_id) + + self.log.append('run thread') + self.write_run_thread(thread_id) + + self.log.append('asserting') + try: + assert 13 == self._sequence, 'Expected 13. Had: %s' % self._sequence + except: + self.log.append('assert failed!') + raise + self.log.append('asserted') + + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCaseMSwitch +#======================================================================================================================= +class WriterThreadCaseMSwitch(debugger_unittest.AbstractWriterThread): + + TEST_FILE = 'tests_python._debugger_case_m_switch' + IS_MODULE = True + + def get_environ(self): + env = os.environ.copy() + curr_pythonpath = env.get('PYTHONPATH', '') + + root_dirname = os.path.dirname(os.path.dirname(__file__)) + + curr_pythonpath += root_dirname + os.pathsep + env['PYTHONPATH'] = curr_pythonpath + return env + + def get_main_filename(self): + return debugger_unittest._get_debugger_test_file('_debugger_case_m_switch.py') + + def run(self): + self.start_socket() + + self.log.append('writing add breakpoint') + breakpoint_id = self.write_add_breakpoint(1, None) + + self.log.append('making initial run') + self.write_make_initial_run() + + self.log.append('waiting for breakpoint hit') + thread_id, frame_id = self.wait_for_breakpoint_hit() + + self.write_remove_breakpoint(breakpoint_id) + + self.log.append('run thread') + self.write_run_thread(thread_id) + + self.log.append('asserting') + try: + assert 9 == self._sequence, 'Expected 9. 
Had: %s' % self._sequence + except: + self.log.append('assert failed!') + raise + self.log.append('asserted') + + self.finished_ok = True + + +# ======================================================================================================================= +# WriterThreadCaseModuleWithEntryPoint +# ======================================================================================================================= +class WriterThreadCaseModuleWithEntryPoint(WriterThreadCaseMSwitch): + TEST_FILE = 'tests_python._debugger_case_module_entry_point:main' + IS_MODULE = True + + def get_main_filename(self): + return debugger_unittest._get_debugger_test_file('_debugger_case_module_entry_point.py') + + + + +#======================================================================================================================= +# WriterThreadCaseRemoteDebugger +#======================================================================================================================= +class WriterThreadCaseRemoteDebugger(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_remote.py') + + def run(self): + self.start_socket(8787) + + self.log.append('making initial run') + self.write_make_initial_run() + + self.log.append('waiting for breakpoint hit') + thread_id, frame_id = self.wait_for_breakpoint_hit('105') + + self.log.append('run thread') + self.write_run_thread(thread_id) + + self.log.append('asserting') + try: + assert 5 == self._sequence, 'Expected 5. Had: %s' % self._sequence + except: + self.log.append('assert failed!') + raise + self.log.append('asserted') + + self.finished_ok = True + +#======================================================================================================================= +# _SecondaryMultiProcProcessWriterThread +#======================================================================================================================= +class _SecondaryMultiProcProcessWriterThread(debugger_unittest.AbstractWriterThread): + + FORCE_KILL_PROCESS_WHEN_FINISHED_OK = True + + def __init__(self, server_socket): + debugger_unittest.AbstractWriterThread.__init__(self) + self.server_socket = server_socket + + def run(self): + print('waiting for second process') + self.sock, addr = self.server_socket.accept() + print('accepted second process') + + from tests_python.debugger_unittest import ReaderThread + self.reader_thread = ReaderThread(self.sock) + self.reader_thread.start() + + self._sequence = -1 + # initial command is always the version + self.write_version() + self.log.append('start_socket') + self.write_make_initial_run() + time.sleep(.5) + self.finished_ok = True + +#======================================================================================================================= +# WriterThreadCaseRemoteDebuggerMultiProc +#======================================================================================================================= +class WriterThreadCaseRemoteDebuggerMultiProc(debugger_unittest.AbstractWriterThread): + + # It seems sometimes it becomes flaky on the ci because the process outlives the writer thread... + # As we're only interested in knowing if a second connection was received, just kill the related + # process. 
+ FORCE_KILL_PROCESS_WHEN_FINISHED_OK = True + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_remote_1.py') + + def run(self): + self.start_socket(8787) + + self.log.append('making initial run') + self.write_make_initial_run() + + self.log.append('waiting for breakpoint hit') + thread_id, frame_id = self.wait_for_breakpoint_hit('105') + + self.secondary_multi_proc_process_writer_thread = secondary_multi_proc_process_writer_thread = \ + _SecondaryMultiProcProcessWriterThread(self.server_socket) + secondary_multi_proc_process_writer_thread.start() + + self.log.append('run thread') + self.write_run_thread(thread_id) + + for _i in xrange(400): + if secondary_multi_proc_process_writer_thread.finished_ok: + break + time.sleep(.1) + else: + self.log.append('Secondary process not finished ok!') + raise AssertionError('Secondary process not finished ok!') + + self.log.append('Secondary process finished!') + try: + assert 5 == self._sequence, 'Expected 5. Had: %s' % self._sequence + except: + self.log.append('assert failed!') + raise + self.log.append('asserted') + + self.finished_ok = True + + def do_kill(self): + debugger_unittest.AbstractWriterThread.do_kill(self) + if hasattr(self, 'secondary_multi_proc_process_writer_thread'): + self.secondary_multi_proc_process_writer_thread.do_kill() + +#======================================================================================================================= +# WriterThreadCaseTypeExt - [Test Case]: Custom type presentation extensions +#====================================================================================================================== +class WriterThreadCaseTypeExt(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_type_ext.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(7, None) + self.write_make_initial_run() + + thread_id, frame_id, line = self.wait_for_breakpoint_hit('111', True) + self.write_get_frame(thread_id, frame_id) + self.wait_for_var(r'') is True + self.write_get_variable(thread_id, frame_id, 'my_rect') + self.wait_for_var(r''.format(builtin_qualifier)) is True + self.write_run_thread(thread_id) + self.finished_ok = True + + + def get_environ(self): + env = os.environ.copy() + + python_path = env.get("PYTHONPATH","") + ext_base = debugger_unittest._get_debugger_test_file('my_extensions') + env['PYTHONPATH']= ext_base + os.pathsep + python_path if python_path else ext_base + return env + +#======================================================================================================================= +# WriterThreadCaseEventExt - [Test Case]: Test initialize event for extensions +#====================================================================================================================== +class WriterThreadCaseEventExt(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case_event_ext.py') + + def run(self): + self.start_socket() + self.write_make_initial_run() + self.finished_ok = True + + def additional_output_checks(self, stdout, stderr): + if 'INITIALIZE EVENT RECEIVED' not in stdout: + raise AssertionError('No initialize event received') + + def get_environ(self): + env = os.environ.copy() + + python_path = env.get("PYTHONPATH","") + ext_base = debugger_unittest._get_debugger_test_file('my_extensions') + env['PYTHONPATH']= ext_base + os.pathsep + python_path if python_path else ext_base + env["VERIFY_EVENT_TEST"] = "1" + 
return env + +#======================================================================================================================= +# Test +#======================================================================================================================= +class Test(unittest.TestCase, debugger_unittest.DebuggerRunner): + + def get_command_line(self): + if IS_JYTHON: + if sys.executable is not None: + # i.e.: we're running with the provided jython.exe + return [sys.executable] + else: + + + return [ + get_java_location(), + '-classpath', + get_jython_jar(), + 'org.python.util.jython' + ] + + if IS_CPYTHON: + return [sys.executable, '-u'] + + if IS_IRONPYTHON: + return [ + sys.executable, + '-X:Frames' + ] + + raise RuntimeError('Unable to provide command line') + + @pytest.mark.skipif(IS_IRONPYTHON, reason='Test needs gc.get_referrers to really check anything.') + def test_case_1(self): + self.check_case(WriterThreadCase1) + + def test_case_2(self): + self.check_case(WriterThreadCase2) + + @pytest.mark.skipif(IS_IRONPYTHON, reason='This test fails once in a while due to timing issues on IronPython, so, skipping it.') + def test_case_3(self): + self.check_case(WriterThreadCase3) + + @pytest.mark.skipif(IS_JYTHON, reason='This test is flaky on Jython, so, skipping it.') + def test_case_4(self): + self.check_case(WriterThreadCase4) + + def test_case_5(self): + self.check_case(WriterThreadCase5) + + def test_case_6(self): + self.check_case(WriterThreadCase6) + + def test_case_7(self): + if IS_IRONPYTHON: + # This test checks that we start without variables and at each step a new var is created, but on ironpython, + # the variables exist all at once (with None values), so, we can't test it properly. + pytest.skip("Different behavior on IronPython") + + self.check_case(WriterThreadCase7) + + def test_case_8(self): + self.check_case(WriterThreadCase8) + + def test_case_9(self): + self.check_case(WriterThreadCase9) + + def test_case_10(self): + self.check_case(WriterThreadCase10) + + def test_case_11(self): + self.check_case(WriterThreadCase11) + + def test_case_12(self): + self.check_case(WriterThreadCase12) + + @pytest.mark.skipif(IS_IRONPYTHON, reason='Failing on IronPython (needs to be investigated).') + def test_case_13(self): + self.check_case(WriterThreadCase13) + + def test_case_14(self): + self.check_case(WriterThreadCase14) + + def test_case_15(self): + self.check_case(WriterThreadCase15) + + def test_case_16(self): + try: + import numpy + except ImportError: + pytest.skip('numpy not available') + + self.check_case(WriterThreadCase16) + + def test_case_17(self): + self.check_case(WriterThreadCase17) + + def test_case_17a(self): + self.check_case(WriterThreadCase17a) + + def test_case_18(self): + if IS_IRONPYTHON or IS_JYTHON: + pytest.skip('Unsupported assign to local') + + self.check_case(WriterThreadCase18) + + def test_case_19(self): + self.check_case(WriterThreadCase19) + + if TEST_DJANGO: + def test_case_django(self): + self.check_case(WriterThreadCaseDjango) + + def test_case_django2(self): + self.check_case(WriterThreadCaseDjango2) + + + if TEST_CYTHON: + def test_cython(self): + from _pydevd_bundle import pydevd_cython + assert pydevd_cython.trace_dispatch is not None + + def _has_qt(self): + try: + from PySide import QtCore # @UnresolvedImport + return True + except: + try: + from PyQt4 import QtCore + return True + except: + try: + from PyQt5 import QtCore + return True + except: + pass + return False + + def test_case_qthread1(self): + if self._has_qt(): + 
self.check_case(WriterThreadCaseQThread1) + + def test_case_qthread2(self): + if self._has_qt(): + self.check_case(WriterThreadCaseQThread2) + + def test_case_qthread3(self): + if self._has_qt(): + self.check_case(WriterThreadCaseQThread3) + + def test_case_qthread4(self): + if self._has_qt(): + self.check_case(WriterThreadCaseQThread4) + + def test_m_switch(self): + self.check_case(WriterThreadCaseMSwitch) + + def test_module_entry_point(self): + self.check_case(WriterThreadCaseModuleWithEntryPoint) + + @pytest.mark.skipif(not IS_CPYTHON or IS_PY36, reason='Only for Python (failing on 3.6 -- needs to be investigated).') + def test_case_set_next_statement(self): + self.check_case(WriterThreadCaseSetNextStatement) + + + @pytest.mark.skipif(IS_IRONPYTHON, reason='Failing on IronPython (needs to be investigated).') + def test_case_type_ext(self): + self.check_case(WriterThreadCaseTypeExt) + + @pytest.mark.skipif(IS_IRONPYTHON, reason='Failing on IronPython (needs to be investigated).') + def test_case_event_ext(self): + self.check_case(WriterThreadCaseEventExt) + +@pytest.mark.skipif(not IS_CPYTHON, reason='CPython only test.') +class TestPythonRemoteDebugger(unittest.TestCase, debugger_unittest.DebuggerRunner): + + def get_command_line(self): + return [sys.executable, '-u'] + + def add_command_line_args(self, args): + return args + [self.writer_thread.TEST_FILE] + + def test_remote_debugger(self): + self.check_case(WriterThreadCaseRemoteDebugger) + + def test_remote_debugger2(self): + self.check_case(WriterThreadCaseRemoteDebuggerMultiProc) + + + +def get_java_location(): + from java.lang import System # @UnresolvedImport + jre_dir = System.getProperty("java.home") + for f in [os.path.join(jre_dir, 'bin', 'java.exe'), os.path.join(jre_dir, 'bin', 'java')]: + if os.path.exists(f): + return f + raise RuntimeError('Unable to find java executable') + +def get_jython_jar(): + from java.lang import ClassLoader # @UnresolvedImport + cl = ClassLoader.getSystemClassLoader() + paths = map(lambda url: url.getFile(), cl.getURLs()) + for p in paths: + if 'jython.jar' in p: + return p + raise RuntimeError('Unable to find jython.jar') + + +def get_location_from_line(line): + loc = line.split('=')[1].strip() + if loc.endswith(';'): + loc = loc[:-1] + if loc.endswith('"'): + loc = loc[:-1] + if loc.startswith('"'): + loc = loc[1:] + return loc + + +def split_line(line): + if '=' not in line: + return None, None + var = line.split('=')[0].strip() + return var, get_location_from_line(line) + + + +# c:\bin\jython2.7.0\bin\jython.exe -m py.test tests_python diff --git a/ptvsd/pydevd/tests_python/test_frame_eval_and_tracing.py b/ptvsd/pydevd/tests_python/test_frame_eval_and_tracing.py new file mode 100644 index 00000000..a948a060 --- /dev/null +++ b/ptvsd/pydevd/tests_python/test_frame_eval_and_tracing.py @@ -0,0 +1,219 @@ +import os +import platform +import unittest +import pytest +import sys + +import time + +from tests_python import debugger_unittest + +IS_CPYTHON = platform.python_implementation() == 'CPython' +IS_PY36 = sys.version_info[0] == 3 and sys.version_info[1] == 6 +TEST_CYTHON = os.getenv('PYDEVD_USE_CYTHON', None) == 'YES' + + +class WriterThreadStepAndResume(debugger_unittest.AbstractWriterThread): + + TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py') + + def run(self): + self.start_socket() + self.write_add_breakpoint(10, 'Method2') + self.write_add_breakpoint(2, 'Method1') + self.write_make_initial_run() + + thread_id, frame_id, line, suspend_type = 
self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+
+        assert line == 10, 'Expected return to be in line 10, was: %s' % line
+        assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
+
+        self.write_step_over(thread_id)
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('108', True)
+
+        assert line == 11, 'Expected return to be in line 11, was: %s' % line
+        # we use tracing debugger while stepping
+        assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
+
+        self.write_run_thread(thread_id)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+
+        assert line == 2, 'Expected return to be in line 2, was: %s' % line
+        # we enable frame evaluation debugger after "Resume" command
+        assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
+
+        self.write_run_thread(thread_id)
+
+        self.finished_ok = True
+
+
+class WriterThreadStepReturn(debugger_unittest.AbstractWriterThread):
+
+    TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case56.py')
+
+    def run(self):
+        self.start_socket()
+        self.write_add_breakpoint(2, 'Call2')
+        self.write_make_initial_run()
+
+        thread_id, frame_id, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type()
+
+        assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
+        self.write_get_frame(thread_id, frame_id)
+
+        self.write_step_return(thread_id)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('109', True)
+
+        assert line == 8, 'Expecting it to go to line 8. Went to: %s' % line
+        # Step return uses temporary breakpoint, so we use tracing debugger
+        assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
+
+        self.write_step_in(thread_id)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('107', True)
+
+        # goes to line 4 in jython (function declaration line)
+        assert line in (4, 5), 'Expecting it to go to line 4 or 5. Went to: %s' % line
+        # we use tracing debugger for stepping
+        assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
+
+        self.write_run_thread(thread_id)
+
+        self.finished_ok = True
+
+
+class WriterThreadAddLineBreakWhileRun(debugger_unittest.AbstractWriterThread):
+
+    TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case3.py')
+
+    def run(self):
+        self.start_socket()
+        self.write_make_initial_run()
+        time.sleep(.5)
+        breakpoint_id = self.write_add_breakpoint(4, '')
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+
+        assert line == 4, 'Expected return to be in line 4, was: %s' % line
+        # we use tracing debugger if breakpoint was added while running
+        assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
+
+        self.write_get_frame(thread_id, frame_id)
+
+        self.write_run_thread(thread_id)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+        assert line == 4, 'Expected return to be in line 4, was: %s' % line
+        # we still use tracing debugger
+        assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
+
+        self.write_get_frame(thread_id, frame_id)
+
+        self.write_remove_breakpoint(breakpoint_id)
+
+        self.write_run_thread(thread_id)
+
+        self.finished_ok = True
+
+
+class WriterThreadExceptionBreak(debugger_unittest.AbstractWriterThread):
+
+    TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py')
+
+    def run(self):
+        self.start_socket()
+        self.write_add_breakpoint(10, 'Method2')
+        self.write_add_exception_breakpoint_with_policy('IndexError', "1", "0", "0")
+        self.write_make_initial_run()
+        time.sleep(.5)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+
+        assert line == 10, 'Expected return to be in line 10, was: %s' % line
+        # we use tracing debugger if there are exception breakpoints
+        assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
+
+        self.write_run_thread(thread_id)
+
+        self.finished_ok = True
+
+
+class WriterThreadAddExceptionBreakWhileRunning(debugger_unittest.AbstractWriterThread):
+
+    TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py')
+
+    def run(self):
+        self.start_socket()
+        self.write_add_breakpoint(10, 'Method2')
+        self.write_add_breakpoint(2, 'Method1')
+        # self.write_add_exception_breakpoint_with_policy('IndexError', "1", "0", "0")
+        self.write_make_initial_run()
+        time.sleep(.5)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+
+        assert line == 10, 'Expected return to be in line 10, was: %s' % line
+        # no exception breakpoint has been added yet, so frame evaluation is still used
+        assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
+
+        self.write_add_exception_breakpoint_with_policy('IndexError', "1", "0", "0")
+
+        self.write_run_thread(thread_id)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+
+        assert line == 2, 'Expected return to be in line 2, was: %s' % line
+        # we use tracing debugger if exception break was added
+        assert suspend_type == "trace", 'Expected suspend type to be "trace", but was: %s' % suspend_type
+
+        self.write_run_thread(thread_id)
+
+        self.finished_ok = True
+
+
+class WriterThreadAddTerminationExceptionBreak(debugger_unittest.AbstractWriterThread):
+
+    TEST_FILE = debugger_unittest._get_debugger_test_file('_debugger_case10.py')
+
+    def run(self):
+        self.start_socket()
+        self.write_add_breakpoint(10, 'Method2')
+        self.write_add_exception_breakpoint_with_policy('IndexError', "0", "1", "0")
+        self.write_make_initial_run()
+        time.sleep(.5)
+
+        thread_id, frame_id, line, suspend_type = self.wait_for_breakpoint_hit_with_suspend_type('111', True)
+
+        assert line == 10, 'Expected return to be in line 10, was: %s' % line
+        # we can use frame evaluation with exception breakpoint with "On termination" suspend policy
+        assert suspend_type == "frame_eval", 'Expected suspend type to be "frame_eval", but was: %s' % suspend_type
+
+        self.write_run_thread(thread_id)
+
+        self.finished_ok = True
+
+
+@pytest.mark.skipif(not IS_PY36 or not IS_CPYTHON or not TEST_CYTHON, reason='Test requires CPython 3.6 with the Cython extensions (PYDEVD_USE_CYTHON=YES)')
+class TestFrameEval(unittest.TestCase, debugger_unittest.DebuggerRunner):
+    def get_command_line(self):
+        return [sys.executable, '-u']
+
+    def test_step_and_resume(self):
+        self.check_case(WriterThreadStepAndResume)
+
+    def test_step_return(self):
+        self.check_case(WriterThreadStepReturn)
+
+    def test_add_break_while_running(self):
+        self.check_case(WriterThreadAddLineBreakWhileRun)
+
+    def test_exc_break(self):
+        self.check_case(WriterThreadExceptionBreak)
+
+    def test_add_exc_break_while_running(self):
+        self.check_case(WriterThreadAddExceptionBreakWhileRunning)
+
+    def test_add_termination_exc_break(self):
+        self.check_case(WriterThreadAddTerminationExceptionBreak)
\ No newline at end of file
diff --git a/ptvsd/pydevd/tests_python/test_process_command_line.py b/ptvsd/pydevd/tests_python/test_process_command_line.py
new file mode 100644
index 00000000..8932630a
--- /dev/null
+++ b/ptvsd/pydevd/tests_python/test_process_command_line.py
@@ -0,0 +1,28 @@
+import unittest
+
+class Test(unittest.TestCase):
+
+    def testProcessCommandLine(self):
+        from _pydevd_bundle.pydevd_command_line_handling import process_command_line, setup_to_argv
+        setup = process_command_line(['pydevd.py', '--port', '1', '--save-threading'])
+        assert setup['save-threading']
+        assert setup['port'] == 1
+        assert not setup['qt-support']
+
+        argv = setup_to_argv(setup)
+        assert argv[0].endswith('pydevd.py') or argv[0].endswith('pydevd$py.class'), 'Expected: %s to end with pydevd.py' % (argv[0],)
+        argv = argv[1:]
+        assert argv == ['--port', '1', '--save-threading']
+
+    def testProcessCommandLine2(self):
+        from _pydevd_bundle.pydevd_command_line_handling import process_command_line, setup_to_argv
+        setup = process_command_line(['pydevd.py', '--port', '1', '--qt-support=auto'])
+        assert setup['qt-support'] == 'auto'
+
+        setup = process_command_line(['pydevd.py', '--port', '1', '--qt-support'])
+        assert setup['qt-support'] == 'auto'
+
+        setup = process_command_line(['pydevd.py', '--port', '1', '--qt-support=pyqt4'])
+        assert setup['qt-support'] == 'pyqt4'
+
+        self.assertRaises(ValueError, process_command_line, ['pydevd.py', '--port', '1', '--qt-support=wrong'])
diff --git a/ptvsd/pydevd/tests_python/test_pydev_monkey.py b/ptvsd/pydevd/tests_python/test_pydev_monkey.py
new file mode 100644
index 00000000..a0625773
--- /dev/null
+++ b/ptvsd/pydevd/tests_python/test_pydev_monkey.py
@@ -0,0 +1,142 @@
+import sys
+import os
+import unittest
+try:
+    from _pydev_bundle import pydev_monkey
+except:
+    sys.path.append(os.path.dirname(os.path.dirname(__file__)))
+    from _pydev_bundle import pydev_monkey
+from pydevd
import SetupHolder +from _pydev_bundle.pydev_monkey import pydev_src_dir + + + +class TestCase(unittest.TestCase): + + def test_monkey(self): + original = SetupHolder.setup + + try: + SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'} + check='''C:\\bin\\python.exe -u -c connect(\\"127.0.0.1\\")''' + debug_command = ( + 'import sys; ' + 'sys.path.append(r\'%s\'); ' + 'import pydevd; pydevd.settrace(host=\'127.0.0.1\', port=0, suspend=False, ' + 'trace_only_current_thread=False, patch_multiprocessing=True); ' + '' + 'connect("127.0.0.1")') % pydev_src_dir + if sys.platform == "win32": + debug_command = debug_command.replace('"', '\\"') + debug_command = '"%s"' % debug_command + self.assertEqual( + 'C:\\bin\\python.exe -u -c %s' % debug_command, + pydev_monkey.patch_arg_str_win(check)) + finally: + SetupHolder.setup = original + + def test_str_to_args_windows(self): + self.assertEqual(['a', 'b'], pydev_monkey.str_to_args_windows('a "b"')) + + def test_monkey_patch_args_indc(self): + original = SetupHolder.setup + + try: + SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'} + check=['C:\\bin\\python.exe', '-u', '-c', 'connect("127.0.0.1")'] + debug_command = ( + 'import sys; sys.path.append(r\'%s\'); import pydevd; ' + 'pydevd.settrace(host=\'127.0.0.1\', port=0, suspend=False, trace_only_current_thread=False, patch_multiprocessing=True); ' + '' + 'connect("127.0.0.1")') % pydev_src_dir + if sys.platform == "win32": + debug_command = debug_command.replace('"', '\\"') + debug_command = '"%s"' % debug_command + res = pydev_monkey.patch_args(check) + self.assertEqual(res, [ + 'C:\\bin\\python.exe', + '-u', + '-c', + debug_command + ]) + finally: + SetupHolder.setup = original + + def test_monkey_patch_args_module(self): + original = SetupHolder.setup + + try: + SetupHolder.setup = {'client':'127.0.0.1', 'port': '0', 'multiprocess': True} + check=['C:\\bin\\python.exe', '-m', 'test'] + from _pydevd_bundle.pydevd_command_line_handling import get_pydevd_file + self.assertEqual(pydev_monkey.patch_args(check), [ + 'C:\\bin\\python.exe', + get_pydevd_file(), + '--module', + '--port', + '0', + '--client', + '127.0.0.1', + '--multiprocess', + '--file', + 'test', + ]) + finally: + SetupHolder.setup = original + + def test_monkey_patch_args_no_indc(self): + original = SetupHolder.setup + + try: + SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'} + check=['C:\\bin\\python.exe', 'connect(\\"127.0.0.1\\")'] + from _pydevd_bundle.pydevd_command_line_handling import get_pydevd_file + self.assertEqual(pydev_monkey.patch_args(check), [ + 'C:\\bin\\python.exe', + get_pydevd_file(), + '--port', + '0', + '--client', + '127.0.0.1', + '--file', + 'connect(\\"127.0.0.1\\")']) + finally: + SetupHolder.setup = original + + def test_monkey_patch_args_no_indc_with_pydevd(self): + original = SetupHolder.setup + + try: + SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'} + check=['C:\\bin\\python.exe', 'pydevd.py', 'connect(\\"127.0.0.1\\")', 'bar'] + + self.assertEqual(pydev_monkey.patch_args(check), [ + 'C:\\bin\\python.exe', 'pydevd.py', 'connect(\\"127.0.0.1\\")', 'bar']) + finally: + SetupHolder.setup = original + + def test_monkey_patch_args_no_indc_without_pydevd(self): + original = SetupHolder.setup + from _pydevd_bundle.pydevd_command_line_handling import get_pydevd_file + + try: + SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'} + check=['C:\\bin\\python.exe', 'target.py', 'connect(\\"127.0.0.1\\")', 'bar'] + + self.assertEqual(pydev_monkey.patch_args(check), [ + 'C:\\bin\\python.exe', + 
get_pydevd_file(), + '--port', + '0', + '--client', + '127.0.0.1', + '--file', + 'target.py', + 'connect(\\"127.0.0.1\\")', + 'bar', + ]) + finally: + SetupHolder.setup = original + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/ptvsd/pydevd/tests_python/test_save_locals.py b/ptvsd/pydevd/tests_python/test_save_locals.py new file mode 100644 index 00000000..ed4bd2c4 --- /dev/null +++ b/ptvsd/pydevd/tests_python/test_save_locals.py @@ -0,0 +1,101 @@ +import inspect +import sys +import unittest + +from _pydevd_bundle.pydevd_save_locals import save_locals +from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_IRONPYTHON +import pytest + + +def use_save_locals(name, value): + """ + Attempt to set the local of the given name to value, using locals_to_fast. + """ + frame = inspect.currentframe().f_back + locals_dict = frame.f_locals + locals_dict[name] = value + + save_locals(frame) + + +def check_method(fn): + """ + A harness for testing methods that attempt to modify the values of locals on the stack. + """ + x = 1 + + # The method 'fn' should attempt to set x = 2 in the current frame. + fn('x', 2) + + return x + + + +@pytest.mark.skipif(IS_JYTHON or IS_IRONPYTHON, reason='CPython/pypy only') +class TestSetLocals(unittest.TestCase): + """ + Test setting locals in one function from another function using several approaches. + """ + + def test_set_locals_using_save_locals(self): + x = check_method(use_save_locals) + self.assertEqual(x, 2) # Expected to succeed + + + def test_frame_simple_change(self): + frame = sys._getframe() + a = 20 + frame.f_locals['a'] = 50 + save_locals(frame) + self.assertEqual(50, a) + + + def test_frame_co_freevars(self): + + outer_var = 20 + + def func(): + frame = sys._getframe() + frame.f_locals['outer_var'] = 50 + save_locals(frame) + self.assertEqual(50, outer_var) + + func() + + def test_frame_co_cellvars(self): + + def check_co_vars(a): + frame = sys._getframe() + def function2(): + print(a) + + assert 'a' in frame.f_code.co_cellvars + frame = sys._getframe() + frame.f_locals['a'] = 50 + save_locals(frame) + self.assertEqual(50, a) + + check_co_vars(1) + + + def test_frame_change_in_inner_frame(self): + def change(f): + self.assertTrue(f is not sys._getframe()) + f.f_locals['a']= 50 + save_locals(f) + + + frame = sys._getframe() + a = 20 + change(frame) + self.assertEqual(50, a) + + +if __name__ == '__main__': + suite = unittest.TestSuite() +# suite.addTest(TestSetLocals('test_set_locals_using_dict')) +# #suite.addTest(Test('testCase10a')) +# unittest.TextTestRunner(verbosity=3).run(suite) + + suite = unittest.makeSuite(TestSetLocals) + unittest.TextTestRunner(verbosity=3).run(suite) diff --git a/ptvsd/pydevd/tests_runfiles/not_in_default_pythonpath.txt b/ptvsd/pydevd/tests_runfiles/not_in_default_pythonpath.txt new file mode 100644 index 00000000..29cdc5bc --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/not_in_default_pythonpath.txt @@ -0,0 +1 @@ +(no __init__.py file) \ No newline at end of file diff --git a/ptvsd/pydevd/tests_runfiles/samples/.cvsignore b/ptvsd/pydevd/tests_runfiles/samples/.cvsignore new file mode 100644 index 00000000..d1c89951 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/.cvsignore @@ -0,0 +1,2 @@ +*.class +*.pyc diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/.cvsignore b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/.cvsignore new file mode 100644 index 00000000..d1c89951 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/.cvsignore @@ -0,0 +1,2 @@ 
+*.class +*.pyc diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/__init__.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/__init__.py @@ -0,0 +1 @@ + diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/.cvsignore b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/.cvsignore new file mode 100644 index 00000000..d1c89951 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/.cvsignore @@ -0,0 +1,2 @@ +*.class +*.pyc diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/__init__.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/__init__.py @@ -0,0 +1 @@ + diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/deep_nest_test.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/deep_nest_test.py new file mode 100644 index 00000000..7b1972b8 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/deep_nest_test.py @@ -0,0 +1,22 @@ +import unittest + +class SampleTest(unittest.TestCase): + + def setUp(self): + return + + def tearDown(self): + return + + def test_non_unique_name(self): + pass + + def test_asdf2(self): + pass + + def test_i_am_a_unique_test_name(self): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/non_test_file.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/non_test_file.py new file mode 100644 index 00000000..470c6504 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested2/non_test_file.py @@ -0,0 +1,3 @@ + +""" i am a python file with no tests """ +pass diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/.cvsignore b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/.cvsignore new file mode 100644 index 00000000..d1c89951 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/.cvsignore @@ -0,0 +1,2 @@ +*.class +*.pyc diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/__init__.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/__init__.py @@ -0,0 +1 @@ + diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/junk.txt b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/junk.txt new file mode 100644 index 00000000..14dd4ddd --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/junk.txt @@ -0,0 +1 @@ +im a junk file diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/non_test_file.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/non_test_file.py new file mode 100644 index 00000000..470c6504 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/nested3/non_test_file.py @@ -0,0 +1,3 @@ + +""" i am a python file with no tests """ +pass diff --git a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/non_test_file.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/non_test_file.py new file mode 100644 index 00000000..470c6504 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/non_test_file.py @@ -0,0 +1,3 @@ + +""" i am a python file with no tests """ +pass diff --git 
a/ptvsd/pydevd/tests_runfiles/samples/nested_dir/simple4_test.py b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/simple4_test.py new file mode 100644 index 00000000..ba5d45f1 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/nested_dir/simple4_test.py @@ -0,0 +1,16 @@ +import unittest + +class NestedSampleTest(unittest.TestCase): + + def setUp(self): + return + + def tearDown(self): + return + + def test_non_unique_name(self): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/ptvsd/pydevd/tests_runfiles/samples/non_test_file.py b/ptvsd/pydevd/tests_runfiles/samples/non_test_file.py new file mode 100644 index 00000000..470c6504 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/non_test_file.py @@ -0,0 +1,3 @@ + +""" i am a python file with no tests """ +pass diff --git a/ptvsd/pydevd/tests_runfiles/samples/not_in_default_pythonpath.txt b/ptvsd/pydevd/tests_runfiles/samples/not_in_default_pythonpath.txt new file mode 100644 index 00000000..29cdc5bc --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/not_in_default_pythonpath.txt @@ -0,0 +1 @@ +(no __init__.py file) \ No newline at end of file diff --git a/ptvsd/pydevd/tests_runfiles/samples/simple2_test.py b/ptvsd/pydevd/tests_runfiles/samples/simple2_test.py new file mode 100644 index 00000000..d46468ed --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/simple2_test.py @@ -0,0 +1,16 @@ +import unittest + +class YetAnotherSampleTest(unittest.TestCase): + + def setUp(self): + return + + def tearDown(self): + return + + def test_abc(self): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/ptvsd/pydevd/tests_runfiles/samples/simple3_test.py b/ptvsd/pydevd/tests_runfiles/samples/simple3_test.py new file mode 100644 index 00000000..da1ccbfb --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/simple3_test.py @@ -0,0 +1,16 @@ +import unittest + +class StillYetAnotherSampleTest(unittest.TestCase): + + def setUp(self): + return + + def tearDown(self): + return + + def test_non_unique_name(self): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/ptvsd/pydevd/tests_runfiles/samples/simpleClass_test.py b/ptvsd/pydevd/tests_runfiles/samples/simpleClass_test.py new file mode 100644 index 00000000..3a9c900e --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/simpleClass_test.py @@ -0,0 +1,14 @@ +import unittest + +class SetUpClassTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + raise ValueError("This is an INTENTIONAL value error in setUpClass.") + + def test_blank(self): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/ptvsd/pydevd/tests_runfiles/samples/simpleModule_test.py b/ptvsd/pydevd/tests_runfiles/samples/simpleModule_test.py new file mode 100644 index 00000000..fdde67e4 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/simpleModule_test.py @@ -0,0 +1,16 @@ +import unittest + +def setUpModule(): + raise ValueError("This is an INTENTIONAL value error in setUpModule.") + +class SetUpModuleTest(unittest.TestCase): + + def setUp(cls): + pass + + def test_blank(self): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/ptvsd/pydevd/tests_runfiles/samples/simple_test.py b/ptvsd/pydevd/tests_runfiles/samples/simple_test.py new file mode 100644 index 00000000..619df7c8 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/samples/simple_test.py @@ -0,0 +1,45 @@ +import unittest + +class SampleTest(unittest.TestCase): + + def setUp(self): + pass + + def tearDown(self): + pass + + def test_xxxxxx1(self): + 
self.fail('Fail test 2') + def test_xxxxxx2(self): + pass + def test_xxxxxx3(self): + pass + def test_xxxxxx4(self): + pass + def test_non_unique_name(self): + print('non unique name ran') + + +class AnotherSampleTest(unittest.TestCase): + def setUp(self): + pass + + def tearDown(self): + pass + + def test_1(self): + pass + def test_2(self): + """ im a doc string""" + pass + def todo_not_tested(self): + ''' + Not there by default! + ''' + + +if __name__ == '__main__': +# suite = unittest.makeSuite(SampleTest, 'test') +# runner = unittest.TextTestRunner( verbosity=3 ) +# runner.run(suite) + unittest.main() diff --git a/ptvsd/pydevd/tests_runfiles/test_pydevd_property.py b/ptvsd/pydevd/tests_runfiles/test_pydevd_property.py new file mode 100644 index 00000000..aa1d0108 --- /dev/null +++ b/ptvsd/pydevd/tests_runfiles/test_pydevd_property.py @@ -0,0 +1,124 @@ +''' +Created on Aug 22, 2011 + +@author: hussain.bohra +@author: fabioz +''' + +import os +import sys +import unittest + +#======================================================================================================================= +# Test +#======================================================================================================================= +class Test(unittest.TestCase): + """Test cases to validate custom property implementation in pydevd + """ + + def setUp(self, nused=None): + self.tempdir = os.path.join(os.path.dirname(os.path.dirname(__file__))) + sys.path.insert(0, self.tempdir) + from _pydevd_bundle import pydevd_traceproperty + self.old = pydevd_traceproperty.replace_builtin_property() + + + def tearDown(self, unused=None): + from _pydevd_bundle import pydevd_traceproperty + pydevd_traceproperty.replace_builtin_property(self.old) + sys.path.remove(self.tempdir) + + + def test_property(self): + """Test case to validate custom property + """ + + from _pydevd_bundle import pydevd_traceproperty + class TestProperty(object): + + def __init__(self): + self._get = 0 + self._set = 0 + self._del = 0 + + def get_name(self): + self._get += 1 + return self.__name + + def set_name(self, value): + self._set += 1 + self.__name = value + + def del_name(self): + self._del += 1 + del self.__name + name = property(get_name, set_name, del_name, "name's docstring") + self.assertEqual(name.__class__, pydevd_traceproperty.DebugProperty) + + testObj = TestProperty() + self._check(testObj) + + + def test_property2(self): + """Test case to validate custom property + """ + + class TestProperty(object): + + def __init__(self): + self._get = 0 + self._set = 0 + self._del = 0 + + def name(self): + self._get += 1 + return self.__name + name = property(name) + + def set_name(self, value): + self._set += 1 + self.__name = value + name.setter(set_name) + + def del_name(self): + self._del += 1 + del self.__name + name.deleter(del_name) + + testObj = TestProperty() + self._check(testObj) + + + def test_property3(self): + """Test case to validate custom property + """ + + class TestProperty(object): + + def __init__(self): + self._name = 'foo' + + def name(self): + return self._name + name = property(name) + + testObj = TestProperty() + self.assertRaises(AttributeError, setattr, testObj, 'name', 'bar') + self.assertRaises(AttributeError, delattr, testObj, 'name') + + + def _check(self, testObj): + testObj.name = "Custom" + self.assertEqual(1, testObj._set) + + self.assertEqual(testObj.name, "Custom") + self.assertEqual(1, testObj._get) + + self.assertTrue(hasattr(testObj, 'name')) + del testObj.name + self.assertEqual(1, testObj._del) + 
+        self.assertTrue(not hasattr(testObj, 'name'))
+        testObj.name = "Custom2"
+        self.assertEqual(testObj.name, "Custom2")
+
diff --git a/ptvsd/pydevd/tests_runfiles/test_pydevdio.py b/ptvsd/pydevd/tests_runfiles/test_pydevdio.py
new file mode 100644
index 00000000..3d0b007d
--- /dev/null
+++ b/ptvsd/pydevd/tests_runfiles/test_pydevdio.py
@@ -0,0 +1,36 @@
+import sys
+import os
+
+
+import unittest
+
+class Test(unittest.TestCase):
+
+    def test_it(self):
+        #make it as if we were executing from the directory above this one (so that we can import _pydevd_bundle.pydevd_io
+        #without the need for it being in the pythonpath)
+        #(twice the dirname to get the previous level from this file.)
+        import test_pydevdio #@UnresolvedImport - importing itself
+        ADD_TO_PYTHONPATH = os.path.join(os.path.dirname(os.path.dirname(test_pydevdio.__file__)))
+        sys.path.insert(0, ADD_TO_PYTHONPATH)
+
+        try:
+            from _pydevd_bundle import pydevd_io
+            original = sys.stdout
+
+            try:
+                sys.stdout = pydevd_io.IOBuf()
+                print('foo')
+                print('bar')
+
+                self.assertEqual('foo\nbar\n', sys.stdout.getvalue()) #@UndefinedVariable
+
+                print('ww')
+                print('xx')
+                self.assertEqual('ww\nxx\n', sys.stdout.getvalue()) #@UndefinedVariable
+            finally:
+                sys.stdout = original
+        finally:
+            #remove it to leave it ok for other tests
+            sys.path.remove(ADD_TO_PYTHONPATH)
+
diff --git a/ptvsd/pydevd/tests_runfiles/test_runfiles.py b/ptvsd/pydevd/tests_runfiles/test_runfiles.py
new file mode 100644
index 00000000..19ca07b0
--- /dev/null
+++ b/ptvsd/pydevd/tests_runfiles/test_runfiles.py
@@ -0,0 +1,433 @@
+import os.path
+import sys
+
+IS_JYTHON = sys.platform.find('java') != -1
+
+try:
+    this_file_name = __file__
+except NameError:
+    # stupid jython. plain old __file__ isn't working for some reason
+    import test_runfiles #@UnresolvedImport - importing the module itself
+    this_file_name = test_runfiles.__file__
+
+
+desired_runfiles_path = os.path.normpath(os.path.dirname(this_file_name) + "/..")
+sys.path.insert(0, desired_runfiles_path)
+
+from _pydev_runfiles import pydev_runfiles_unittest
+from _pydev_runfiles import pydev_runfiles_xml_rpc
+from _pydevd_bundle import pydevd_io
+
+#remove existing pydev_runfiles from modules (if any), so that we can be sure we have the correct version
+if 'pydev_runfiles' in sys.modules:
+    del sys.modules['pydev_runfiles']
+if '_pydev_runfiles.pydev_runfiles' in sys.modules:
+    del sys.modules['_pydev_runfiles.pydev_runfiles']
+
+
+from _pydev_runfiles import pydev_runfiles
+import unittest
+import tempfile
+import re
+
+try:
+    set
+except:
+    from sets import Set as set
+
+#this is an early test because it requires sys.path to be changed
+orig_syspath = sys.path
+a_file = pydev_runfiles.__file__
+pydev_runfiles.PydevTestRunner(pydev_runfiles.Configuration(files_or_dirs=[a_file]))
+file_dir = os.path.dirname(os.path.dirname(a_file))
+assert file_dir in sys.path
+sys.path = orig_syspath[:]
+
+#remove it so that we leave it ok for other tests
+sys.path.remove(desired_runfiles_path)
+
+class RunfilesTest(unittest.TestCase):
+
+    def _setup_scenario(
+        self,
+        path,
+        include_tests=None,
+        tests=None,
+        files_to_tests=None,
+        exclude_files=None,
+        exclude_tests=None,
+        include_files=None,
+        ):
+        self.MyTestRunner = pydev_runfiles.PydevTestRunner(
+            pydev_runfiles.Configuration(
+                files_or_dirs=path,
+                include_tests=include_tests,
+                verbosity=1,
+                tests=tests,
+                files_to_tests=files_to_tests,
+                exclude_files=exclude_files,
+                exclude_tests=exclude_tests,
+                include_files=include_files,
+            )
+        )
+        self.files =
self.MyTestRunner.find_import_files() + self.modules = self.MyTestRunner.find_modules_from_files(self.files) + self.all_tests = self.MyTestRunner.find_tests_from_modules(self.modules) + self.filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + + def setUp(self): + self.file_dir = [os.path.abspath(os.path.join(desired_runfiles_path, 'tests_runfiles/samples'))] + self._setup_scenario(self.file_dir, None) + + + def test_suite_used(self): + for suite in self.all_tests + self.filtered_tests: + self.assertTrue(isinstance(suite, pydev_runfiles_unittest.PydevTestSuite)) + + def test_parse_cmdline(self): + sys.argv = "pydev_runfiles.py ./".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual([sys.argv[1]], configuration.files_or_dirs) + self.assertEqual(2, configuration.verbosity) # default value + self.assertEqual(None, configuration.include_tests) # default value + + sys.argv = "pydev_runfiles.py ../images c:/temp".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual(sys.argv[1:3], configuration.files_or_dirs) + self.assertEqual(2, configuration.verbosity) + + sys.argv = "pydev_runfiles.py --verbosity 3 ../junk c:/asdf ".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual(sys.argv[3:], configuration.files_or_dirs) + self.assertEqual(int(sys.argv[2]), configuration.verbosity) + + sys.argv = "pydev_runfiles.py --include_tests test_def ./".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual([sys.argv[-1]], configuration.files_or_dirs) + self.assertEqual([sys.argv[2]], configuration.include_tests) + + sys.argv = "pydev_runfiles.py --include_tests Abc.test_def,Mod.test_abc c:/junk/".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual([sys.argv[-1]], configuration.files_or_dirs) + self.assertEqual(sys.argv[2].split(','), configuration.include_tests) + + sys.argv = ('C:\\eclipse-SDK-3.2-win32\\eclipse\\plugins\\org.python.pydev.debug_1.2.2\\pysrc\\pydev_runfiles.py ' + + '--verbosity 1 ' + + 'C:\\workspace_eclipse\\fronttpa\\tests\\gui_tests\\calendar_popup_control_test.py ').split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual([sys.argv[-1]], configuration.files_or_dirs) + self.assertEqual(1, configuration.verbosity) + + sys.argv = "pydev_runfiles.py --verbosity 1 --include_tests Mod.test_abc c:/junk/ ./".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual(sys.argv[5:], configuration.files_or_dirs) + self.assertEqual(int(sys.argv[2]), configuration.verbosity) + self.assertEqual([sys.argv[4]], configuration.include_tests) + + sys.argv = "pydev_runfiles.py --exclude_files=*.txt,a*.py".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual(['*.txt', 'a*.py'], configuration.exclude_files) + + sys.argv = "pydev_runfiles.py --exclude_tests=*__todo,test*bar".split() + configuration = pydev_runfiles.parse_cmdline() + self.assertEqual(['*__todo', 'test*bar'], configuration.exclude_tests) + + + def test___adjust_python_path_works_for_directories(self): + orig_syspath = sys.path + tempdir = tempfile.gettempdir() + pydev_runfiles.PydevTestRunner(pydev_runfiles.Configuration(files_or_dirs=[tempdir])) + self.assertEqual(1, tempdir in sys.path) + sys.path = orig_syspath[:] + + + def test___is_valid_py_file(self): + isvalid = self.MyTestRunner._PydevTestRunner__is_valid_py_file + self.assertEqual(1, isvalid("test.py")) + self.assertEqual(0, isvalid("asdf.pyc")) + self.assertEqual(0, isvalid("__init__.py")) + 
self.assertEqual(0, isvalid("__init__.pyc")) + self.assertEqual(1, isvalid("asdf asdf.pyw")) + + def test___unixify(self): + unixify = self.MyTestRunner._PydevTestRunner__unixify + self.assertEqual("c:/temp/junk/asdf.py", unixify("c:SEPtempSEPjunkSEPasdf.py".replace('SEP', os.sep))) + + def test___importify(self): + importify = self.MyTestRunner._PydevTestRunner__importify + self.assertEqual("temp.junk.asdf", importify("temp/junk/asdf.py")) + self.assertEqual("asdf", importify("asdf.py")) + self.assertEqual("abc.def.hgi", importify("abc/def/hgi")) + + def test_finding_a_file_from_file_system(self): + test_file = "simple_test.py" + self.MyTestRunner.files_or_dirs = [self.file_dir[0] + test_file] + files = self.MyTestRunner.find_import_files() + self.assertEqual(1, len(files)) + self.assertEqual(files[0], self.file_dir[0] + test_file) + + def test_finding_files_in_dir_from_file_system(self): + self.assertEqual(1, len(self.files) > 0) + for import_file in self.files: + self.assertEqual(-1, import_file.find(".pyc")) + self.assertEqual(-1, import_file.find("__init__.py")) + self.assertEqual(-1, import_file.find("\\")) + self.assertEqual(-1, import_file.find(".txt")) + + def test___get_module_from_str(self): + my_importer = self.MyTestRunner._PydevTestRunner__get_module_from_str + my_os_path = my_importer("os.path", True, 'unused') + from os import path + import os.path as path2 + self.assertEqual(path, my_os_path) + self.assertEqual(path2, my_os_path) + self.assertNotEqual(__import__("os.path"), my_os_path) + self.assertNotEqual(__import__("os"), my_os_path) + + def test_finding_modules_from_import_strings(self): + self.assertEqual(1, len(self.modules) > 0) + + def test_finding_tests_when_no_filter(self): + # unittest.py will create a TestCase with 0 tests in it + # since it just imports what is given + self.assertEqual(1, len(self.all_tests) > 0) + files_with_tests = [1 for t in self.all_tests if len(t._tests) > 0] + self.assertNotEqual(len(self.files), len(files_with_tests)) + + def count_suite(self, tests=None): + total = 0 + for t in tests: + total += t.countTestCases() + return total + + def test___match(self): + matcher = self.MyTestRunner._PydevTestRunner__match + self.assertEqual(1, matcher(None, "aname")) + self.assertEqual(1, matcher([".*"], "aname")) + self.assertEqual(0, matcher(["^x$"], "aname")) + self.assertEqual(0, matcher(["abc"], "aname")) + self.assertEqual(1, matcher(["abc", "123"], "123")) + + def test_finding_tests_from_modules_with_bad_filter_returns_0_tests(self): + self._setup_scenario(self.file_dir, ["NO_TESTS_ARE_SURE_TO_HAVE_THIS_NAME"]) + self.assertEqual(0, self.count_suite(self.all_tests)) + + def test_finding_test_with_unique_name_returns_1_test(self): + self._setup_scenario(self.file_dir, include_tests=["test_i_am_a_unique_test_name"]) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(1, self.count_suite(filtered_tests)) + + def test_finding_test_with_non_unique_name(self): + self._setup_scenario(self.file_dir, include_tests=["test_non_unique_name"]) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(1, self.count_suite(filtered_tests) > 2) + + def test_finding_tests_with_regex_filters(self): + self._setup_scenario(self.file_dir, include_tests=["test_non*"]) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(1, self.count_suite(filtered_tests) > 2) + + self._setup_scenario(self.file_dir, ["^$"]) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) 
+ self.assertEqual(0, self.count_suite(filtered_tests)) + + self._setup_scenario(self.file_dir, None, exclude_tests=["*"]) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(0, self.count_suite(filtered_tests)) + + def test_matching_tests(self): + self._setup_scenario(self.file_dir, None, ['StillYetAnotherSampleTest']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(1, self.count_suite(filtered_tests)) + + self._setup_scenario(self.file_dir, None, ['SampleTest.test_xxxxxx1']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(1, self.count_suite(filtered_tests)) + + self._setup_scenario(self.file_dir, None, ['SampleTest']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(8, self.count_suite(filtered_tests)) + + self._setup_scenario(self.file_dir, None, ['AnotherSampleTest.todo_not_tested']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(1, self.count_suite(filtered_tests)) + + self._setup_scenario(self.file_dir, None, ['StillYetAnotherSampleTest', 'SampleTest.test_xxxxxx1']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(2, self.count_suite(filtered_tests)) + + self._setup_scenario(self.file_dir, None, exclude_tests=['*']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(self.count_suite(filtered_tests), 0) + + + self._setup_scenario(self.file_dir, None, exclude_tests=['*a*']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(self.count_suite(filtered_tests), 6) + + self.assertEqual( + set(self.MyTestRunner.list_test_names(filtered_tests)), + set(['test_1', 'test_2', 'test_xxxxxx1', 'test_xxxxxx2', 'test_xxxxxx3', 'test_xxxxxx4']) + ) + + self._setup_scenario(self.file_dir, None, exclude_tests=['*a*', '*x*']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + self.assertEqual(self.count_suite(filtered_tests), 2) + + self.assertEqual( + set(self.MyTestRunner.list_test_names(filtered_tests)), + set(['test_1', 'test_2']) + ) + + self._setup_scenario(self.file_dir, None, exclude_files=['simple_test.py']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + names = self.MyTestRunner.list_test_names(filtered_tests) + self.assertTrue('test_xxxxxx1' not in names, 'Found: %s' % (names,)) + + self.assertEqual( + set(['test_abc', 'test_non_unique_name', 'test_non_unique_name', 'test_asdf2', 'test_i_am_a_unique_test_name', 'test_non_unique_name', 'test_blank']), + set(names) + ) + + self._setup_scenario(self.file_dir, None, include_files=['simple3_test.py']) + filtered_tests = self.MyTestRunner.filter_tests(self.all_tests) + names = self.MyTestRunner.list_test_names(filtered_tests) + self.assertTrue('test_xxxxxx1' not in names, 'Found: %s' % (names,)) + + self.assertEqual( + set(['test_non_unique_name']), + set(names) + ) + + def test_xml_rpc_communication(self): + import sys + sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'samples')) + notifications = [] + class Server: + + def __init__(self, notifications): + self.notifications = notifications + + def notifyConnected(self): + #This method is called at the very start (in runfiles.py), and we do not check this here + raise AssertionError('Should not be called from the run tests.') + + + def notifyTestsCollected(self, number_of_tests): + self.notifications.append(('notifyTestsCollected', number_of_tests)) + + + def 
notifyStartTest(self, file, test): + pass + + def notifyTest(self, cond, captured_output, error_contents, file, test, time): + try: + #I.e.: when marked as Binary in xml-rpc + captured_output = captured_output.data + except: + pass + try: + #I.e.: when marked as Binary in xml-rpc + error_contents = error_contents.data + except: + pass + if error_contents: + error_contents = error_contents.splitlines()[-1].strip() + self.notifications.append(('notifyTest', cond, captured_output.strip(), error_contents, file, test)) + + def notifyTestRunFinished(self, total_time): + self.notifications.append(('notifyTestRunFinished',)) + + server = Server(notifications) + pydev_runfiles_xml_rpc.set_server(server) + simple_test = os.path.join(self.file_dir[0], 'simple_test.py') + simple_test2 = os.path.join(self.file_dir[0], 'simple2_test.py') + simpleClass_test = os.path.join(self.file_dir[0], 'simpleClass_test.py') + simpleModule_test = os.path.join(self.file_dir[0], 'simpleModule_test.py') + + files_to_tests = {} + files_to_tests.setdefault(simple_test , []).append('SampleTest.test_xxxxxx1') + files_to_tests.setdefault(simple_test , []).append('SampleTest.test_xxxxxx2') + files_to_tests.setdefault(simple_test , []).append('SampleTest.test_non_unique_name') + files_to_tests.setdefault(simple_test2, []).append('YetAnotherSampleTest.test_abc') + files_to_tests.setdefault(simpleClass_test, []).append('SetUpClassTest.test_blank') + files_to_tests.setdefault(simpleModule_test, []).append('SetUpModuleTest.test_blank') + + self._setup_scenario(None, files_to_tests=files_to_tests) + self.MyTestRunner.verbosity = 2 + + buf = pydevd_io.start_redirect(keep_original_redirection=False) + try: + self.MyTestRunner.run_tests() + self.assertEqual(8, len(notifications)) + if sys.version_info[:2] <= (2, 6): + # The setUpClass is not supported in Python 2.6 (thus we have no collection error). 
+ expected = [ + ('notifyTest', 'fail', '', 'AssertionError: Fail test 2', simple_test, 'SampleTest.test_xxxxxx1'), + ('notifyTest', 'ok', '', '', simple_test2, 'YetAnotherSampleTest.test_abc'), + ('notifyTest', 'ok', '', '', simpleClass_test, 'SetUpClassTest.test_blank'), + ('notifyTest', 'ok', '', '', simpleModule_test, 'SetUpModuleTest.test_blank'), + ('notifyTest', 'ok', '', '', simple_test, 'SampleTest.test_xxxxxx2'), + ('notifyTest', 'ok', 'non unique name ran', '', simple_test, 'SampleTest.test_non_unique_name'), + ('notifyTestRunFinished',), + ('notifyTestsCollected', 6) + ] + else: + expected = [ + ('notifyTestsCollected', 6), + ('notifyTest', 'ok', 'non unique name ran', '', simple_test, 'SampleTest.test_non_unique_name'), + ('notifyTest', 'fail', '', 'AssertionError: Fail test 2', simple_test, 'SampleTest.test_xxxxxx1'), + ('notifyTest', 'ok', '', '', simple_test, 'SampleTest.test_xxxxxx2'), + ('notifyTest', 'ok', '', '', simple_test2, 'YetAnotherSampleTest.test_abc'), + ] + + if not IS_JYTHON: + if 'samples.simpleClass_test' in str(notifications): + expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpClass.', + simpleClass_test.replace('/', os.path.sep), 'samples.simpleClass_test.SetUpClassTest ')) + expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpModule.', + simpleModule_test.replace('/', os.path.sep), 'samples.simpleModule_test ')) + else: + expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpClass.', + simpleClass_test.replace('/', os.path.sep), 'simpleClass_test.SetUpClassTest ')) + expected.append(('notifyTest', 'error', '', 'ValueError: This is an INTENTIONAL value error in setUpModule.', + simpleModule_test.replace('/', os.path.sep), 'simpleModule_test ')) + else: + expected.append(('notifyTest', 'ok', '', '', simpleClass_test, 'SetUpClassTest.test_blank')) + expected.append(('notifyTest', 'ok', '', '', simpleModule_test, 'SetUpModuleTest.test_blank')) + + expected.append(('notifyTestRunFinished',)) + + expected.sort() + new_notifications = [] + for notification in expected: + try: + if len(notification) == 6: + # Some are binary on Py3. + new_notifications.append(( + notification[0], + notification[1], + notification[2].encode('latin1'), + notification[3].encode('latin1'), + notification[4], + notification[5], + )) + else: + new_notifications.append(notification) + except: + raise + expected = new_notifications + + notifications.sort() + if not IS_JYTHON: + self.assertEqual( + expected, + notifications + ) + finally: + pydevd_io.end_redirect() + b = buf.getvalue() + if sys.version_info[:2] > (2, 6): + self.assertTrue(b.find('Ran 4 tests in ') != -1, 'Found: ' + b) + else: + self.assertTrue(b.find('Ran 6 tests in ') != -1, 'Found: ' + b) diff --git a/ptvsd/pydevd/third_party/isort_container/isort/__init__.py b/ptvsd/pydevd/third_party/isort_container/isort/__init__.py new file mode 100644 index 00000000..3063d1ed --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/__init__.py @@ -0,0 +1,28 @@ +"""__init__.py. + +Defines the isort module to include the SortImports utility class as well as any defined settings. 
+ +Copyright (C) 2013 Timothy Edmund Crosley + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +""" + +from __future__ import absolute_import, division, print_function, unicode_literals + +from . import settings +from .isort import SortImports + +__version__ = "4.2.15" diff --git a/ptvsd/pydevd/third_party/isort_container/isort/__main__.py b/ptvsd/pydevd/third_party/isort_container/isort/__main__.py new file mode 100644 index 00000000..94b1d057 --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/__main__.py @@ -0,0 +1,3 @@ +from isort.main import main + +main() diff --git a/ptvsd/pydevd/third_party/isort_container/isort/hooks.py b/ptvsd/pydevd/third_party/isort_container/isort/hooks.py new file mode 100644 index 00000000..15b6d408 --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/hooks.py @@ -0,0 +1,82 @@ +"""isort.py. + +Defines a git hook to allow pre-commit warnings and errors about import order. + +usage: + exit_code = git_hook(strict=True) + +Copyright (C) 2015 Helen Sherwood-Taylor + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ +""" +import subprocess + +from isort import SortImports + + +def get_output(command): + """ + Run a command and return raw output + + :param str command: the command to run + :returns: the stdout output of the command + """ + return subprocess.check_output(command.split()) + + +def get_lines(command): + """ + Run a command and return lines of output + + :param str command: the command to run + :returns: list of whitespace-stripped lines output by command + """ + stdout = get_output(command) + return [line.strip().decode('utf-8') for line in stdout.splitlines()] + + +def git_hook(strict=False): + """ + Git pre-commit hook to check staged files for isort errors + + :param bool strict - if True, return number of errors on exit, + causing the hook to fail. If False, return zero so it will + just act as a warning. + + :return number of errors if in strict mode, 0 otherwise. + """ + + # Get list of files modified and staged + diff_cmd = "git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD" + files_modified = get_lines(diff_cmd) + + errors = 0 + for filename in files_modified: + if filename.endswith('.py'): + # Get the staged contents of the file + staged_cmd = "git show :%s" % filename + staged_contents = get_output(staged_cmd) + + sort = SortImports( + file_path=filename, + file_contents=staged_contents.decode(), + check=True + ) + + if sort.incorrectly_sorted: + errors += 1 + + return errors if strict else 0 diff --git a/ptvsd/pydevd/third_party/isort_container/isort/isort.py b/ptvsd/pydevd/third_party/isort_container/isort/isort.py new file mode 100644 index 00000000..cecd5af9 --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/isort.py @@ -0,0 +1,969 @@ +"""isort.py. + +Exposes a simple library to sort through imports within Python code + +usage: + SortImports(file_name) +or: + sorted = SortImports(file_contents=file_contents).output + +Copyright (C) 2013 Timothy Edmund Crosley + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import copy +import io +import itertools +import os +import re +import sys +from collections import namedtuple +from datetime import datetime +from difflib import unified_diff +from fnmatch import fnmatch +from glob import glob + +from . 
import settings +from .natural import nsorted +from .pie_slice import OrderedDict, OrderedSet, input, itemsview + +KNOWN_SECTION_MAPPING = { + 'STDLIB': 'STANDARD_LIBRARY', + 'FUTURE': 'FUTURE_LIBRARY', + 'FIRSTPARTY': 'FIRST_PARTY', + 'THIRDPARTY': 'THIRD_PARTY', +} + + +class SortImports(object): + incorrectly_sorted = False + skipped = False + + def __init__(self, file_path=None, file_contents=None, write_to_stdout=False, check=False, + show_diff=False, settings_path=None, ask_to_apply=False, **setting_overrides): + if not settings_path and file_path: + settings_path = os.path.dirname(os.path.abspath(file_path)) + settings_path = settings_path or os.getcwd() + + self.config = settings.from_path(settings_path).copy() + for key, value in itemsview(setting_overrides): + access_key = key.replace('not_', '').lower() + # The sections config needs to retain order and can't be converted to a set. + if access_key != 'sections' and type(self.config.get(access_key)) in (list, tuple): + if key.startswith('not_'): + self.config[access_key] = list(set(self.config[access_key]).difference(value)) + else: + self.config[access_key] = list(set(self.config[access_key]).union(value)) + else: + self.config[key] = value + + if self.config['force_alphabetical_sort']: + self.config.update({'force_alphabetical_sort_within_sections': True, + 'no_sections': True, + 'lines_between_types': 1, + 'from_first': True}) + + indent = str(self.config['indent']) + if indent.isdigit(): + indent = " " * int(indent) + else: + indent = indent.strip("'").strip('"') + if indent.lower() == "tab": + indent = "\t" + self.config['indent'] = indent + + self.place_imports = {} + self.import_placements = {} + self.remove_imports = [self._format_simplified(removal) for removal in self.config['remove_imports']] + self.add_imports = [self._format_natural(addition) for addition in self.config['add_imports']] + self._section_comments = ["# " + value for key, value in itemsview(self.config) if + key.startswith('import_heading') and value] + + self.file_encoding = 'utf-8' + file_name = file_path + self.file_path = file_path or "" + if file_path: + file_path = os.path.abspath(file_path) + if settings.should_skip(file_path, self.config): + self.skipped = True + if self.config['verbose']: + print("WARNING: {0} was skipped as it's listed in 'skip' setting" + " or matches a glob in 'skip_glob' setting".format(file_path)) + file_contents = None + elif not file_contents: + self.file_path = file_path + self.file_encoding = coding_check(file_path) + with io.open(file_path, encoding=self.file_encoding) as file_to_import_sort: + file_contents = file_to_import_sort.read() + + if file_contents is None or ("isort:" + "skip_file") in file_contents: + return + + self.in_lines = file_contents.split("\n") + self.original_length = len(self.in_lines) + if (self.original_length > 1 or self.in_lines[:1] not in ([], [""])) or self.config['force_adds']: + for add_import in self.add_imports: + self.in_lines.append(add_import) + self.number_of_lines = len(self.in_lines) + + self.out_lines = [] + self.comments = {'from': {}, 'straight': {}, 'nested': {}, 'above': {'straight': {}, 'from': {}}} + self.imports = OrderedDict() + self.as_map = {} + + section_names = self.config['sections'] + self.sections = namedtuple('Sections', section_names)(*[name for name in section_names]) + for section in itertools.chain(self.sections, self.config['forced_separate']): + self.imports[section] = {'straight': OrderedSet(), 'from': OrderedDict()} + + self.known_patterns = [] + for 
placement in reversed(self.sections): + known_placement = KNOWN_SECTION_MAPPING.get(placement, placement) + config_key = 'known_{0}'.format(known_placement.lower()) + known_patterns = self.config.get(config_key, []) + for known_pattern in known_patterns: + self.known_patterns.append((re.compile('^' + known_pattern.replace('*', '.*').replace('?', '.?') + '$'), + placement)) + + self.index = 0 + self.import_index = -1 + self._first_comment_index_start = -1 + self._first_comment_index_end = -1 + self._parse() + if self.import_index != -1: + self._add_formatted_imports() + + self.length_change = len(self.out_lines) - self.original_length + while self.out_lines and self.out_lines[-1].strip() == "": + self.out_lines.pop(-1) + self.out_lines.append("") + + self.output = "\n".join(self.out_lines) + if self.config['atomic']: + try: + compile(self._strip_top_comments(self.out_lines), self.file_path, 'exec', 0, 1) + except SyntaxError: + self.output = file_contents + self.incorrectly_sorted = True + try: + compile(self._strip_top_comments(self.in_lines), self.file_path, 'exec', 0, 1) + print("ERROR: {0} isort would have introduced syntax errors, please report to the project!". \ + format(self.file_path)) + except SyntaxError: + print("ERROR: {0} File contains syntax errors.".format(self.file_path)) + + return + if check: + check_output = self.output + check_against = file_contents + if self.config['ignore_whitespace']: + check_output = check_output.replace("\n", "").replace(" ", "") + check_against = check_against.replace("\n", "").replace(" ", "") + + if check_output == check_against: + if self.config['verbose']: + print("SUCCESS: {0} Everything Looks Good!".format(self.file_path)) + return + + print("ERROR: {0} Imports are incorrectly sorted.".format(self.file_path)) + self.incorrectly_sorted = True + if show_diff or self.config['show_diff']: + self._show_diff(file_contents) + elif write_to_stdout: + sys.stdout.write(self.output) + elif file_name and not check: + if ask_to_apply: + if self.output == file_contents: + return + self._show_diff(file_contents) + answer = None + while answer not in ('yes', 'y', 'no', 'n', 'quit', 'q'): + answer = input("Apply suggested changes to '{0}' [y/n/q]?".format(self.file_path)).lower() + if answer in ('no', 'n'): + return + if answer in ('quit', 'q'): + sys.exit(1) + with io.open(self.file_path, encoding=self.file_encoding, mode='w') as output_file: + output_file.write(self.output) + + def _show_diff(self, file_contents): + for line in unified_diff( + file_contents.splitlines(1), + self.output.splitlines(1), + fromfile=self.file_path + ':before', + tofile=self.file_path + ':after', + fromfiledate=str(datetime.fromtimestamp(os.path.getmtime(self.file_path)) + if self.file_path else datetime.now()), + tofiledate=str(datetime.now()) + ): + sys.stdout.write(line) + + @staticmethod + def _strip_top_comments(lines): + """Strips # comments that exist at the top of the given lines""" + lines = copy.copy(lines) + while lines and lines[0].startswith("#"): + lines = lines[1:] + return "\n".join(lines) + + def place_module(self, module_name): + """Tries to determine if a module is a python std import, third party import, or project code: + + if it can't determine - it assumes it is project code + + """ + for forced_separate in self.config['forced_separate']: + # Ensure all forced_separate patterns will match to end of string + path_glob = forced_separate + if not forced_separate.endswith('*'): + path_glob = '%s*' % forced_separate + + if fnmatch(module_name, path_glob) or 
fnmatch(module_name, '.' + path_glob): + return forced_separate + + if module_name.startswith("."): + return self.sections.LOCALFOLDER + + # Try to find most specific placement instruction match (if any) + parts = module_name.split('.') + module_names_to_check = ['.'.join(parts[:first_k]) for first_k in range(len(parts), 0, -1)] + for module_name_to_check in module_names_to_check: + for pattern, placement in self.known_patterns: + if pattern.match(module_name_to_check): + return placement + + # Use a copy of sys.path to avoid any unintended modifications + # to it - e.g. `+=` used below will change paths in place and + # if not copied, consequently sys.path, which will grow unbounded + # with duplicates on every call to this method. + paths = list(sys.path) + virtual_env = self.config.get('virtual_env') or os.environ.get('VIRTUAL_ENV') + virtual_env_src = False + if virtual_env: + paths += [path for path in glob('{0}/lib/python*/site-packages'.format(virtual_env)) + if path not in paths] + paths += [path for path in glob('{0}/src/*'.format(virtual_env)) if os.path.isdir(path)] + virtual_env_src = '{0}/src/'.format(virtual_env) + + # handle case-insensitive paths on windows + stdlib_lib_prefix = os.path.normcase(get_stdlib_path()) + + for prefix in paths: + module_path = "/".join((prefix, module_name.replace(".", "/"))) + package_path = "/".join((prefix, module_name.split(".")[0])) + is_module = (exists_case_sensitive(module_path + ".py") or + exists_case_sensitive(module_path + ".so")) + is_package = exists_case_sensitive(package_path) and os.path.isdir(package_path) + if is_module or is_package: + if ('site-packages' in prefix or 'dist-packages' in prefix or + (virtual_env and virtual_env_src in prefix)): + return self.sections.THIRDPARTY + elif os.path.normcase(prefix).startswith(stdlib_lib_prefix): + return self.sections.STDLIB + else: + return self.config['default_section'] + + return self.config['default_section'] + + def _get_line(self): + """Returns the current line from the file while incrementing the index.""" + line = self.in_lines[self.index] + self.index += 1 + return line + + @staticmethod + def _import_type(line): + """If the current line is an import line it will return its type (from or straight)""" + if "isort:skip" in line: + return + elif line.startswith('import '): + return "straight" + elif line.startswith('from '): + return "from" + + def _at_end(self): + """returns True if we are at the end of the file.""" + return self.index == self.number_of_lines + + @staticmethod + def _module_key(module_name, config, sub_imports=False, ignore_case=False): + prefix = "" + if ignore_case: + module_name = str(module_name).lower() + else: + module_name = str(module_name) + + if sub_imports and config['order_by_type']: + if module_name.isupper() and len(module_name) > 1: + prefix = "A" + elif module_name[0:1].isupper(): + prefix = "B" + else: + prefix = "C" + module_name = module_name.lower() + return "{0}{1}{2}".format(module_name in config['force_to_top'] and "A" or "B", prefix, + config['length_sort'] and (str(len(module_name)) + ":" + module_name) or module_name) + + def _add_comments(self, comments, original_string=""): + """ + Returns a string with comments added + """ + return comments and "{0} # {1}".format(self._strip_comments(original_string)[0], + "; ".join(comments)) or original_string + + def _wrap(self, line): + """ + Returns an import wrapped to the specified line-length, if possible. 
+ """ + wrap_mode = self.config['multi_line_output'] + if len(line) > self.config['line_length'] and wrap_mode != settings.WrapModes.NOQA: + for splitter in ("import", ".", "as"): + exp = r"\b" + re.escape(splitter) + r"\b" + if re.search(exp, line) and not line.strip().startswith(splitter): + line_parts = re.split(exp, line) + next_line = [] + while (len(line) + 2) > (self.config['wrap_length'] or self.config['line_length']) and line_parts: + next_line.append(line_parts.pop()) + line = splitter.join(line_parts) + if not line: + line = next_line.pop() + + cont_line = self._wrap(self.config['indent'] + splitter.join(next_line).lstrip()) + if self.config['use_parentheses']: + output = "{0}{1} (\n{2}{3}{4})".format( + line, splitter, cont_line, + "," if self.config['include_trailing_comma'] else "", + "\n" if wrap_mode in ( + settings.WrapModes.VERTICAL_HANGING_INDENT, + settings.WrapModes.VERTICAL_GRID_GROUPED, + ) else "") + lines = output.split('\n') + if ' #' in lines[-1] and lines[-1].endswith(')'): + line, comment = lines[-1].split(' #', 1) + lines[-1] = line + ') #' + comment[:-1] + return '\n'.join(lines) + return "{0}{1} \\\n{2}".format(line, splitter, cont_line) + elif len(line) > self.config['line_length'] and wrap_mode == settings.WrapModes.NOQA: + if "# NOQA" not in line: + return "{0} # NOQA".format(line) + + return line + + def _add_straight_imports(self, straight_modules, section, section_output): + for module in straight_modules: + if module in self.remove_imports: + continue + + if module in self.as_map: + import_definition = "import {0} as {1}".format(module, self.as_map[module]) + else: + import_definition = "import {0}".format(module) + + comments_above = self.comments['above']['straight'].pop(module, None) + if comments_above: + section_output.extend(comments_above) + section_output.append(self._add_comments(self.comments['straight'].get(module), import_definition)) + + def _add_from_imports(self, from_modules, section, section_output, ignore_case): + for module in from_modules: + if module in self.remove_imports: + continue + + import_start = "from {0} import ".format(module) + from_imports = self.imports[section]['from'][module] + from_imports = nsorted(from_imports, key=lambda key: self._module_key(key, self.config, True, ignore_case)) + if self.remove_imports: + from_imports = [line for line in from_imports if not "{0}.{1}".format(module, line) in + self.remove_imports] + + for from_import in copy.copy(from_imports): + submodule = module + "." 
+ from_import + import_as = self.as_map.get(submodule, False) + if import_as: + import_definition = "{0} as {1}".format(from_import, import_as) + if self.config['combine_as_imports'] and not ("*" in from_imports and + self.config['combine_star']): + from_imports[from_imports.index(from_import)] = import_definition + else: + import_statement = import_start + import_definition + force_grid_wrap = self.config['force_grid_wrap'] + comments = self.comments['straight'].get(submodule) + import_statement = self._add_comments(comments, self._wrap(import_statement)) + from_imports.remove(from_import) + section_output.append(import_statement) + + + if from_imports: + comments = self.comments['from'].pop(module, ()) + if "*" in from_imports and self.config['combine_star']: + import_statement = self._wrap(self._add_comments(comments, "{0}*".format(import_start))) + elif self.config['force_single_line']: + import_statements = [] + for from_import in from_imports: + single_import_line = self._add_comments(comments, import_start + from_import) + comment = self.comments['nested'].get(module, {}).pop(from_import, None) + if comment: + single_import_line += "{0} {1}".format(comments and ";" or " #", comment) + import_statements.append(self._wrap(single_import_line)) + comments = None + import_statement = "\n".join(import_statements) + else: + star_import = False + if "*" in from_imports: + section_output.append(self._add_comments(comments, "{0}*".format(import_start))) + from_imports.remove('*') + star_import = True + comments = None + + for from_import in copy.copy(from_imports): + comment = self.comments['nested'].get(module, {}).pop(from_import, None) + if comment: + single_import_line = self._add_comments(comments, import_start + from_import) + single_import_line += "{0} {1}".format(comments and ";" or " #", comment) + above_comments = self.comments['above']['from'].pop(module, None) + if above_comments: + section_output.extend(above_comments) + section_output.append(self._wrap(single_import_line)) + from_imports.remove(from_import) + comments = None + + if star_import: + import_statement = import_start + (", ").join(from_imports) + else: + import_statement = self._add_comments(comments, import_start + (", ").join(from_imports)) + if not from_imports: + import_statement = "" + + do_multiline_reformat = False + + force_grid_wrap = self.config['force_grid_wrap'] + if force_grid_wrap and len(from_imports) >= force_grid_wrap: + do_multiline_reformat = True + + if len(import_statement) > self.config['line_length'] and len(from_imports) > 1: + do_multiline_reformat = True + + # If line too long AND have imports AND we are NOT using GRID or VERTICAL wrap modes + if (len(import_statement) > self.config['line_length'] and len(from_imports) > 0 and + self.config['multi_line_output'] not in (1, 0)): + do_multiline_reformat = True + + if do_multiline_reformat: + import_statement = self._multi_line_reformat(import_start, from_imports, comments) + if not do_multiline_reformat and len(import_statement) > self.config['line_length']: + import_statement = self._wrap(import_statement) + + if import_statement: + above_comments = self.comments['above']['from'].pop(module, None) + if above_comments: + section_output.extend(above_comments) + section_output.append(import_statement) + + def _multi_line_reformat(self, import_start, from_imports, comments): + output_mode = settings.WrapModes._fields[self.config['multi_line_output']].lower() + formatter = getattr(self, "_output_" + output_mode, self._output_grid) + dynamic_indent = 
" " * (len(import_start) + 1) + indent = self.config['indent'] + line_length = self.config['wrap_length'] or self.config['line_length'] + import_statement = formatter(import_start, copy.copy(from_imports), + dynamic_indent, indent, line_length, comments) + if self.config['balanced_wrapping']: + lines = import_statement.split("\n") + line_count = len(lines) + if len(lines) > 1: + minimum_length = min([len(line) for line in lines[:-1]]) + else: + minimum_length = 0 + new_import_statement = import_statement + while (len(lines[-1]) < minimum_length and + len(lines) == line_count and line_length > 10): + import_statement = new_import_statement + line_length -= 1 + new_import_statement = formatter(import_start, copy.copy(from_imports), + dynamic_indent, indent, line_length, comments) + lines = new_import_statement.split("\n") + if import_statement.count('\n') == 0: + return self._wrap(import_statement) + return import_statement + + def _add_formatted_imports(self): + """Adds the imports back to the file. + + (at the index of the first import) sorted alphabetically and split between groups + + """ + sort_ignore_case = self.config['force_alphabetical_sort_within_sections'] + sections = itertools.chain(self.sections, self.config['forced_separate']) + + if self.config['no_sections']: + self.imports['no_sections'] = {'straight': [], 'from': {}} + for section in sections: + self.imports['no_sections']['straight'].extend(self.imports[section].get('straight', [])) + self.imports['no_sections']['from'].update(self.imports[section].get('from', {})) + sections = ('no_sections', ) + + output = [] + for section in sections: + straight_modules = self.imports[section]['straight'] + straight_modules = nsorted(straight_modules, key=lambda key: self._module_key(key, self.config)) + from_modules = self.imports[section]['from'] + from_modules = nsorted(from_modules, key=lambda key: self._module_key(key, self.config)) + + section_output = [] + if self.config['from_first']: + self._add_from_imports(from_modules, section, section_output, sort_ignore_case) + if self.config['lines_between_types'] and from_modules and straight_modules: + section_output.extend([''] * self.config['lines_between_types']) + self._add_straight_imports(straight_modules, section, section_output) + else: + self._add_straight_imports(straight_modules, section, section_output) + if self.config['lines_between_types'] and from_modules and straight_modules: + section_output.extend([''] * self.config['lines_between_types']) + self._add_from_imports(from_modules, section, section_output, sort_ignore_case) + + if self.config['force_sort_within_sections']: + def by_module(line): + section = 'B' + if line.startswith('#'): + return 'AA' + + line = re.sub('^from ', '', line) + line = re.sub('^import ', '', line) + if line.split(' ')[0] in self.config['force_to_top']: + section = 'A' + if not self.config['order_by_type']: + line = line.lower() + return '{0}{1}'.format(section, line) + section_output = nsorted(section_output, key=by_module) + + if section_output: + section_name = section + if section_name in self.place_imports: + self.place_imports[section_name] = section_output + continue + + section_title = self.config.get('import_heading_' + str(section_name).lower(), '') + if section_title: + section_comment = "# {0}".format(section_title) + if not section_comment in self.out_lines[0:1] and not section_comment in self.in_lines[0:1]: + section_output.insert(0, section_comment) + output += section_output + ([''] * self.config['lines_between_sections']) + + 
while [character.strip() for character in output[-1:]] == [""]: + output.pop() + + output_at = 0 + if self.import_index < self.original_length: + output_at = self.import_index + elif self._first_comment_index_end != -1 and self._first_comment_index_start <= 2: + output_at = self._first_comment_index_end + self.out_lines[output_at:0] = output + + imports_tail = output_at + len(output) + while [character.strip() for character in self.out_lines[imports_tail: imports_tail + 1]] == [""]: + self.out_lines.pop(imports_tail) + + if len(self.out_lines) > imports_tail: + next_construct = "" + self._in_quote = False + tail = self.out_lines[imports_tail:] + for index, line in enumerate(tail): + if not self._skip_line(line) and line.strip(): + if line.strip().startswith("#") and len(tail) > (index + 1) and tail[index + 1].strip(): + continue + next_construct = line + break + + if self.config['lines_after_imports'] != -1: + self.out_lines[imports_tail:0] = ["" for line in range(self.config['lines_after_imports'])] + elif next_construct.startswith("def") or next_construct.startswith("class") or \ + next_construct.startswith("@") or next_construct.startswith("async def"): + self.out_lines[imports_tail:0] = ["", ""] + else: + self.out_lines[imports_tail:0] = [""] + + if self.place_imports: + new_out_lines = [] + for index, line in enumerate(self.out_lines): + new_out_lines.append(line) + if line in self.import_placements: + new_out_lines.extend(self.place_imports[self.import_placements[line]]) + if len(self.out_lines) <= index or self.out_lines[index + 1].strip() != "": + new_out_lines.append("") + self.out_lines = new_out_lines + + def _output_grid(self, statement, imports, white_space, indent, line_length, comments): + statement += "(" + imports.pop(0) + while imports: + next_import = imports.pop(0) + next_statement = self._add_comments(comments, statement + ", " + next_import) + if len(next_statement.split("\n")[-1]) + 1 > line_length: + lines = ['{0}{1}'.format(white_space, next_import.split(" ")[0])] + for part in next_import.split(" ")[1:]: + new_line = '{0} {1}'.format(lines[-1], part) + if len(new_line) + 1 > line_length: + lines.append('{0}{1}'.format(white_space, part)) + else: + lines[-1] = new_line + next_import = '\n'.join(lines) + statement = (self._add_comments(comments, "{0},".format(statement)) + + "\n{0}".format(next_import)) + comments = None + else: + statement += ", " + next_import + return statement + ("," if self.config['include_trailing_comma'] else "") + ")" + + def _output_vertical(self, statement, imports, white_space, indent, line_length, comments): + first_import = self._add_comments(comments, imports.pop(0) + ",") + "\n" + white_space + return "{0}({1}{2}{3})".format( + statement, + first_import, + (",\n" + white_space).join(imports), + "," if self.config['include_trailing_comma'] else "", + ) + + def _output_hanging_indent(self, statement, imports, white_space, indent, line_length, comments): + statement += imports.pop(0) + while imports: + next_import = imports.pop(0) + next_statement = self._add_comments(comments, statement + ", " + next_import) + if len(next_statement.split("\n")[-1]) + 3 > line_length: + next_statement = (self._add_comments(comments, "{0}, \\".format(statement)) + + "\n{0}{1}".format(indent, next_import)) + comments = None + statement = next_statement + return statement + + def _output_vertical_hanging_indent(self, statement, imports, white_space, indent, line_length, comments): + return "{0}({1}\n{2}{3}{4}\n)".format( + statement, + 
self._add_comments(comments), + indent, + (",\n" + indent).join(imports), + "," if self.config['include_trailing_comma'] else "", + ) + + def _output_vertical_grid_common(self, statement, imports, white_space, indent, line_length, comments): + statement += self._add_comments(comments, "(") + "\n" + indent + imports.pop(0) + while imports: + next_import = imports.pop(0) + next_statement = "{0}, {1}".format(statement, next_import) + if len(next_statement.split("\n")[-1]) + 1 > line_length: + next_statement = "{0},\n{1}{2}".format(statement, indent, next_import) + statement = next_statement + if self.config['include_trailing_comma']: + statement += ',' + return statement + + def _output_vertical_grid(self, statement, imports, white_space, indent, line_length, comments): + return self._output_vertical_grid_common(statement, imports, white_space, indent, line_length, comments) + ")" + + def _output_vertical_grid_grouped(self, statement, imports, white_space, indent, line_length, comments): + return self._output_vertical_grid_common(statement, imports, white_space, indent, line_length, comments) + "\n)" + + def _output_noqa(self, statement, imports, white_space, indent, line_length, comments): + retval = '{0}{1}'.format(statement, ', '.join(imports)) + comment_str = ' '.join(comments) + if comments: + if len(retval) + 4 + len(comment_str) <= line_length: + return '{0} # {1}'.format(retval, comment_str) + else: + if len(retval) <= line_length: + return retval + if comments: + if "NOQA" in comments: + return '{0} # {1}'.format(retval, comment_str) + else: + return '{0} # NOQA {1}'.format(retval, comment_str) + else: + return '{0} # NOQA'.format(retval) + + @staticmethod + def _strip_comments(line, comments=None): + """Removes comments from import line.""" + if comments is None: + comments = [] + + new_comments = False + comment_start = line.find("#") + if comment_start != -1: + comments.append(line[comment_start + 1:].strip()) + new_comments = True + line = line[:comment_start] + + return line, comments, new_comments + + @staticmethod + def _format_simplified(import_line): + import_line = import_line.strip() + if import_line.startswith("from "): + import_line = import_line.replace("from ", "") + import_line = import_line.replace(" import ", ".") + elif import_line.startswith("import "): + import_line = import_line.replace("import ", "") + + return import_line + + @staticmethod + def _format_natural(import_line): + import_line = import_line.strip() + if not import_line.startswith("from ") and not import_line.startswith("import "): + if not "." 
in import_line: + return "import {0}".format(import_line) + parts = import_line.split(".") + end = parts.pop(-1) + return "from {0} import {1}".format(".".join(parts), end) + + return import_line + + def _skip_line(self, line): + skip_line = self._in_quote + if self.index == 1 and line.startswith("#"): + self._in_top_comment = True + return True + elif self._in_top_comment: + if not line.startswith("#"): + self._in_top_comment = False + self._first_comment_index_end = self.index - 1 + + if '"' in line or "'" in line: + index = 0 + if self._first_comment_index_start == -1 and (line.startswith('"') or line.startswith("'")): + self._first_comment_index_start = self.index + while index < len(line): + if line[index] == "\\": + index += 1 + elif self._in_quote: + if line[index:index + len(self._in_quote)] == self._in_quote: + self._in_quote = False + if self._first_comment_index_end < self._first_comment_index_start: + self._first_comment_index_end = self.index + elif line[index] in ("'", '"'): + long_quote = line[index:index + 3] + if long_quote in ('"""', "'''"): + self._in_quote = long_quote + index += 2 + else: + self._in_quote = line[index] + elif line[index] == "#": + break + index += 1 + + return skip_line or self._in_quote or self._in_top_comment + + def _strip_syntax(self, import_string): + import_string = import_string.replace("_import", "[[i]]") + for remove_syntax in ['\\', '(', ')', ',']: + import_string = import_string.replace(remove_syntax, " ") + import_list = import_string.split() + for key in ('from', 'import'): + if key in import_list: + import_list.remove(key) + import_string = ' '.join(import_list) + import_string = import_string.replace("[[i]]", "_import") + return import_string.replace("{ ", "{|").replace(" }", "|}") + + def _parse(self): + """Parses a python file taking out and categorizing imports.""" + self._in_quote = False + self._in_top_comment = False + while not self._at_end(): + line = self._get_line() + statement_index = self.index + skip_line = self._skip_line(line) + + if line in self._section_comments and not skip_line: + if self.import_index == -1: + self.import_index = self.index - 1 + continue + + if "isort:imports-" in line and line.startswith("#"): + section = line.split("isort:imports-")[-1].split()[0].upper() + self.place_imports[section] = [] + self.import_placements[line] = section + + if ";" in line: + for part in (part.strip() for part in line.split(";")): + if part and not part.startswith("from ") and not part.startswith("import "): + skip_line = True + + import_type = self._import_type(line) + if not import_type or skip_line: + self.out_lines.append(line) + continue + + for line in (line.strip() for line in line.split(";")): + import_type = self._import_type(line) + if not import_type: + self.out_lines.append(line) + continue + + line = line.replace("\t", " ").replace('import*', 'import *') + if self.import_index == -1: + self.import_index = self.index - 1 + + nested_comments = {} + import_string, comments, new_comments = self._strip_comments(line) + stripped_line = [part for part in self._strip_syntax(import_string).strip().split(" ") if part] + + if import_type == "from" and len(stripped_line) == 2 and stripped_line[1] != "*" and new_comments: + nested_comments[stripped_line[-1]] = comments[0] + + if "(" in line.split("#")[0] and not self._at_end(): + while not line.strip().endswith(")") and not self._at_end(): + line, comments, new_comments = self._strip_comments(self._get_line(), comments) + stripped_line = self._strip_syntax(line).strip() + 
if import_type == "from" and stripped_line and not " " in stripped_line and new_comments: + nested_comments[stripped_line] = comments[-1] + import_string += "\n" + line + else: + while line.strip().endswith("\\"): + line, comments, new_comments = self._strip_comments(self._get_line(), comments) + stripped_line = self._strip_syntax(line).strip() + if import_type == "from" and stripped_line and not " " in stripped_line and new_comments: + nested_comments[stripped_line] = comments[-1] + if import_string.strip().endswith(" import") or line.strip().startswith("import "): + import_string += "\n" + line + else: + import_string = import_string.rstrip().rstrip("\\") + " " + line.lstrip() + + if import_type == "from": + import_string = import_string.replace("import(", "import (") + parts = import_string.split(" import ") + from_import = parts[0].split(" ") + import_string = " import ".join([from_import[0] + " " + "".join(from_import[1:])] + parts[1:]) + + imports = [item.replace("{|", "{ ").replace("|}", " }") for item in + self._strip_syntax(import_string).split()] + if "as" in imports and (imports.index('as') + 1) < len(imports): + while "as" in imports: + index = imports.index('as') + if import_type == "from": + module = imports[0] + "." + imports[index - 1] + self.as_map[module] = imports[index + 1] + else: + module = imports[index - 1] + self.as_map[module] = imports[index + 1] + if not self.config['combine_as_imports']: + self.comments['straight'][module] = comments + comments = [] + del imports[index:index + 2] + if import_type == "from": + import_from = imports.pop(0) + placed_module = self.place_module(import_from) + if placed_module == '': + print( + "WARNING: could not place module {0} of line {1} --" + " Do you need to define a default section?".format(import_from, line) + ) + root = self.imports[placed_module][import_type] + for import_name in imports: + associated_comment = nested_comments.get(import_name) + if associated_comment: + self.comments['nested'].setdefault(import_from, {})[import_name] = associated_comment + comments.pop(comments.index(associated_comment)) + if comments: + self.comments['from'].setdefault(import_from, []).extend(comments) + + if len(self.out_lines) > max(self.import_index, self._first_comment_index_end + 1, 1) - 1: + last = self.out_lines and self.out_lines[-1].rstrip() or "" + while (last.startswith("#") and not last.endswith('"""') and not last.endswith("'''") and not + 'isort:imports-' in last): + self.comments['above']['from'].setdefault(import_from, []).insert(0, self.out_lines.pop(-1)) + if len(self.out_lines) > max(self.import_index - 1, self._first_comment_index_end + 1, 1) - 1: + last = self.out_lines[-1].rstrip() + else: + last = "" + if statement_index - 1 == self.import_index: + self.import_index -= len(self.comments['above']['from'].get(import_from, [])) + + if root.get(import_from, False): + root[import_from].update(imports) + else: + root[import_from] = OrderedSet(imports) + else: + for module in imports: + if comments: + self.comments['straight'][module] = comments + comments = None + + if len(self.out_lines) > max(self.import_index, self._first_comment_index_end + 1, 1) - 1: + + last = self.out_lines and self.out_lines[-1].rstrip() or "" + while (last.startswith("#") and not last.endswith('"""') and not last.endswith("'''") and + not 'isort:imports-' in last): + self.comments['above']['straight'].setdefault(module, []).insert(0, + self.out_lines.pop(-1)) + if len(self.out_lines) > 0: + last = self.out_lines[-1].rstrip() + else: + last = "" + 
if self.index - 1 == self.import_index: + self.import_index -= len(self.comments['above']['straight'].get(module, [])) + placed_module = self.place_module(module) + if placed_module == '': + print( + "WARNING: could not place module {0} of line {1} --" + " Do you need to define a default section?".format(import_from, line) + ) + self.imports[placed_module][import_type].add(module) + + +def coding_check(fname, default='utf-8'): + + # see https://www.python.org/dev/peps/pep-0263/ + pattern = re.compile(br'coding[:=]\s*([-\w.]+)') + + coding = default + with io.open(fname, 'rb') as f: + for line_number, line in enumerate(f, 1): + groups = re.findall(pattern, line) + if groups: + coding = groups[0].decode('ascii') + break + if line_number > 2: + break + + return coding + + +def get_stdlib_path(): + """Returns the path to the standard lib for the current path installation. + + This function can be dropped and "sysconfig.get_paths()" used directly once Python 2.6 support is dropped. + """ + if sys.version_info >= (2, 7): + import sysconfig + return sysconfig.get_paths()['stdlib'] + else: + return os.path.join(sys.prefix, 'lib') + + +def exists_case_sensitive(path): + """ + Returns if the given path exists and also matches the case on Windows. + + When finding files that can be imported, it is important for the cases to match because while + file os.path.exists("module.py") and os.path.exists("MODULE.py") both return True on Windows, Python + can only import using the case of the real file. + """ + result = os.path.exists(path) + if sys.platform.startswith('win') and result: + directory, basename = os.path.split(path) + result = basename in os.listdir(directory) + return result diff --git a/ptvsd/pydevd/third_party/isort_container/isort/main.py b/ptvsd/pydevd/third_party/isort_container/isort/main.py new file mode 100644 index 00000000..eae7afa5 --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/main.py @@ -0,0 +1,296 @@ +#! /usr/bin/env python +''' Tool for sorting imports alphabetically, and automatically separated into sections. + +Copyright (C) 2013 Timothy Edmund Crosley + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ +''' +from __future__ import absolute_import, division, print_function, unicode_literals + +import argparse +import glob +import os +import sys + +import setuptools + +from isort import SortImports, __version__ +from isort.settings import DEFAULT_SECTIONS, default, from_path, should_skip + +from .pie_slice import itemsview + + +INTRO = r""" +/#######################################################################\ + + `sMMy` + .yyyy- ` + ##soos## ./o. + ` ``..-..` ``...`.`` ` ```` ``-ssso``` + .s:-y- .+osssssso/. ./ossss+:so+:` :+o-`/osso:+sssssssso/ + .s::y- osss+.``.`` -ssss+-.`-ossso` ssssso/::..::+ssss:::. + .s::y- /ssss+//:-.` `ssss+ `ssss+ sssso` :ssss` + .s::y- `-/+oossssso/ `ssss/ sssso ssss/ :ssss` + .y-/y- ````:ssss` ossso. :ssss: ssss/ :ssss. + `/so:` `-//::/osss+ `+ssss+-/ossso: /sso- `osssso/. + \/ `-/oooo++/- .:/++:/++/-` .. `://++/. + + + isort your Python imports for you so you don't have to + + VERSION {0} + +\########################################################################/ +""".format(__version__) + + +def iter_source_code(paths, config, skipped): + """Iterate over all Python source files defined in paths.""" + for path in paths: + if os.path.isdir(path): + if should_skip(path, config, os.getcwd()): + skipped.append(path) + continue + + for dirpath, dirnames, filenames in os.walk(path, topdown=True): + for dirname in list(dirnames): + if should_skip(dirname, config, dirpath): + skipped.append(dirname) + dirnames.remove(dirname) + for filename in filenames: + if filename.endswith('.py'): + if should_skip(filename, config, dirpath): + skipped.append(filename) + else: + yield os.path.join(dirpath, filename) + else: + yield path + + +class ISortCommand(setuptools.Command): + """The :class:`ISortCommand` class is used by setuptools to perform + imports checks on registered modules. + """ + + description = "Run isort on modules registered in setuptools" + user_options = [] + + def initialize_options(self): + default_settings = default.copy() + for (key, value) in itemsview(default_settings): + setattr(self, key, value) + + def finalize_options(self): + "Get options from config files." 
+ self.arguments = {} + computed_settings = from_path(os.getcwd()) + for (key, value) in itemsview(computed_settings): + self.arguments[key] = value + + def distribution_files(self): + """Find distribution packages.""" + # This is verbatim from flake8 + if self.distribution.packages: + package_dirs = self.distribution.package_dir or {} + for package in self.distribution.packages: + pkg_dir = package + if package in package_dirs: + pkg_dir = package_dirs[package] + elif '' in package_dirs: + pkg_dir = package_dirs[''] + os.path.sep + pkg_dir + yield pkg_dir.replace('.', os.path.sep) + + if self.distribution.py_modules: + for filename in self.distribution.py_modules: + yield "%s.py" % filename + # Don't miss the setup.py file itself + yield "setup.py" + + def run(self): + arguments = self.arguments + wrong_sorted_files = False + arguments['check'] = True + for path in self.distribution_files(): + for python_file in glob.iglob(os.path.join(path, '*.py')): + try: + incorrectly_sorted = SortImports(python_file, **arguments).incorrectly_sorted + if incorrectly_sorted: + wrong_sorted_files = True + except IOError as e: + print("WARNING: Unable to parse file {0} due to {1}".format(python_file, e)) + if wrong_sorted_files: + exit(1) + + +def create_parser(): + parser = argparse.ArgumentParser(description='Sort Python import definitions alphabetically ' + 'within logical sections.') + parser.add_argument('files', nargs='*', help='One or more Python source files that need their imports sorted.') + parser.add_argument('-y', '--apply', dest='apply', action='store_true', + help='Tells isort to apply changes recursively without asking') + parser.add_argument('-l', '--lines', help='[Deprecated] The max length of an import line (used for wrapping ' + 'long imports).', + dest='line_length', type=int) + parser.add_argument('-w', '--line-width', help='The max length of an import line (used for wrapping long imports).', + dest='line_length', type=int) + parser.add_argument('-s', '--skip', help='Files that sort imports should skip over. 
If you want to skip multiple ' + 'files you should specify twice: --skip file1 --skip file2.', dest='skip', action='append') + parser.add_argument('-ns', '--dont-skip', help='Files that sort imports should never skip over.', + dest='not_skip', action='append') + parser.add_argument('-sg', '--skip-glob', help='Files that sort imports should skip over.', dest='skip_glob', + action='append') + parser.add_argument('-t', '--top', help='Force specific imports to the top of their appropriate section.', + dest='force_to_top', action='append') + parser.add_argument('-f', '--future', dest='known_future_library', action='append', + help='Force sortImports to recognize a module as part of the future compatibility libraries.') + parser.add_argument('-b', '--builtin', dest='known_standard_library', action='append', + help='Force sortImports to recognize a module as part of the python standard library.') + parser.add_argument('-o', '--thirdparty', dest='known_third_party', action='append', + help='Force sortImports to recognize a module as being part of a third party library.') + parser.add_argument('-p', '--project', dest='known_first_party', action='append', + help='Force sortImports to recognize a module as being part of the current python project.') + parser.add_argument('--virtual-env', dest='virtual_env', + help='Virtual environment to use for determining whether a package is third-party') + parser.add_argument('-m', '--multi-line', dest='multi_line_output', type=int, choices=[0, 1, 2, 3, 4, 5], + help='Multi line output (0-grid, 1-vertical, 2-hanging, 3-vert-hanging, 4-vert-grid, ' + '5-vert-grid-grouped).') + parser.add_argument('-i', '--indent', help='String to place for indents defaults to " " (4 spaces).', + dest='indent', type=str) + parser.add_argument('-a', '--add-import', dest='add_imports', action='append', + help='Adds the specified import line to all files, ' + 'automatically determining correct placement.') + parser.add_argument('-af', '--force-adds', dest='force_adds', action='store_true', + help='Forces import adds even if the original file is empty.') + parser.add_argument('-r', '--remove-import', dest='remove_imports', action='append', + help='Removes the specified import from all files.') + parser.add_argument('-ls', '--length-sort', help='Sort imports by their string length.', + dest='length_sort', action='store_true') + parser.add_argument('-d', '--stdout', help='Force resulting output to stdout, instead of in-place.', + dest='write_to_stdout', action='store_true') + parser.add_argument('-c', '--check-only', action='store_true', dest="check", + help='Checks the file for unsorted / unformatted imports and prints them to the ' + 'command line without modifying the file.') + parser.add_argument('-ws', '--ignore-whitespace', action='store_true', dest="ignore_whitespace", + help='Tells isort to ignore whitespace differences when --check-only is being used.') + parser.add_argument('-sl', '--force-single-line-imports', dest='force_single_line', action='store_true', + help='Forces all from imports to appear on their own line') + parser.add_argument('-ds', '--no-sections', help='Put all imports into the same section bucket', dest='no_sections', + action='store_true') + parser.add_argument('-sd', '--section-default', dest='default_section', + help='Sets the default section for imports (by default FIRSTPARTY) options: ' + + str(DEFAULT_SECTIONS)) + parser.add_argument('-df', '--diff', dest='show_diff', action='store_true', + help="Prints a diff of all the changes isort would make to a 
file, instead of " + "changing it in place") + parser.add_argument('-e', '--balanced', dest='balanced_wrapping', action='store_true', + help='Balances wrapping to produce the most consistent line length possible') + parser.add_argument('-rc', '--recursive', dest='recursive', action='store_true', + help='Recursively look for Python files of which to sort imports') + parser.add_argument('-ot', '--order-by-type', dest='order_by_type', + action='store_true', help='Order imports by type in addition to alphabetically') + parser.add_argument('-dt', '--dont-order-by-type', dest='dont_order_by_type', + action='store_true', help='Only order imports alphabetically, do not attempt type ordering') + parser.add_argument('-ac', '--atomic', dest='atomic', action='store_true', + help="Ensures the output doesn't save if the resulting file contains syntax errors.") + parser.add_argument('-cs', '--combine-star', dest='combine_star', action='store_true', + help="Ensures that if a star import is present, nothing else is imported from that namespace.") + parser.add_argument('-ca', '--combine-as', dest='combine_as_imports', action='store_true', + help="Combines as imports on the same line.") + parser.add_argument('-tc', '--trailing-comma', dest='include_trailing_comma', action='store_true', + help='Includes a trailing comma on multi line imports that include parentheses.') + parser.add_argument('-v', '--version', action='store_true', dest='show_version') + parser.add_argument('-vb', '--verbose', action='store_true', dest="verbose", + help='Shows verbose output, such as when files are skipped or when a check is successful.') + parser.add_argument('-q', '--quiet', action='store_true', dest="quiet", + help='Shows extra quiet output, only errors are outputted.') + parser.add_argument('-sp', '--settings-path', dest="settings_path", + help='Explicitly set the settings path instead of auto determining based on file location.') + parser.add_argument('-ff', '--from-first', dest='from_first', + help="Switches the typical ordering preference, showing from imports first then straight ones.") + parser.add_argument('-wl', '--wrap-length', dest='wrap_length', + help="Specifies how long lines that are wrapped should be, if not set line_length is used.") + parser.add_argument('-fgw', '--force-grid-wrap', nargs='?', const=2, type=int, dest="force_grid_wrap", + help='Force number of from imports (defaults to 2) to be grid wrapped regardless of line ' + 'length') + parser.add_argument('-fass', '--force-alphabetical-sort-within-sections', action='store_true', + dest="force_alphabetical_sort", help='Force all imports to be sorted alphabetically within a ' + 'section') + parser.add_argument('-fas', '--force-alphabetical-sort', action='store_true', dest="force_alphabetical_sort", + help='Force all imports to be sorted as a single section') + parser.add_argument('-fss', '--force-sort-within-sections', action='store_true', dest="force_sort_within_sections", + help='Force imports to be sorted by module, independent of import_type') + parser.add_argument('-lbt', '--lines-between-types', dest='lines_between_types', type=int) + parser.add_argument('-up', '--use-parentheses', dest='use_parentheses', action='store_true', + help='Use parenthesis for line continuation on lenght limit instead of slashes.') + + arguments = dict((key, value) for (key, value) in itemsview(vars(parser.parse_args())) if value) + if 'dont_order_by_type' in arguments: + arguments['order_by_type'] = False + return arguments + + +def main(): + arguments = create_parser() + 
if arguments.get('show_version'): + print(INTRO) + return + + if 'settings_path' in arguments: + sp = arguments['settings_path'] + arguments['settings_path'] = os.path.abspath(sp) if os.path.isdir(sp) else os.path.dirname(os.path.abspath(sp)) + + file_names = arguments.pop('files', []) + if file_names == ['-']: + SortImports(file_contents=sys.stdin.read(), write_to_stdout=True, **arguments) + else: + if not file_names: + file_names = ['.'] + arguments['recursive'] = True + if not arguments.get('apply', False): + arguments['ask_to_apply'] = True + config = from_path(os.path.abspath(file_names[0]) or os.getcwd()).copy() + config.update(arguments) + wrong_sorted_files = False + skipped = [] + if arguments.get('recursive', False): + file_names = iter_source_code(file_names, config, skipped) + num_skipped = 0 + if config['verbose'] or config.get('show_logo', False): + print(INTRO) + for file_name in file_names: + try: + sort_attempt = SortImports(file_name, **arguments) + incorrectly_sorted = sort_attempt.incorrectly_sorted + if arguments.get('check', False) and incorrectly_sorted: + wrong_sorted_files = True + if sort_attempt.skipped: + num_skipped += 1 + except IOError as e: + print("WARNING: Unable to parse file {0} due to {1}".format(file_name, e)) + if wrong_sorted_files: + exit(1) + + num_skipped += len(skipped) + if num_skipped and not arguments.get('quiet', False): + if config['verbose']: + for was_skipped in skipped: + print("WARNING: {0} was skipped as it's listed in 'skip' setting" + " or matches a glob in 'skip_glob' setting".format(was_skipped)) + print("Skipped {0} files".format(num_skipped)) + + +if __name__ == "__main__": + main() diff --git a/ptvsd/pydevd/third_party/isort_container/isort/natural.py b/ptvsd/pydevd/third_party/isort_container/isort/natural.py new file mode 100644 index 00000000..aac8c4a3 --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/natural.py @@ -0,0 +1,47 @@ +"""isort/natural.py. + +Enables sorting strings that contain numbers naturally + +usage: + natural.nsorted(list) + +Copyright (C) 2013 Timothy Edmund Crosley + +Implementation originally from @HappyLeapSecond stack overflow user in response to: + http://stackoverflow.com/questions/5967500/how-to-correctly-sort-a-string-with-a-number-inside + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
+ +""" +import re + + +def _atoi(text): + return int(text) if text.isdigit() else text + + +def _natural_keys(text): + return [_atoi(c) for c in re.split(r'(\d+)', text)] + + +def nsorted(to_sort, key=None): + """Returns a naturally sorted list""" + if key is None: + key_callback = _natural_keys + else: + def key_callback(item): + return _natural_keys(key(item)) + + return sorted(to_sort, key=key_callback) diff --git a/ptvsd/pydevd/third_party/isort_container/isort/pie_slice.py b/ptvsd/pydevd/third_party/isort_container/isort/pie_slice.py new file mode 100644 index 00000000..131f325a --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/pie_slice.py @@ -0,0 +1,594 @@ +"""pie_slice/overrides.py. + +Overrides Python syntax to conform to the Python3 version as much as possible using a '*' import + +Copyright (C) 2013 Timothy Edmund Crosley + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +""" +from __future__ import absolute_import + +import abc +import collections +import functools +import sys +from numbers import Integral + +__version__ = "1.1.0" + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +VERSION = sys.version_info + +native_dict = dict +native_round = round +native_filter = filter +native_map = map +native_zip = zip +native_range = range +native_str = str +native_chr = chr +native_input = input +native_next = next +native_object = object + +common = ['native_dict', 'native_round', 'native_filter', 'native_map', 'native_range', 'native_str', 'native_chr', + 'native_input', 'PY2', 'PY3', 'u', 'itemsview', 'valuesview', 'keysview', 'execute', 'integer_types', + 'native_next', 'native_object', 'with_metaclass', 'OrderedDict', 'lru_cache'] + + +def with_metaclass(meta, *bases): + """Enables use of meta classes across Python Versions. taken from jinja2/_compat.py. 
+ + Use it like this:: + + class BaseForm(object): + pass + + class FormType(type): + pass + + class Form(with_metaclass(FormType, BaseForm)): + pass + + """ + class metaclass(meta): + __call__ = type.__call__ + __init__ = type.__init__ + + def __new__(cls, name, this_bases, d): + if this_bases is None: + return type.__new__(cls, name, (), d) + return meta(name, bases, d) + return metaclass('temporary_class', None, {}) + + +def unmodified_isinstance(*bases): + """When called in the form + + MyOverrideClass(unmodified_isinstance(BuiltInClass)) + + it allows calls against passed in built in instances to pass even if there not a subclass + + """ + class UnmodifiedIsInstance(type): + if sys.version_info[0] == 2 and sys.version_info[1] <= 6: + + @classmethod + def __instancecheck__(cls, instance): + if cls.__name__ in (str(base.__name__) for base in bases): + return isinstance(instance, bases) + + subclass = getattr(instance, '__class__', None) + subtype = type(instance) + instance_type = getattr(abc, '_InstanceType', None) + if not instance_type: + class test_object: + pass + instance_type = type(test_object) + if subtype is instance_type: + subtype = subclass + if subtype is subclass or subclass is None: + return cls.__subclasscheck__(subtype) + return (cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)) + else: + @classmethod + def __instancecheck__(cls, instance): + if cls.__name__ in (str(base.__name__) for base in bases): + return isinstance(instance, bases) + + return type.__instancecheck__(cls, instance) + + return with_metaclass(UnmodifiedIsInstance, *bases) + + +if PY3: + import urllib + import builtins + from urllib import parse + + input = input + integer_types = (int, ) + + def u(string): + return string + + def itemsview(collection): + return collection.items() + + def valuesview(collection): + return collection.values() + + def keysview(collection): + return collection.keys() + + urllib.quote = parse.quote + urllib.quote_plus = parse.quote_plus + urllib.unquote = parse.unquote + urllib.unquote_plus = parse.unquote_plus + urllib.urlencode = parse.urlencode + execute = getattr(builtins, 'exec') + if VERSION[1] < 2: + def callable(entity): + return hasattr(entity, '__call__') + common.append('callable') + + __all__ = common + ['urllib'] +else: + from itertools import ifilter as filter + from itertools import imap as map + from itertools import izip as zip + from decimal import Decimal, ROUND_HALF_EVEN + + import codecs + str = unicode + chr = unichr + input = raw_input + range = xrange + integer_types = (int, long) + + import sys + stdout = sys.stdout + stderr = sys.stderr +# reload(sys) +# sys.stdout = stdout +# sys.stderr = stderr +# sys.setdefaultencoding('utf-8') + + def _create_not_allowed(name): + def _not_allow(*args, **kwargs): + raise NameError("name '{0}' is not defined".format(name)) + _not_allow.__name__ = name + return _not_allow + + for removed in ('apply', 'cmp', 'coerce', 'execfile', 'raw_input', 'unpacks'): + globals()[removed] = _create_not_allowed(removed) + + def u(s): + if isinstance(s, unicode): + return s + else: + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + + def execute(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + class _dict_view_base(object): + __slots__ = ('_dictionary', ) 
+ + def __init__(self, dictionary): + self._dictionary = dictionary + + def __repr__(self): + return "{0}({1})".format(self.__class__.__name__, str(list(self.__iter__()))) + + def __unicode__(self): + return str(self.__repr__()) + + def __str__(self): + return str(self.__unicode__()) + + class dict_keys(_dict_view_base): + __slots__ = () + + def __iter__(self): + return self._dictionary.iterkeys() + + class dict_values(_dict_view_base): + __slots__ = () + + def __iter__(self): + return self._dictionary.itervalues() + + class dict_items(_dict_view_base): + __slots__ = () + + def __iter__(self): + return self._dictionary.iteritems() + + def itemsview(collection): + return dict_items(collection) + + def valuesview(collection): + return dict_values(collection) + + def keysview(collection): + return dict_keys(collection) + + class dict(unmodified_isinstance(native_dict)): + def has_key(self, *args, **kwargs): + return AttributeError("'dict' object has no attribute 'has_key'") + + def items(self): + return dict_items(self) + + def keys(self): + return dict_keys(self) + + def values(self): + return dict_values(self) + + def round(number, ndigits=None): + return_int = False + if ndigits is None: + return_int = True + ndigits = 0 + if hasattr(number, '__round__'): + return number.__round__(ndigits) + + if ndigits < 0: + raise NotImplementedError('negative ndigits not supported yet') + exponent = Decimal('10') ** (-ndigits) + d = Decimal.from_float(number).quantize(exponent, + rounding=ROUND_HALF_EVEN) + if return_int: + return int(d) + else: + return float(d) + + def next(iterator): + try: + iterator.__next__() + except Exception: + native_next(iterator) + + class FixStr(type): + def __new__(cls, name, bases, dct): + if '__str__' in dct: + dct['__unicode__'] = dct['__str__'] + dct['__str__'] = lambda self: self.__unicode__().encode('utf-8') + return type.__new__(cls, name, bases, dct) + + if sys.version_info[1] <= 6: + def __instancecheck__(cls, instance): + if cls.__name__ == "object": + return isinstance(instance, native_object) + + subclass = getattr(instance, '__class__', None) + subtype = type(instance) + instance_type = getattr(abc, '_InstanceType', None) + if not instance_type: + class test_object: + pass + instance_type = type(test_object) + if subtype is instance_type: + subtype = subclass + if subtype is subclass or subclass is None: + return cls.__subclasscheck__(subtype) + return (cls.__subclasscheck__(subclass) or cls.__subclasscheck__(subtype)) + else: + def __instancecheck__(cls, instance): + if cls.__name__ == "object": + return isinstance(instance, native_object) + return type.__instancecheck__(cls, instance) + + class object(with_metaclass(FixStr, object)): + pass + + __all__ = common + ['round', 'dict', 'apply', 'cmp', 'coerce', 'execfile', 'raw_input', 'unpacks', 'str', 'chr', + 'input', 'range', 'filter', 'map', 'zip', 'object'] + +if sys.version_info[0] == 2 and sys.version_info[1] < 7: + # OrderedDict + # Copyright (c) 2009 Raymond Hettinger + # + # Permission is hereby granted, free of charge, to any person + # obtaining a copy of this software and associated documentation files + # (the "Software"), to deal in the Software without restriction, + # including without limitation the rights to use, copy, modify, merge, + # publish, distribute, sublicense, and/or sell copies of the Software, + # and to permit persons to whom the Software is furnished to do so, + # subject to the following conditions: + # + # The above copyright notice and this permission notice shall be + # 
included in all copies or substantial portions of the Software. + # + # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES + # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT + # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR + # OTHER DEALINGS IN THE SOFTWARE. + + from UserDict import DictMixin + + class OrderedDict(dict, DictMixin): + + def __init__(self, *args, **kwds): + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__end + except AttributeError: + self.clear() + self.update(*args, **kwds) + + def clear(self): + self.__end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.__map = {} # key --> [key, prev, next] + dict.clear(self) + + def __setitem__(self, key, value): + if key not in self: + end = self.__end + curr = end[1] + curr[2] = end[1] = self.__map[key] = [key, curr, end] + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + key, prev, next = self.__map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.__end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.__end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def popitem(self, last=True): + if not self: + raise KeyError('dictionary is empty') + if last: + key = reversed(self).next() + else: + key = iter(self).next() + value = self.pop(key) + return key, value + + def __reduce__(self): + items = [[k, self[k]] for k in self] + tmp = self.__map, self.__end + del self.__map, self.__end + inst_dict = vars(self).copy() + self.__map, self.__end = tmp + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def keys(self): + return list(self) + + setdefault = DictMixin.setdefault + update = DictMixin.update + pop = DictMixin.pop + values = DictMixin.values + items = DictMixin.items + iterkeys = DictMixin.iterkeys + itervalues = DictMixin.itervalues + iteritems = DictMixin.iteritems + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + + def copy(self): + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + if isinstance(other, OrderedDict): + if len(self) != len(other): + return False + for p, q in zip(self.items(), other.items()): + if p != q: + return False + return True + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other +else: + from collections import OrderedDict + + +if sys.version_info < (3, 2): + try: + from threading import Lock + except ImportError: + from dummy_threading import Lock + + from functools import wraps + + def lru_cache(maxsize=100): + """Least-recently-used cache decorator. + Taking from: https://github.com/MiCHiLU/python-functools32/blob/master/functools32/functools32.py + with slight modifications. + If *maxsize* is set to None, the LRU features are disabled and the cache + can grow without bound. + Arguments to the cached function must be hashable. 
+ View the cache statistics named tuple (hits, misses, maxsize, currsize) with + f.cache_info(). Clear the cache and statistics with f.cache_clear(). + Access the underlying function with f.__wrapped__. + See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used + + """ + def decorating_function(user_function, tuple=tuple, sorted=sorted, len=len, KeyError=KeyError): + hits, misses = [0], [0] + kwd_mark = (object(),) # separates positional and keyword args + lock = Lock() + + if maxsize is None: + CACHE = dict() + + @wraps(user_function) + def wrapper(*args, **kwds): + key = args + if kwds: + key += kwd_mark + tuple(sorted(kwds.items())) + try: + result = CACHE[key] + hits[0] += 1 + return result + except KeyError: + pass + result = user_function(*args, **kwds) + CACHE[key] = result + misses[0] += 1 + return result + else: + CACHE = OrderedDict() + + @wraps(user_function) + def wrapper(*args, **kwds): + key = args + if kwds: + key += kwd_mark + tuple(sorted(kwds.items())) + with lock: + cached = CACHE.get(key, None) + if cached: + del CACHE[key] + CACHE[key] = cached + hits[0] += 1 + return cached + result = user_function(*args, **kwds) + with lock: + CACHE[key] = result # record recent use of this key + misses[0] += 1 + while len(CACHE) > maxsize: + CACHE.popitem(last=False) + return result + + def cache_info(): + """Report CACHE statistics.""" + with lock: + return _CacheInfo(hits[0], misses[0], maxsize, len(CACHE)) + + def cache_clear(): + """Clear the CACHE and CACHE statistics.""" + with lock: + CACHE.clear() + hits[0] = misses[0] = 0 + + wrapper.cache_info = cache_info + wrapper.cache_clear = cache_clear + return wrapper + + return decorating_function + +else: + from functools import lru_cache + + +class OrderedSet(collections.MutableSet): + + def __init__(self, iterable=None): + self.end = end = [] + end += [None, end, end] + self.map = {} + if iterable is not None: + self |= iterable + + def __len__(self): + return len(self.map) + + def __contains__(self, key): + return key in self.map + + def add(self, key): + if key not in self.map: + end = self.end + curr = end[1] + curr[2] = end[1] = self.map[key] = [key, curr, end] + + def discard(self, key): + if key in self.map: + key, prev, next = self.map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def pop(self, last=True): + if not self: + raise KeyError('set is empty') + key = self.end[1][0] if last else self.end[2][0] + self.discard(key) + return key + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, list(self)) + + def __eq__(self, other): + if isinstance(other, OrderedSet): + return len(self) == len(other) and list(self) == list(other) + return set(self) == set(other) + + def update(self, other): + for item in other: + self.add(item) diff --git a/ptvsd/pydevd/third_party/isort_container/isort/pylama_isort.py b/ptvsd/pydevd/third_party/isort_container/isort/pylama_isort.py new file mode 100644 index 00000000..6fa235f9 --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/pylama_isort.py @@ -0,0 +1,29 @@ +import os +import sys + +from pylama.lint import Linter as BaseLinter + +from .isort import SortImports + + +class Linter(BaseLinter): + + def allow(self, path): + """Determine if this path should be 
linted.""" + return path.endswith('.py') + + def run(self, path, **meta): + """Lint the file. Return an array of error dicts if appropriate.""" + with open(os.devnull, 'w') as devnull: + # Suppress isort messages + sys.stdout = devnull + + if SortImports(path, check=True).incorrectly_sorted: + return [{ + 'lnum': 0, + 'col': 0, + 'text': 'Incorrectly sorted imports.', + 'type': 'ISORT' + }] + else: + return [] diff --git a/ptvsd/pydevd/third_party/isort_container/isort/settings.py b/ptvsd/pydevd/third_party/isort_container/isort/settings.py new file mode 100644 index 00000000..15cdb210 --- /dev/null +++ b/ptvsd/pydevd/third_party/isort_container/isort/settings.py @@ -0,0 +1,256 @@ +"""isort/settings.py. + +Defines how the default settings for isort should be loaded + +(First from the default setting dictionary at the top of the file, then overridden by any settings + in ~/.isort.cfg if there are any) + +Copyright (C) 2013 Timothy Edmund Crosley + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or +substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +""" +from __future__ import absolute_import, division, print_function, unicode_literals + +import fnmatch +import os +import posixpath +from collections import namedtuple + +from .pie_slice import itemsview, lru_cache, native_str + +try: + import configparser +except ImportError: + import ConfigParser as configparser + +MAX_CONFIG_SEARCH_DEPTH = 25 # The number of parent directories isort will look for a config file within +DEFAULT_SECTIONS = ('FUTURE', 'STDLIB', 'THIRDPARTY', 'FIRSTPARTY', 'LOCALFOLDER') + +WrapModes = ('GRID', 'VERTICAL', 'HANGING_INDENT', 'VERTICAL_HANGING_INDENT', 'VERTICAL_GRID', 'VERTICAL_GRID_GROUPED', 'NOQA') +WrapModes = namedtuple('WrapModes', WrapModes)(*range(len(WrapModes))) + +# Note that none of these lists must be complete as they are simply fallbacks for when included auto-detection fails. 
+default = {'force_to_top': [], + 'skip': ['__init__.py', ], + 'skip_glob': [], + 'line_length': 79, + 'wrap_length': 0, + 'sections': DEFAULT_SECTIONS, + 'no_sections': False, + 'known_future_library': ['__future__'], + 'known_standard_library': ['AL', 'BaseHTTPServer', 'Bastion', 'CGIHTTPServer', 'Carbon', 'ColorPicker', + 'ConfigParser', 'Cookie', 'DEVICE', 'DocXMLRPCServer', 'EasyDialogs', 'FL', + 'FrameWork', 'GL', 'HTMLParser', 'MacOS', 'MimeWriter', 'MiniAEFrame', 'Nav', + 'PixMapWrapper', 'Queue', 'SUNAUDIODEV', 'ScrolledText', 'SimpleHTTPServer', + 'SimpleXMLRPCServer', 'SocketServer', 'StringIO', 'Tix', 'Tkinter', 'UserDict', + 'UserList', 'UserString', 'W', '__builtin__', 'abc', 'aepack', 'aetools', + 'aetypes', 'aifc', 'al', 'anydbm', 'applesingle', 'argparse', 'array', 'ast', + 'asynchat', 'asyncio', 'asyncore', 'atexit', 'audioop', 'autoGIL', 'base64', + 'bdb', 'binascii', 'binhex', 'bisect', 'bsddb', 'buildtools', 'builtins', + 'bz2', 'cPickle', 'cProfile', 'cStringIO', 'calendar', 'cd', 'cfmfile', 'cgi', + 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs', 'codeop', 'collections', + 'colorsys', 'commands', 'compileall', 'compiler', 'concurrent', 'configparser', + 'contextlib', 'cookielib', 'copy', 'copy_reg', 'copyreg', 'crypt', 'csv', + 'ctypes', 'curses', 'datetime', 'dbhash', 'dbm', 'decimal', 'difflib', + 'dircache', 'dis', 'distutils', 'dl', 'doctest', 'dumbdbm', 'dummy_thread', + 'dummy_threading', 'email', 'encodings', 'ensurepip', 'enum', 'errno', + 'exceptions', 'faulthandler', 'fcntl', 'filecmp', 'fileinput', 'findertools', + 'fl', 'flp', 'fm', 'fnmatch', 'formatter', 'fpectl', 'fpformat', 'fractions', + 'ftplib', 'functools', 'future_builtins', 'gc', 'gdbm', 'gensuitemodule', + 'getopt', 'getpass', 'gettext', 'gl', 'glob', 'grp', 'gzip', 'hashlib', + 'heapq', 'hmac', 'hotshot', 'html', 'htmlentitydefs', 'htmllib', 'http', + 'httplib', 'ic', 'icopen', 'imageop', 'imaplib', 'imgfile', 'imghdr', 'imp', + 'importlib', 'imputil', 'inspect', 'io', 'ipaddress', 'itertools', 'jpeg', + 'json', 'keyword', 'lib2to3', 'linecache', 'locale', 'logging', 'lzma', + 'macerrors', 'macostools', 'macpath', 'macresource', 'mailbox', 'mailcap', + 'marshal', 'math', 'md5', 'mhlib', 'mimetools', 'mimetypes', 'mimify', 'mmap', + 'modulefinder', 'msilib', 'msvcrt', 'multifile', 'multiprocessing', 'mutex', + 'netrc', 'new', 'nis', 'nntplib', 'numbers', 'operator', 'optparse', 'os', + 'ossaudiodev', 'parser', 'pathlib', 'pdb', 'pickle', 'pickletools', 'pipes', + 'pkgutil', 'platform', 'plistlib', 'popen2', 'poplib', 'posix', 'posixfile', + 'pprint', 'profile', 'pstats', 'pty', 'pwd', 'py_compile', 'pyclbr', 'pydoc', + 'queue', 'quopri', 'random', 're', 'readline', 'reprlib', 'resource', 'rexec', + 'rfc822', 'rlcompleter', 'robotparser', 'runpy', 'sched', 'secrets', 'select', + 'selectors', 'sets', 'sgmllib', 'sha', 'shelve', 'shlex', 'shutil', 'signal', + 'site', 'sitecustomize', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'socketserver', + 'spwd', 'sqlite3', 'ssl', 'stat', 'statistics', 'statvfs', 'string', 'stringprep', + 'struct', 'subprocess', 'sunau', 'sunaudiodev', 'symbol', 'symtable', 'sys', + 'sysconfig', 'syslog', 'tabnanny', 'tarfile', 'telnetlib', 'tempfile', 'termios', + 'test', 'textwrap', 'this', 'thread', 'threading', 'time', 'timeit', 'tkinter', + 'token', 'tokenize', 'trace', 'traceback', 'tracemalloc', 'ttk', 'tty', 'turtle', + 'turtledemo', 'types', 'typing', 'unicodedata', 'unittest', 'urllib', 'urllib2', + 'urlparse', 'user', 'usercustomize', 'uu', 'uuid', 'venv', 
'videoreader', + 'warnings', 'wave', 'weakref', 'webbrowser', 'whichdb', 'winreg', 'winsound', + 'wsgiref', 'xdrlib', 'xml', 'xmlrpc', 'xmlrpclib', 'zipapp', 'zipfile', + 'zipimport', 'zlib'], + 'known_third_party': ['google.appengine.api'], + 'known_first_party': [], + 'multi_line_output': WrapModes.GRID, + 'forced_separate': [], + 'indent': ' ' * 4, + 'length_sort': False, + 'add_imports': [], + 'remove_imports': [], + 'force_single_line': False, + 'default_section': 'FIRSTPARTY', + 'import_heading_future': '', + 'import_heading_stdlib': '', + 'import_heading_thirdparty': '', + 'import_heading_firstparty': '', + 'import_heading_localfolder': '', + 'balanced_wrapping': False, + 'use_parentheses': False, + 'order_by_type': True, + 'atomic': False, + 'lines_after_imports': -1, + 'lines_between_sections': 1, + 'lines_between_types': 0, + 'combine_as_imports': False, + 'combine_star': False, + 'include_trailing_comma': False, + 'from_first': False, + 'verbose': False, + 'quiet': False, + 'force_adds': False, + 'force_alphabetical_sort_within_sections': False, + 'force_alphabetical_sort': False, + 'force_grid_wrap': 0, + 'force_sort_within_sections': False, + 'show_diff': False, + 'ignore_whitespace': False} + + +@lru_cache() +def from_path(path): + computed_settings = default.copy() + _update_settings_with_config(path, '.editorconfig', '~/.editorconfig', ('*', '*.py', '**.py'), computed_settings) + _update_settings_with_config(path, '.isort.cfg', '~/.isort.cfg', ('settings', 'isort'), computed_settings) + _update_settings_with_config(path, 'setup.cfg', None, ('isort', ), computed_settings) + _update_settings_with_config(path, 'tox.ini', None, ('isort', ), computed_settings) + return computed_settings + + +def _update_settings_with_config(path, name, default, sections, computed_settings): + editor_config_file = default and os.path.expanduser(default) + tries = 0 + current_directory = path + while current_directory and tries < MAX_CONFIG_SEARCH_DEPTH: + potential_path = os.path.join(current_directory, native_str(name)) + if os.path.exists(potential_path): + editor_config_file = potential_path + break + + new_directory = os.path.split(current_directory)[0] + if current_directory == new_directory: + break + current_directory = new_directory + tries += 1 + + if editor_config_file and os.path.exists(editor_config_file): + _update_with_config_file(editor_config_file, sections, computed_settings) + + +def _update_with_config_file(file_path, sections, computed_settings): + settings = _get_config_data(file_path, sections).copy() + if not settings: + return + + if file_path.endswith('.editorconfig'): + indent_style = settings.pop('indent_style', '').strip() + indent_size = settings.pop('indent_size', '').strip() + if indent_style == 'space': + computed_settings['indent'] = ' ' * (indent_size and int(indent_size) or 4) + elif indent_style == 'tab': + computed_settings['indent'] = '\t' * (indent_size and int(indent_size) or 1) + + max_line_length = settings.pop('max_line_length', '').strip() + if max_line_length: + computed_settings['line_length'] = float('inf') if max_line_length == 'off' else int(max_line_length) + + for key, value in itemsview(settings): + access_key = key.replace('not_', '').lower() + existing_value_type = type(default.get(access_key, '')) + if existing_value_type in (list, tuple): + # sections has fixed order values; no adding or substraction from any set + if access_key == 'sections': + computed_settings[access_key] = tuple(_as_list(value)) + else: + existing_data = 
set(computed_settings.get(access_key, default.get(access_key))) + if key.startswith('not_'): + computed_settings[access_key] = list(existing_data.difference(_as_list(value))) + else: + computed_settings[access_key] = list(existing_data.union(_as_list(value))) + elif existing_value_type == bool and value.lower().strip() == 'false': + computed_settings[access_key] = False + elif key.startswith('known_'): + computed_settings[access_key] = list(_as_list(value)) + elif key == 'force_grid_wrap': + try: + result = existing_value_type(value) + except ValueError: + # backwards compat + result = default.get(access_key) if value.lower().strip() == 'false' else 2 + computed_settings[access_key] = result + else: + computed_settings[access_key] = existing_value_type(value) + + +def _as_list(value): + return filter(bool, [item.strip() for item in value.replace('\n', ',').split(',')]) + + +@lru_cache() +def _get_config_data(file_path, sections): + with open(file_path, 'rU') as config_file: + if file_path.endswith('.editorconfig'): + line = '\n' + last_position = config_file.tell() + while line: + line = config_file.readline() + if '[' in line: + config_file.seek(last_position) + break + last_position = config_file.tell() + + config = configparser.SafeConfigParser() + config.readfp(config_file) + settings = dict() + for section in sections: + if config.has_section(section): + settings.update(dict(config.items(section))) + + return settings + + return {} + + +def should_skip(filename, config, path='/'): + """Returns True if the file should be skipped based on the passed in settings.""" + for skip_path in config['skip']: + if posixpath.abspath(posixpath.join(path, filename)) == posixpath.abspath(skip_path.replace('\\', '/')): + return True + + position = os.path.split(filename) + while position[1]: + if position[1] in config['skip']: + return True + position = os.path.split(position[0]) + + for glob in config['skip_glob']: + if fnmatch.fnmatch(filename, glob): + return True + + return False diff --git a/ptvsd/pydevd/third_party/pep8/autopep8.py b/ptvsd/pydevd/third_party/pep8/autopep8.py new file mode 100644 index 00000000..7b66d307 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/autopep8.py @@ -0,0 +1,3827 @@ +#!/usr/bin/env python + +# Copyright (C) 2010-2011 Hideo Hattori +# Copyright (C) 2011-2013 Hideo Hattori, Steven Myint +# Copyright (C) 2013-2016 Hideo Hattori, Steven Myint, Bill Wendling +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
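To make the cascade implemented in settings.py above concrete, here is a hedged sketch of resolving configuration for a made-up project; the paths, option values and the isort.settings import path are all assumptions:

```python
# Assumed on disk at /home/user/project/.isort.cfg:
#     [settings]
#     line_length = 100
#     known_first_party = myapp
#     not_skip = __init__.py
from isort.settings import from_path, should_skip  # assumes vendored isort on sys.path

config = from_path('/home/user/project/src')   # walks up parent directories (up to 25 levels)
print(config['line_length'])                   # 100 -- the .isort.cfg value overrides the default 79
print(config['known_first_party'])             # ['myapp']
print(should_skip('__init__.py', config))      # False once 'not_skip' removes it from the skip list
```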
+ +"""Automatically formats Python code to conform to the PEP 8 style guide. + +Fixes that only need be done once can be added by adding a function of the form +"fix_(source)" to this module. They should return the fixed source code. +These fixes are picked up by apply_global_fixes(). + +Fixes that depend on pycodestyle should be added as methods to FixPEP8. See the +class documentation for more information. + +""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function +from __future__ import unicode_literals + +import codecs +import collections +import copy +import difflib +import fnmatch +import inspect +import io +import keyword +import locale +import os +import re +import signal +import sys +import textwrap +import token +import tokenize + +import pycodestyle + +def check_lib2to3(): + try: + import lib2to3 + except ImportError: + sys.path.append(os.path.join(os.path.dirname(__file__), 'lib2to3')) + import lib2to3 + + + +try: + unicode +except NameError: + unicode = str + + +__version__ = '1.3' + + +CR = '\r' +LF = '\n' +CRLF = '\r\n' + + +PYTHON_SHEBANG_REGEX = re.compile(r'^#!.*\bpython[23]?\b\s*$') +LAMBDA_REGEX = re.compile(r'([\w.]+)\s=\slambda\s*([\(\)\w,\s.]*):') +COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+([^][)(}{]+)\s+(in|is)\s') +BARE_EXCEPT_REGEX = re.compile(r'except\s*:') +STARTSWITH_DEF_REGEX = re.compile(r'^(async\s+def|def)\s.*\):') + + +# For generating line shortening candidates. +SHORTEN_OPERATOR_GROUPS = frozenset([ + frozenset([',']), + frozenset(['%']), + frozenset([',', '(', '[', '{']), + frozenset(['%', '(', '[', '{']), + frozenset([',', '(', '[', '{', '%', '+', '-', '*', '/', '//']), + frozenset(['%', '+', '-', '*', '/', '//']), +]) + + +DEFAULT_IGNORE = 'E24,W503' +DEFAULT_INDENT_SIZE = 4 + + +# W602 is handled separately due to the need to avoid "with_traceback". 
+CODE_TO_2TO3 = { + 'E231': ['ws_comma'], + 'E721': ['idioms'], + 'W601': ['has_key'], + 'W603': ['ne'], + 'W604': ['repr'], + 'W690': ['apply', + 'except', + 'exitfunc', + 'numliterals', + 'operator', + 'paren', + 'reduce', + 'renames', + 'standarderror', + 'sys_exc', + 'throw', + 'tuple_params', + 'xreadlines']} + + +if sys.platform == 'win32': # pragma: no cover + DEFAULT_CONFIG = os.path.expanduser(r'~\.pep8') +else: + DEFAULT_CONFIG = os.path.join(os.getenv('XDG_CONFIG_HOME') or + os.path.expanduser('~/.config'), 'pep8') +PROJECT_CONFIG = ('setup.cfg', 'tox.ini', '.pep8') + + +MAX_PYTHON_FILE_DETECTION_BYTES = 1024 + + +def open_with_encoding(filename, + encoding=None, mode='r', limit_byte_check=-1): + """Return opened file with a specific encoding.""" + if not encoding: + encoding = detect_encoding(filename, limit_byte_check=limit_byte_check) + + return io.open(filename, mode=mode, encoding=encoding, + newline='') # Preserve line endings + + +def detect_encoding(filename, limit_byte_check=-1): + """Return file encoding.""" + try: + with open(filename, 'rb') as input_file: + from lib2to3.pgen2 import tokenize as lib2to3_tokenize + encoding = lib2to3_tokenize.detect_encoding(input_file.readline)[0] + + with open_with_encoding(filename, encoding) as test_file: + test_file.read(limit_byte_check) + + return encoding + except (LookupError, SyntaxError, UnicodeDecodeError): + return 'latin-1' + + +def readlines_from_file(filename): + """Return contents of file.""" + with open_with_encoding(filename) as input_file: + return input_file.readlines() + + +def extended_blank_lines(logical_line, + blank_lines, + blank_before, + indent_level, + previous_logical): + """Check for missing blank lines after class declaration.""" + if previous_logical.startswith('def '): + if blank_lines and pycodestyle.DOCSTRING_REGEX.match(logical_line): + yield (0, 'E303 too many blank lines ({0})'.format(blank_lines)) + elif pycodestyle.DOCSTRING_REGEX.match(previous_logical): + # Missing blank line between class docstring and method declaration. + if ( + indent_level and + not blank_lines and + not blank_before and + logical_line.startswith(('def ')) and + '(self' in logical_line + ): + yield (0, 'E301 expected 1 blank line, found 0') + + +pycodestyle.register_check(extended_blank_lines) + + +def continued_indentation(logical_line, tokens, indent_level, indent_char, + noqa): + """Override pycodestyle's function to provide indentation information.""" + first_row = tokens[0][2][0] + nrows = 1 + tokens[-1][2][0] - first_row + if noqa or nrows == 1: + return + + # indent_next tells us whether the next block is indented. Assuming + # that it is indented by 4 spaces, then we should not allow 4-space + # indents on the final continuation line. In turn, some other + # indents are allowed to have an extra 4 spaces. + indent_next = logical_line.endswith(':') + + row = depth = 0 + valid_hangs = ( + (DEFAULT_INDENT_SIZE,) + if indent_char != '\t' else (DEFAULT_INDENT_SIZE, + 2 * DEFAULT_INDENT_SIZE) + ) + + # Remember how many brackets were opened on each line. + parens = [0] * nrows + + # Relative indents of physical lines. + rel_indent = [0] * nrows + + # For each depth, collect a list of opening rows. + open_rows = [[0]] + # For each depth, memorize the hanging indentation. + hangs = [None] + + # Visual indents. 
+ indent_chances = {} + last_indent = tokens[0][2] + indent = [last_indent[1]] + + last_token_multiline = None + line = None + last_line = '' + last_line_begins_with_multiline = False + for token_type, text, start, end, line in tokens: + + newline = row < start[0] - first_row + if newline: + row = start[0] - first_row + newline = (not last_token_multiline and + token_type not in (tokenize.NL, tokenize.NEWLINE)) + last_line_begins_with_multiline = last_token_multiline + + if newline: + # This is the beginning of a continuation line. + last_indent = start + + # Record the initial indent. + rel_indent[row] = pycodestyle.expand_indent(line) - indent_level + + # Identify closing bracket. + close_bracket = (token_type == tokenize.OP and text in ']})') + + # Is the indent relative to an opening bracket line? + for open_row in reversed(open_rows[depth]): + hang = rel_indent[row] - rel_indent[open_row] + hanging_indent = hang in valid_hangs + if hanging_indent: + break + if hangs[depth]: + hanging_indent = (hang == hangs[depth]) + + visual_indent = (not close_bracket and hang > 0 and + indent_chances.get(start[1])) + + if close_bracket and indent[depth]: + # Closing bracket for visual indent. + if start[1] != indent[depth]: + yield (start, 'E124 {0}'.format(indent[depth])) + elif close_bracket and not hang: + pass + elif indent[depth] and start[1] < indent[depth]: + # Visual indent is broken. + yield (start, 'E128 {0}'.format(indent[depth])) + elif (hanging_indent or + (indent_next and + rel_indent[row] == 2 * DEFAULT_INDENT_SIZE)): + # Hanging indent is verified. + if close_bracket: + yield (start, 'E123 {0}'.format(indent_level + + rel_indent[open_row])) + hangs[depth] = hang + elif visual_indent is True: + # Visual indent is verified. + indent[depth] = start[1] + elif visual_indent in (text, unicode): + # Ignore token lined up with matching one from a previous line. + pass + else: + one_indented = (indent_level + rel_indent[open_row] + + DEFAULT_INDENT_SIZE) + # Indent is broken. + if hang <= 0: + error = ('E122', one_indented) + elif indent[depth]: + error = ('E127', indent[depth]) + elif not close_bracket and hangs[depth]: + error = ('E131', one_indented) + elif hang > DEFAULT_INDENT_SIZE: + error = ('E126', one_indented) + else: + hangs[depth] = hang + error = ('E121', one_indented) + + yield (start, '{0} {1}'.format(*error)) + + # Look for visual indenting. + if ( + parens[row] and + token_type not in (tokenize.NL, tokenize.COMMENT) and + not indent[depth] + ): + indent[depth] = start[1] + indent_chances[start[1]] = True + # Deal with implicit string concatenation. + elif (token_type in (tokenize.STRING, tokenize.COMMENT) or + text in ('u', 'ur', 'b', 'br')): + indent_chances[start[1]] = unicode + # Special case for the "if" statement because len("if (") is equal to + # 4. + elif not indent_chances and not row and not depth and text == 'if': + indent_chances[end[1] + 1] = True + elif text == ':' and line[end[1]:].isspace(): + open_rows[depth].append(row) + + # Keep track of bracket depth. + if token_type == tokenize.OP: + if text in '([{': + depth += 1 + indent.append(0) + hangs.append(None) + if len(open_rows) == depth: + open_rows.append([]) + open_rows[depth].append(row) + parens[row] += 1 + elif text in ')]}' and depth > 0: + # Parent indents should not be more than this one. 
+ prev_indent = indent.pop() or last_indent[1] + hangs.pop() + for d in range(depth): + if indent[d] > prev_indent: + indent[d] = 0 + for ind in list(indent_chances): + if ind >= prev_indent: + del indent_chances[ind] + del open_rows[depth + 1:] + depth -= 1 + if depth: + indent_chances[indent[depth]] = True + for idx in range(row, -1, -1): + if parens[idx]: + parens[idx] -= 1 + break + assert len(indent) == depth + 1 + if ( + start[1] not in indent_chances and + # This is for purposes of speeding up E121 (GitHub #90). + not last_line.rstrip().endswith(',') + ): + # Allow to line up tokens. + indent_chances[start[1]] = text + + last_token_multiline = (start[0] != end[0]) + if last_token_multiline: + rel_indent[end[0] - first_row] = rel_indent[row] + + last_line = line + + if ( + indent_next and + not last_line_begins_with_multiline and + pycodestyle.expand_indent(line) == indent_level + DEFAULT_INDENT_SIZE + ): + pos = (start[0], indent[0] + 4) + desired_indent = indent_level + 2 * DEFAULT_INDENT_SIZE + if visual_indent: + yield (pos, 'E129 {0}'.format(desired_indent)) + else: + yield (pos, 'E125 {0}'.format(desired_indent)) + + +del pycodestyle._checks['logical_line'][pycodestyle.continued_indentation] +pycodestyle.register_check(continued_indentation) + + +class FixPEP8(object): + + """Fix invalid code. + + Fixer methods are prefixed "fix_". The _fix_source() method looks for these + automatically. + + The fixer method can take either one or two arguments (in addition to + self). The first argument is "result", which is the error information from + pycodestyle. The second argument, "logical", is required only for + logical-line fixes. + + The fixer method can return the list of modified lines or None. An empty + list would mean that no changes were made. None would mean that only the + line reported in the pycodestyle error was modified. Note that the modified + line numbers that are returned are indexed at 1. This typically would + correspond with the line number reported in the pycodestyle error + information. + + [fixed method list] + - e111,e114,e115,e116 + - e121,e122,e123,e124,e125,e126,e127,e128,e129 + - e201,e202,e203 + - e211 + - e221,e222,e223,e224,e225 + - e231 + - e251 + - e261,e262 + - e271,e272,e273,e274 + - e301,e302,e303,e304,e306 + - e401 + - e502 + - e701,e702,e703,e704 + - e711,e712,e713,e714 + - e722 + - e731 + - w291 + - w503 + + """ + + def __init__(self, filename, + options, + contents=None, + long_line_ignore_cache=None): + self.filename = filename + if contents is None: + self.source = readlines_from_file(filename) + else: + sio = io.StringIO(contents) + self.source = sio.readlines() + self.options = options + self.indent_word = _get_indentword(''.join(self.source)) + + self.long_line_ignore_cache = ( + set() if long_line_ignore_cache is None + else long_line_ignore_cache) + + # Many fixers are the same even though pycodestyle categorizes them + # differently. 
+ self.fix_e115 = self.fix_e112 + self.fix_e116 = self.fix_e113 + self.fix_e121 = self._fix_reindent + self.fix_e122 = self._fix_reindent + self.fix_e123 = self._fix_reindent + self.fix_e124 = self._fix_reindent + self.fix_e126 = self._fix_reindent + self.fix_e127 = self._fix_reindent + self.fix_e128 = self._fix_reindent + self.fix_e129 = self._fix_reindent + self.fix_e202 = self.fix_e201 + self.fix_e203 = self.fix_e201 + self.fix_e211 = self.fix_e201 + self.fix_e221 = self.fix_e271 + self.fix_e222 = self.fix_e271 + self.fix_e223 = self.fix_e271 + self.fix_e226 = self.fix_e225 + self.fix_e227 = self.fix_e225 + self.fix_e228 = self.fix_e225 + self.fix_e241 = self.fix_e271 + self.fix_e242 = self.fix_e224 + self.fix_e261 = self.fix_e262 + self.fix_e272 = self.fix_e271 + self.fix_e273 = self.fix_e271 + self.fix_e274 = self.fix_e271 + self.fix_e306 = self.fix_e301 + self.fix_e501 = ( + self.fix_long_line_logically if + options and (options.aggressive >= 2 or options.experimental) else + self.fix_long_line_physically) + self.fix_e703 = self.fix_e702 + self.fix_w293 = self.fix_w291 + + def _fix_source(self, results): + try: + (logical_start, logical_end) = _find_logical(self.source) + logical_support = True + except (SyntaxError, tokenize.TokenError): # pragma: no cover + logical_support = False + + completed_lines = set() + for result in sorted(results, key=_priority_key): + if result['line'] in completed_lines: + continue + + fixed_methodname = 'fix_' + result['id'].lower() + if hasattr(self, fixed_methodname): + fix = getattr(self, fixed_methodname) + + line_index = result['line'] - 1 + original_line = self.source[line_index] + + is_logical_fix = len(_get_parameters(fix)) > 2 + if is_logical_fix: + logical = None + if logical_support: + logical = _get_logical(self.source, + result, + logical_start, + logical_end) + if logical and set(range( + logical[0][0] + 1, + logical[1][0] + 1)).intersection( + completed_lines): + continue + + modified_lines = fix(result, logical) + else: + modified_lines = fix(result) + + if modified_lines is None: + # Force logical fixes to report what they modified. + assert not is_logical_fix + + if self.source[line_index] == original_line: + modified_lines = [] + + if modified_lines: + completed_lines.update(modified_lines) + elif modified_lines == []: # Empty list means no fix + if self.options.verbose >= 2: + print( + '---> Not fixing {error} on line {line}'.format( + error=result['id'], line=result['line']), + file=sys.stderr) + else: # We assume one-line fix when None. 
+ completed_lines.add(result['line']) + else: + if self.options.verbose >= 3: + print( + "---> '{0}' is not defined.".format(fixed_methodname), + file=sys.stderr) + + info = result['info'].strip() + print('---> {0}:{1}:{2}:{3}'.format(self.filename, + result['line'], + result['column'], + info), + file=sys.stderr) + + def fix(self): + """Return a version of the source code with PEP 8 violations fixed.""" + pep8_options = { + 'ignore': self.options.ignore, + 'select': self.options.select, + 'max_line_length': self.options.max_line_length, + } + results = _execute_pep8(pep8_options, self.source) + + if self.options.verbose: + progress = {} + for r in results: + if r['id'] not in progress: + progress[r['id']] = set() + progress[r['id']].add(r['line']) + print('---> {n} issue(s) to fix {progress}'.format( + n=len(results), progress=progress), file=sys.stderr) + + if self.options.line_range: + start, end = self.options.line_range + results = [r for r in results + if start <= r['line'] <= end] + + self._fix_source(filter_results(source=''.join(self.source), + results=results, + aggressive=self.options.aggressive)) + + if self.options.line_range: + # If number of lines has changed then change line_range. + count = sum(sline.count('\n') + for sline in self.source[start - 1:end]) + self.options.line_range[1] = start + count - 1 + + return ''.join(self.source) + + def _fix_reindent(self, result): + """Fix a badly indented line. + + This is done by adding or removing from its initial indent only. + + """ + num_indent_spaces = int(result['info'].split()[1]) + line_index = result['line'] - 1 + target = self.source[line_index] + + self.source[line_index] = ' ' * num_indent_spaces + target.lstrip() + + def fix_e112(self, result): + """Fix under-indented comments.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + if not target.lstrip().startswith('#'): + # Don't screw with invalid syntax. + return [] + + self.source[line_index] = self.indent_word + target + + def fix_e113(self, result): + """Fix over-indented comments.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + indent = _get_indentation(target) + stripped = target.lstrip() + + if not stripped.startswith('#'): + # Don't screw with invalid syntax. + return [] + + self.source[line_index] = indent[1:] + stripped + + def fix_e125(self, result): + """Fix indentation undistinguish from the next logical line.""" + num_indent_spaces = int(result['info'].split()[1]) + line_index = result['line'] - 1 + target = self.source[line_index] + + spaces_to_add = num_indent_spaces - len(_get_indentation(target)) + indent = len(_get_indentation(target)) + modified_lines = [] + + while len(_get_indentation(self.source[line_index])) >= indent: + self.source[line_index] = (' ' * spaces_to_add + + self.source[line_index]) + modified_lines.append(1 + line_index) # Line indexed at 1. 
+ line_index -= 1 + + return modified_lines + + def fix_e131(self, result): + """Fix indentation undistinguish from the next logical line.""" + num_indent_spaces = int(result['info'].split()[1]) + line_index = result['line'] - 1 + target = self.source[line_index] + + spaces_to_add = num_indent_spaces - len(_get_indentation(target)) + + if spaces_to_add >= 0: + self.source[line_index] = (' ' * spaces_to_add + + self.source[line_index]) + else: + offset = abs(spaces_to_add) + self.source[line_index] = self.source[line_index][offset:] + + def fix_e201(self, result): + """Remove extraneous whitespace.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] - 1 + + fixed = fix_whitespace(target, + offset=offset, + replacement='') + + self.source[line_index] = fixed + + def fix_e224(self, result): + """Remove extraneous whitespace around operator.""" + target = self.source[result['line'] - 1] + offset = result['column'] - 1 + fixed = target[:offset] + target[offset:].replace('\t', ' ') + self.source[result['line'] - 1] = fixed + + def fix_e225(self, result): + """Fix missing whitespace around operator.""" + target = self.source[result['line'] - 1] + offset = result['column'] - 1 + fixed = target[:offset] + ' ' + target[offset:] + + # Only proceed if non-whitespace characters match. + # And make sure we don't break the indentation. + if ( + fixed.replace(' ', '') == target.replace(' ', '') and + _get_indentation(fixed) == _get_indentation(target) + ): + self.source[result['line'] - 1] = fixed + else: + return [] + + def fix_e231(self, result): + """Add missing whitespace.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] + fixed = target[:offset].rstrip() + ' ' + target[offset:].lstrip() + self.source[line_index] = fixed + + def fix_e251(self, result): + """Remove whitespace around parameter '=' sign.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + # This is necessary since pycodestyle sometimes reports columns that + # goes past the end of the physical line. 
This happens in cases like, + # foo(bar\n=None) + c = min(result['column'] - 1, + len(target) - 1) + + if target[c].strip(): + fixed = target + else: + fixed = target[:c].rstrip() + target[c:].lstrip() + + # There could be an escaped newline + # + # def foo(a=\ + # 1) + if fixed.endswith(('=\\\n', '=\\\r\n', '=\\\r')): + self.source[line_index] = fixed.rstrip('\n\r \t\\') + self.source[line_index + 1] = self.source[line_index + 1].lstrip() + return [line_index + 1, line_index + 2] # Line indexed at 1 + + self.source[result['line'] - 1] = fixed + + def fix_e262(self, result): + """Fix spacing after comment hash.""" + target = self.source[result['line'] - 1] + offset = result['column'] + + code = target[:offset].rstrip(' \t#') + comment = target[offset:].lstrip(' \t#') + + fixed = code + (' # ' + comment if comment.strip() else '\n') + + self.source[result['line'] - 1] = fixed + + def fix_e271(self, result): + """Fix extraneous whitespace around keywords.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] - 1 + + fixed = fix_whitespace(target, + offset=offset, + replacement=' ') + + if fixed == target: + return [] + else: + self.source[line_index] = fixed + + def fix_e301(self, result): + """Add missing blank line.""" + cr = '\n' + self.source[result['line'] - 1] = cr + self.source[result['line'] - 1] + + def fix_e302(self, result): + """Add missing 2 blank lines.""" + add_linenum = 2 - int(result['info'].split()[-1]) + cr = '\n' * add_linenum + self.source[result['line'] - 1] = cr + self.source[result['line'] - 1] + + def fix_e303(self, result): + """Remove extra blank lines.""" + delete_linenum = int(result['info'].split('(')[1].split(')')[0]) - 2 + delete_linenum = max(1, delete_linenum) + + # We need to count because pycodestyle reports an offset line number if + # there are comments. 
+ cnt = 0 + line = result['line'] - 2 + modified_lines = [] + while cnt < delete_linenum and line >= 0: + if not self.source[line].strip(): + self.source[line] = '' + modified_lines.append(1 + line) # Line indexed at 1 + cnt += 1 + line -= 1 + + return modified_lines + + def fix_e304(self, result): + """Remove blank line following function decorator.""" + line = result['line'] - 2 + if not self.source[line].strip(): + self.source[line] = '' + + def fix_e305(self, result): + """Add missing 2 blank lines after end of function or class.""" + cr = '\n' + # check comment line + offset = result['line'] - 2 + while True: + if offset < 0: + break + line = self.source[offset].lstrip() + if len(line) == 0: + break + if line[0] != '#': + break + offset -= 1 + offset += 1 + self.source[offset] = cr + self.source[offset] + + def fix_e401(self, result): + """Put imports on separate lines.""" + line_index = result['line'] - 1 + target = self.source[line_index] + offset = result['column'] - 1 + + if not target.lstrip().startswith('import'): + return [] + + indentation = re.split(pattern=r'\bimport\b', + string=target, maxsplit=1)[0] + fixed = (target[:offset].rstrip('\t ,') + '\n' + + indentation + 'import ' + target[offset:].lstrip('\t ,')) + self.source[line_index] = fixed + + def fix_long_line_logically(self, result, logical): + """Try to make lines fit within --max-line-length characters.""" + if ( + not logical or + len(logical[2]) == 1 or + self.source[result['line'] - 1].lstrip().startswith('#') + ): + return self.fix_long_line_physically(result) + + start_line_index = logical[0][0] + end_line_index = logical[1][0] + logical_lines = logical[2] + + previous_line = get_item(self.source, start_line_index - 1, default='') + next_line = get_item(self.source, end_line_index + 1, default='') + + single_line = join_logical_line(''.join(logical_lines)) + + try: + fixed = self.fix_long_line( + target=single_line, + previous_line=previous_line, + next_line=next_line, + original=''.join(logical_lines)) + except (SyntaxError, tokenize.TokenError): + return self.fix_long_line_physically(result) + + if fixed: + for line_index in range(start_line_index, end_line_index + 1): + self.source[line_index] = '' + self.source[start_line_index] = fixed + return range(start_line_index + 1, end_line_index + 1) + else: + return [] + + def fix_long_line_physically(self, result): + """Try to make lines fit within --max-line-length characters.""" + line_index = result['line'] - 1 + target = self.source[line_index] + + previous_line = get_item(self.source, line_index - 1, default='') + next_line = get_item(self.source, line_index + 1, default='') + + try: + fixed = self.fix_long_line( + target=target, + previous_line=previous_line, + next_line=next_line, + original=target) + except (SyntaxError, tokenize.TokenError): + return [] + + if fixed: + self.source[line_index] = fixed + return [line_index + 1] + else: + return [] + + def fix_long_line(self, target, previous_line, + next_line, original): + cache_entry = (target, previous_line, next_line) + if cache_entry in self.long_line_ignore_cache: + return [] + + if target.lstrip().startswith('#'): + # Wrap commented lines. 
+ return shorten_comment( + line=target, + max_line_length=self.options.max_line_length, + last_comment=not next_line.lstrip().startswith('#')) + + fixed = get_fixed_long_line( + target=target, + previous_line=previous_line, + original=original, + indent_word=self.indent_word, + max_line_length=self.options.max_line_length, + aggressive=self.options.aggressive, + experimental=self.options.experimental, + verbose=self.options.verbose) + if fixed and not code_almost_equal(original, fixed): + return fixed + else: + self.long_line_ignore_cache.add(cache_entry) + return None + + def fix_e502(self, result): + """Remove extraneous escape of newline.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + self.source[line_index] = target.rstrip('\n\r \t\\') + '\n' + + def fix_e701(self, result): + """Put colon-separated compound statement on separate lines.""" + line_index = result['line'] - 1 + target = self.source[line_index] + c = result['column'] + + fixed_source = (target[:c] + '\n' + + _get_indentation(target) + self.indent_word + + target[c:].lstrip('\n\r \t\\')) + self.source[result['line'] - 1] = fixed_source + return [result['line'], result['line'] + 1] + + def fix_e702(self, result, logical): + """Put semicolon-separated compound statement on separate lines.""" + if not logical: + return [] # pragma: no cover + logical_lines = logical[2] + + line_index = result['line'] - 1 + target = self.source[line_index] + + if target.rstrip().endswith('\\'): + # Normalize '1; \\\n2' into '1; 2'. + self.source[line_index] = target.rstrip('\n \r\t\\') + self.source[line_index + 1] = self.source[line_index + 1].lstrip() + return [line_index + 1, line_index + 2] + + if target.rstrip().endswith(';'): + self.source[line_index] = target.rstrip('\n \r\t;') + '\n' + return [line_index + 1] + + offset = result['column'] - 1 + first = target[:offset].rstrip(';').rstrip() + second = (_get_indentation(logical_lines[0]) + + target[offset:].lstrip(';').lstrip()) + + # Find inline comment. + inline_comment = None + if target[offset:].lstrip(';').lstrip()[:2] == '# ': + inline_comment = target[offset:].lstrip(';') + + if inline_comment: + self.source[line_index] = first + inline_comment + else: + self.source[line_index] = first + '\n' + second + return [line_index + 1] + + def fix_e704(self, result): + """Fix multiple statements on one line def""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + match = STARTSWITH_DEF_REGEX.match(target) + if match: + self.source[line_index] = '{0}\n{1}{2}'.format( + match.group(0), + _get_indentation(target) + self.indent_word, + target[match.end(0):].lstrip()) + + def fix_e711(self, result): + """Fix comparison with None.""" + (line_index, offset, target) = get_index_offset_contents(result, + self.source) + + right_offset = offset + 2 + if right_offset >= len(target): + return [] + + left = target[:offset].rstrip() + center = target[offset:right_offset] + right = target[right_offset:].lstrip() + + if not right.startswith('None'): + return [] + + if center.strip() == '==': + new_center = 'is' + elif center.strip() == '!=': + new_center = 'is not' + else: + return [] + + self.source[line_index] = ' '.join([left, new_center, right]) + + def fix_e712(self, result): + """Fix (trivial case of) comparison with boolean.""" + (line_index, offset, target) = get_index_offset_contents(result, + self.source) + + # Handle very easy "not" special cases. 
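# ----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of the vendored autopep8 source):
# fix_e711()/fix_e712() above only rewrite the trivial comparison forms,
# and filter_results() further down gates them behind aggressiveness
# (E711 needs -a, E712 needs -aa).  The sample source and the option
# value are assumptions for the demo.
import autopep8

before = (
    'if value == None:\n'
    '    pass\n'
    'if flag == True:\n'
    '    pass\n'
)

# aggressive=2 corresponds to -aa on the command line; "== None" becomes
# "is None" and the redundant "== True" comparison is dropped.
print(autopep8.fix_code(before, options={'aggressive': 2}))
# ----------------------------------------------------------------------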
+ if re.match(r'^\s*if [\w.]+ == False:$', target): + self.source[line_index] = re.sub(r'if ([\w.]+) == False:', + r'if not \1:', target, count=1) + elif re.match(r'^\s*if [\w.]+ != True:$', target): + self.source[line_index] = re.sub(r'if ([\w.]+) != True:', + r'if not \1:', target, count=1) + else: + right_offset = offset + 2 + if right_offset >= len(target): + return [] + + left = target[:offset].rstrip() + center = target[offset:right_offset] + right = target[right_offset:].lstrip() + + # Handle simple cases only. + new_right = None + if center.strip() == '==': + if re.match(r'\bTrue\b', right): + new_right = re.sub(r'\bTrue\b *', '', right, count=1) + elif center.strip() == '!=': + if re.match(r'\bFalse\b', right): + new_right = re.sub(r'\bFalse\b *', '', right, count=1) + + if new_right is None: + return [] + + if new_right[0].isalnum(): + new_right = ' ' + new_right + + self.source[line_index] = left + new_right + + def fix_e713(self, result): + """Fix (trivial case of) non-membership check.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + + match = COMPARE_NEGATIVE_REGEX.search(target) + if match: + if match.group(3) == 'in': + pos_start = match.start(1) + self.source[line_index] = '{0}{1} {2} {3} {4}'.format( + target[:pos_start], match.group(2), match.group(1), + match.group(3), target[match.end():]) + + def fix_e714(self, result): + """Fix object identity should be 'is not' case.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + + match = COMPARE_NEGATIVE_REGEX.search(target) + if match: + if match.group(3) == 'is': + pos_start = match.start(1) + self.source[line_index] = '{0}{1} {2} {3} {4}'.format( + target[:pos_start], match.group(2), match.group(3), + match.group(1), target[match.end():]) + + def fix_e722(self, result): + """fix bare except""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + if BARE_EXCEPT_REGEX.search(target): + self.source[line_index] = '{0}{1}'.format( + target[:result['column'] - 1], "except Exception:") + + def fix_e731(self, result): + """Fix do not assign a lambda expression check.""" + (line_index, _, target) = get_index_offset_contents(result, + self.source) + match = LAMBDA_REGEX.search(target) + if match: + end = match.end() + self.source[line_index] = '{0}def {1}({2}): return {3}'.format( + target[:match.start(0)], match.group(1), match.group(2), + target[end:].lstrip()) + + def fix_w291(self, result): + """Remove trailing whitespace.""" + fixed_line = self.source[result['line'] - 1].rstrip() + self.source[result['line'] - 1] = fixed_line + '\n' + + def fix_w391(self, _): + """Remove trailing blank lines.""" + blank_count = 0 + for line in reversed(self.source): + line = line.rstrip() + if line: + break + else: + blank_count += 1 + + original_length = len(self.source) + self.source = self.source[:original_length - blank_count] + return range(1, 1 + original_length) + + def fix_w503(self, result): + (line_index, _, target) = get_index_offset_contents(result, + self.source) + one_string_token = target.split()[0] + try: + ts = generate_tokens(one_string_token) + except tokenize.TokenError: + return + if not _is_binary_operator(ts[0][0], one_string_token): + return + i = target.index(one_string_token) + self.source[line_index] = '{0}{1}'.format( + target[:i], target[i + len(one_string_token):]) + nl = find_newline(self.source[line_index - 1:line_index]) + before_line = self.source[line_index - 1] + bl = before_line.index(nl) + self.source[line_index - 1] = 
'{0} {1}{2}'.format( + before_line[:bl], one_string_token, + before_line[bl:]) + + +def get_index_offset_contents(result, source): + """Return (line_index, column_offset, line_contents).""" + line_index = result['line'] - 1 + return (line_index, + result['column'] - 1, + source[line_index]) + + +def get_fixed_long_line(target, previous_line, original, + indent_word=' ', max_line_length=79, + aggressive=False, experimental=False, verbose=False): + """Break up long line and return result. + + Do this by generating multiple reformatted candidates and then + ranking the candidates to heuristically select the best option. + + """ + indent = _get_indentation(target) + source = target[len(indent):] + assert source.lstrip() == source + + # Check for partial multiline. + tokens = list(generate_tokens(source)) + + candidates = shorten_line( + tokens, source, indent, + indent_word, + max_line_length, + aggressive=aggressive, + experimental=experimental, + previous_line=previous_line) + + # Also sort alphabetically as a tie breaker (for determinism). + candidates = sorted( + sorted(set(candidates).union([target, original])), + key=lambda x: line_shortening_rank( + x, + indent_word, + max_line_length, + experimental=experimental)) + + if verbose >= 4: + print(('-' * 79 + '\n').join([''] + candidates + ['']), + file=wrap_output(sys.stderr, 'utf-8')) + + if candidates: + best_candidate = candidates[0] + # Don't allow things to get longer. + if longest_line_length(best_candidate) > longest_line_length(original): + return None + else: + return best_candidate + + +def longest_line_length(code): + """Return length of longest line.""" + return max(len(line) for line in code.splitlines()) + + +def join_logical_line(logical_line): + """Return single line based on logical line input.""" + indentation = _get_indentation(logical_line) + + return indentation + untokenize_without_newlines( + generate_tokens(logical_line.lstrip())) + '\n' + + +def untokenize_without_newlines(tokens): + """Return source code based on tokens.""" + text = '' + last_row = 0 + last_column = -1 + + for t in tokens: + token_string = t[1] + (start_row, start_column) = t[2] + (end_row, end_column) = t[3] + + if start_row > last_row: + last_column = 0 + if ( + (start_column > last_column or token_string == '\n') and + not text.endswith(' ') + ): + text += ' ' + + if token_string != '\n': + text += token_string + + last_row = end_row + last_column = end_column + + return text.rstrip() + + +def _find_logical(source_lines): + # Make a variable which is the index of all the starts of lines. + logical_start = [] + logical_end = [] + last_newline = True + parens = 0 + for t in generate_tokens(''.join(source_lines)): + if t[0] in [tokenize.COMMENT, tokenize.DEDENT, + tokenize.INDENT, tokenize.NL, + tokenize.ENDMARKER]: + continue + if not parens and t[0] in [tokenize.NEWLINE, tokenize.SEMI]: + last_newline = True + logical_end.append((t[3][0] - 1, t[2][1])) + continue + if last_newline and not parens: + logical_start.append((t[2][0] - 1, t[2][1])) + last_newline = False + if t[0] == tokenize.OP: + if t[1] in '([{': + parens += 1 + elif t[1] in '}])': + parens -= 1 + return (logical_start, logical_end) + + +def _get_logical(source_lines, result, logical_start, logical_end): + """Return the logical line corresponding to the result. + + Assumes input is already E702-clean. 
+ + """ + row = result['line'] - 1 + col = result['column'] - 1 + ls = None + le = None + for i in range(0, len(logical_start), 1): + assert logical_end + x = logical_end[i] + if x[0] > row or (x[0] == row and x[1] > col): + le = x + ls = logical_start[i] + break + if ls is None: + return None + original = source_lines[ls[0]:le[0] + 1] + return ls, le, original + + +def get_item(items, index, default=None): + if 0 <= index < len(items): + return items[index] + else: + return default + + +def reindent(source, indent_size): + """Reindent all lines.""" + reindenter = Reindenter(source) + return reindenter.run(indent_size) + + +def code_almost_equal(a, b): + """Return True if code is similar. + + Ignore whitespace when comparing specific line. + + """ + split_a = split_and_strip_non_empty_lines(a) + split_b = split_and_strip_non_empty_lines(b) + + if len(split_a) != len(split_b): + return False + + for (index, _) in enumerate(split_a): + if ''.join(split_a[index].split()) != ''.join(split_b[index].split()): + return False + + return True + + +def split_and_strip_non_empty_lines(text): + """Return lines split by newline. + + Ignore empty lines. + + """ + return [line.strip() for line in text.splitlines() if line.strip()] + + +def fix_e265(source, aggressive=False): # pylint: disable=unused-argument + """Format block comments.""" + if '#' not in source: + # Optimization. + return source + + ignored_line_numbers = multiline_string_lines( + source, + include_docstrings=True) | set(commented_out_code_lines(source)) + + fixed_lines = [] + sio = io.StringIO(source) + for (line_number, line) in enumerate(sio.readlines(), start=1): + if ( + line.lstrip().startswith('#') and + line_number not in ignored_line_numbers and + not pycodestyle.noqa(line) + ): + indentation = _get_indentation(line) + line = line.lstrip() + + # Normalize beginning if not a shebang. + if len(line) > 1: + pos = next((index for index, c in enumerate(line) + if c != '#')) + if ( + # Leave multiple spaces like '# ' alone. + (line[:pos].count('#') > 1 or line[1].isalnum()) and + # Leave stylistic outlined blocks alone. + not line.rstrip().endswith('#') + ): + line = '# ' + line.lstrip('# \t') + + fixed_lines.append(indentation + line) + else: + fixed_lines.append(line) + + return ''.join(fixed_lines) + + +def refactor(source, fixer_names, ignore=None, filename=''): + """Return refactored code using lib2to3. + + Skip if ignore string is produced in the refactored code. 
+ + """ + check_lib2to3() + from lib2to3 import pgen2 + try: + new_text = refactor_with_2to3(source, + fixer_names=fixer_names, + filename=filename) + except (pgen2.parse.ParseError, + SyntaxError, + UnicodeDecodeError, + UnicodeEncodeError): + return source + + if ignore: + if ignore in new_text and ignore not in source: + return source + + return new_text + + +def code_to_2to3(select, ignore): + fixes = set() + for code, fix in CODE_TO_2TO3.items(): + if code_match(code, select=select, ignore=ignore): + fixes |= set(fix) + return fixes + + +def fix_2to3(source, + aggressive=True, select=None, ignore=None, filename=''): + """Fix various deprecated code (via lib2to3).""" + if not aggressive: + return source + + select = select or [] + ignore = ignore or [] + + return refactor(source, + code_to_2to3(select=select, + ignore=ignore), + filename=filename) + + +def fix_w602(source, aggressive=True): + """Fix deprecated form of raising exception.""" + if not aggressive: + return source + + return refactor(source, ['raise'], + ignore='with_traceback') + + +def find_newline(source): + """Return type of newline used in source. + + Input is a list of lines. + + """ + assert not isinstance(source, unicode) + + counter = collections.defaultdict(int) + for line in source: + if line.endswith(CRLF): + counter[CRLF] += 1 + elif line.endswith(CR): + counter[CR] += 1 + elif line.endswith(LF): + counter[LF] += 1 + + return (sorted(counter, key=counter.get, reverse=True) or [LF])[0] + + +def _get_indentword(source): + """Return indentation type.""" + indent_word = ' ' # Default in case source has no indentation + try: + for t in generate_tokens(source): + if t[0] == token.INDENT: + indent_word = t[1] + break + except (SyntaxError, tokenize.TokenError): + pass + return indent_word + + +def _get_indentation(line): + """Return leading whitespace.""" + if line.strip(): + non_whitespace_index = len(line) - len(line.lstrip()) + return line[:non_whitespace_index] + else: + return '' + + +def get_diff_text(old, new, filename): + """Return text of unified diff between old and new.""" + newline = '\n' + diff = difflib.unified_diff( + old, new, + 'original/' + filename, + 'fixed/' + filename, + lineterm=newline) + + text = '' + for line in diff: + text += line + + # Work around missing newline (http://bugs.python.org/issue2142). + if text and not line.endswith(newline): + text += newline + r'\ No newline at end of file' + newline + + return text + + +def _priority_key(pep8_result): + """Key for sorting PEP8 results. + + Global fixes should be done first. This is important for things like + indentation. + + """ + priority = [ + # Fix multiline colon-based before semicolon based. + 'e701', + # Break multiline statements early. + 'e702', + # Things that make lines longer. + 'e225', 'e231', + # Remove extraneous whitespace before breaking lines. + 'e201', + # Shorten whitespace in comment before resorting to wrapping. + 'e262' + ] + middle_index = 10000 + lowest_priority = [ + # We need to shorten lines last since the logical fixer can get in a + # loop, which causes us to exit early. + 'e501' + ] + key = pep8_result['id'].lower() + try: + return priority.index(key) + except ValueError: + try: + return middle_index + lowest_priority.index(key) + 1 + except ValueError: + return middle_index + + +def shorten_line(tokens, source, indentation, indent_word, max_line_length, + aggressive=False, experimental=False, previous_line=''): + """Separate line at OPERATOR. + + Multiple candidates will be yielded. 
+ + """ + for candidate in _shorten_line(tokens=tokens, + source=source, + indentation=indentation, + indent_word=indent_word, + aggressive=aggressive, + previous_line=previous_line): + yield candidate + + if aggressive: + for key_token_strings in SHORTEN_OPERATOR_GROUPS: + shortened = _shorten_line_at_tokens( + tokens=tokens, + source=source, + indentation=indentation, + indent_word=indent_word, + key_token_strings=key_token_strings, + aggressive=aggressive) + + if shortened is not None and shortened != source: + yield shortened + + if experimental: + for shortened in _shorten_line_at_tokens_new( + tokens=tokens, + source=source, + indentation=indentation, + max_line_length=max_line_length): + + yield shortened + + +def _shorten_line(tokens, source, indentation, indent_word, + aggressive=False, previous_line=''): + """Separate line at OPERATOR. + + The input is expected to be free of newlines except for inside multiline + strings and at the end. + + Multiple candidates will be yielded. + + """ + for (token_type, + token_string, + start_offset, + end_offset) in token_offsets(tokens): + + if ( + token_type == tokenize.COMMENT and + not is_probably_part_of_multiline(previous_line) and + not is_probably_part_of_multiline(source) and + not source[start_offset + 1:].strip().lower().startswith( + ('noqa', 'pragma:', 'pylint:')) + ): + # Move inline comments to previous line. + first = source[:start_offset] + second = source[start_offset:] + yield (indentation + second.strip() + '\n' + + indentation + first.strip() + '\n') + elif token_type == token.OP and token_string != '=': + # Don't break on '=' after keyword as this violates PEP 8. + + assert token_type != token.INDENT + + first = source[:end_offset] + + second_indent = indentation + if first.rstrip().endswith('('): + second_indent += indent_word + elif '(' in first: + second_indent += ' ' * (1 + first.find('(')) + else: + second_indent += indent_word + + second = (second_indent + source[end_offset:].lstrip()) + if ( + not second.strip() or + second.lstrip().startswith('#') + ): + continue + + # Do not begin a line with a comma + if second.lstrip().startswith(','): + continue + # Do end a line with a dot + if first.rstrip().endswith('.'): + continue + if token_string in '+-*/': + fixed = first + ' \\' + '\n' + second + else: + fixed = first + '\n' + second + + # Only fix if syntax is okay. + if check_syntax(normalize_multiline(fixed) + if aggressive else fixed): + yield indentation + fixed + + +def _is_binary_operator(token_type, text): + return ((token_type == tokenize.OP or text in ['and', 'or']) and + text not in '()[]{},:.;@=%~') + + +# A convenient way to handle tokens. +Token = collections.namedtuple('Token', ['token_type', 'token_string', + 'spos', 'epos', 'line']) + + +class ReformattedLines(object): + + """The reflowed lines of atoms. + + Each part of the line is represented as an "atom." They can be moved + around when need be to get the optimal formatting. 
+ + """ + + ########################################################################### + # Private Classes + + class _Indent(object): + + """Represent an indentation in the atom stream.""" + + def __init__(self, indent_amt): + self._indent_amt = indent_amt + + def emit(self): + return ' ' * self._indent_amt + + @property + def size(self): + return self._indent_amt + + class _Space(object): + + """Represent a space in the atom stream.""" + + def emit(self): + return ' ' + + @property + def size(self): + return 1 + + class _LineBreak(object): + + """Represent a line break in the atom stream.""" + + def emit(self): + return '\n' + + @property + def size(self): + return 0 + + def __init__(self, max_line_length): + self._max_line_length = max_line_length + self._lines = [] + self._bracket_depth = 0 + self._prev_item = None + self._prev_prev_item = None + + def __repr__(self): + return self.emit() + + ########################################################################### + # Public Methods + + def add(self, obj, indent_amt, break_after_open_bracket): + if isinstance(obj, Atom): + self._add_item(obj, indent_amt) + return + + self._add_container(obj, indent_amt, break_after_open_bracket) + + def add_comment(self, item): + num_spaces = 2 + if len(self._lines) > 1: + if isinstance(self._lines[-1], self._Space): + num_spaces -= 1 + if len(self._lines) > 2: + if isinstance(self._lines[-2], self._Space): + num_spaces -= 1 + + while num_spaces > 0: + self._lines.append(self._Space()) + num_spaces -= 1 + self._lines.append(item) + + def add_indent(self, indent_amt): + self._lines.append(self._Indent(indent_amt)) + + def add_line_break(self, indent): + self._lines.append(self._LineBreak()) + self.add_indent(len(indent)) + + def add_line_break_at(self, index, indent_amt): + self._lines.insert(index, self._LineBreak()) + self._lines.insert(index + 1, self._Indent(indent_amt)) + + def add_space_if_needed(self, curr_text, equal=False): + if ( + not self._lines or isinstance( + self._lines[-1], (self._LineBreak, self._Indent, self._Space)) + ): + return + + prev_text = unicode(self._prev_item) + prev_prev_text = ( + unicode(self._prev_prev_item) if self._prev_prev_item else '') + + if ( + # The previous item was a keyword or identifier and the current + # item isn't an operator that doesn't require a space. + ((self._prev_item.is_keyword or self._prev_item.is_string or + self._prev_item.is_name or self._prev_item.is_number) and + (curr_text[0] not in '([{.,:}])' or + (curr_text[0] == '=' and equal))) or + + # Don't place spaces around a '.', unless it's in an 'import' + # statement. + ((prev_prev_text != 'from' and prev_text[-1] != '.' and + curr_text != 'import') and + + # Don't place a space before a colon. + curr_text[0] != ':' and + + # Don't split up ending brackets by spaces. + ((prev_text[-1] in '}])' and curr_text[0] not in '.,}])') or + + # Put a space after a colon or comma. + prev_text[-1] in ':,' or + + # Put space around '=' if asked to. + (equal and prev_text == '=') or + + # Put spaces around non-unary arithmetic operators. 
+ ((self._prev_prev_item and + (prev_text not in '+-' and + (self._prev_prev_item.is_name or + self._prev_prev_item.is_number or + self._prev_prev_item.is_string)) and + prev_text in ('+', '-', '%', '*', '/', '//', '**', 'in'))))) + ): + self._lines.append(self._Space()) + + def previous_item(self): + """Return the previous non-whitespace item.""" + return self._prev_item + + def fits_on_current_line(self, item_extent): + return self.current_size() + item_extent <= self._max_line_length + + def current_size(self): + """The size of the current line minus the indentation.""" + size = 0 + for item in reversed(self._lines): + size += item.size + if isinstance(item, self._LineBreak): + break + + return size + + def line_empty(self): + return (self._lines and + isinstance(self._lines[-1], + (self._LineBreak, self._Indent))) + + def emit(self): + string = '' + for item in self._lines: + if isinstance(item, self._LineBreak): + string = string.rstrip() + string += item.emit() + + return string.rstrip() + '\n' + + ########################################################################### + # Private Methods + + def _add_item(self, item, indent_amt): + """Add an item to the line. + + Reflow the line to get the best formatting after the item is + inserted. The bracket depth indicates if the item is being + inserted inside of a container or not. + + """ + if self._prev_item and self._prev_item.is_string and item.is_string: + # Place consecutive string literals on separate lines. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + + item_text = unicode(item) + if self._lines and self._bracket_depth: + # Adding the item into a container. + self._prevent_default_initializer_splitting(item, indent_amt) + + if item_text in '.,)]}': + self._split_after_delimiter(item, indent_amt) + + elif self._lines and not self.line_empty(): + # Adding the item outside of a container. + if self.fits_on_current_line(len(item_text)): + self._enforce_space(item) + + else: + # Line break for the new item. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + + self._lines.append(item) + self._prev_item, self._prev_prev_item = item, self._prev_item + + if item_text in '([{': + self._bracket_depth += 1 + + elif item_text in '}])': + self._bracket_depth -= 1 + assert self._bracket_depth >= 0 + + def _add_container(self, container, indent_amt, break_after_open_bracket): + actual_indent = indent_amt + 1 + + if ( + unicode(self._prev_item) != '=' and + not self.line_empty() and + not self.fits_on_current_line( + container.size + self._bracket_depth + 2) + ): + + if unicode(container)[0] == '(' and self._prev_item.is_name: + # Don't split before the opening bracket of a call. + break_after_open_bracket = True + actual_indent = indent_amt + 4 + elif ( + break_after_open_bracket or + unicode(self._prev_item) not in '([{' + ): + # If the container doesn't fit on the current line and the + # current line isn't empty, place the container on the next + # line. + self._lines.append(self._LineBreak()) + self._lines.append(self._Indent(indent_amt)) + break_after_open_bracket = False + else: + actual_indent = self.current_size() + 1 + break_after_open_bracket = False + + if isinstance(container, (ListComprehension, IfExpression)): + actual_indent = indent_amt + + # Increase the continued indentation only if recursing on a + # container. 
+ container.reflow(self, ' ' * actual_indent, + break_after_open_bracket=break_after_open_bracket) + + def _prevent_default_initializer_splitting(self, item, indent_amt): + """Prevent splitting between a default initializer. + + When there is a default initializer, it's best to keep it all on + the same line. It's nicer and more readable, even if it goes + over the maximum allowable line length. This goes back along the + current line to determine if we have a default initializer, and, + if so, to remove extraneous whitespaces and add a line + break/indent before it if needed. + + """ + if unicode(item) == '=': + # This is the assignment in the initializer. Just remove spaces for + # now. + self._delete_whitespace() + return + + if (not self._prev_item or not self._prev_prev_item or + unicode(self._prev_item) != '='): + return + + self._delete_whitespace() + prev_prev_index = self._lines.index(self._prev_prev_item) + + if ( + isinstance(self._lines[prev_prev_index - 1], self._Indent) or + self.fits_on_current_line(item.size + 1) + ): + # The default initializer is already the only item on this line. + # Don't insert a newline here. + return + + # Replace the space with a newline/indent combo. + if isinstance(self._lines[prev_prev_index - 1], self._Space): + del self._lines[prev_prev_index - 1] + + self.add_line_break_at(self._lines.index(self._prev_prev_item), + indent_amt) + + def _split_after_delimiter(self, item, indent_amt): + """Split the line only after a delimiter.""" + self._delete_whitespace() + + if self.fits_on_current_line(item.size): + return + + last_space = None + for item in reversed(self._lines): + if ( + last_space and + (not isinstance(item, Atom) or not item.is_colon) + ): + break + else: + last_space = None + if isinstance(item, self._Space): + last_space = item + if isinstance(item, (self._LineBreak, self._Indent)): + return + + if not last_space: + return + + self.add_line_break_at(self._lines.index(last_space), indent_amt) + + def _enforce_space(self, item): + """Enforce a space in certain situations. + + There are cases where we will want a space where normally we + wouldn't put one. This just enforces the addition of a space. + + """ + if isinstance(self._lines[-1], + (self._Space, self._LineBreak, self._Indent)): + return + + if not self._prev_item: + return + + item_text = unicode(item) + prev_text = unicode(self._prev_item) + + # Prefer a space around a '.' in an import statement, and between the + # 'import' and '('. + if ( + (item_text == '.' and prev_text == 'from') or + (item_text == 'import' and prev_text == '.') or + (item_text == '(' and prev_text == 'import') + ): + self._lines.append(self._Space()) + + def _delete_whitespace(self): + """Delete all whitespace from the end of the line.""" + while isinstance(self._lines[-1], (self._Space, self._LineBreak, + self._Indent)): + del self._lines[-1] + + +class Atom(object): + + """The smallest unbreakable unit that can be reflowed.""" + + def __init__(self, atom): + self._atom = atom + + def __repr__(self): + return self._atom.token_string + + def __len__(self): + return self.size + + def reflow( + self, reflowed_lines, continued_indent, extent, + break_after_open_bracket=False, + is_list_comp_or_if_expr=False, + next_is_dot=False + ): + if self._atom.token_type == tokenize.COMMENT: + reflowed_lines.add_comment(self) + return + + total_size = extent if extent else self.size + + if self._atom.token_string not in ',:([{}])': + # Some atoms will need an extra 1-sized space token after them. 
+ total_size += 1 + + prev_item = reflowed_lines.previous_item() + if ( + not is_list_comp_or_if_expr and + not reflowed_lines.fits_on_current_line(total_size) and + not (next_is_dot and + reflowed_lines.fits_on_current_line(self.size + 1)) and + not reflowed_lines.line_empty() and + not self.is_colon and + not (prev_item and prev_item.is_name and + unicode(self) == '(') + ): + # Start a new line if there is already something on the line and + # adding this atom would make it go over the max line length. + reflowed_lines.add_line_break(continued_indent) + else: + reflowed_lines.add_space_if_needed(unicode(self)) + + reflowed_lines.add(self, len(continued_indent), + break_after_open_bracket) + + def emit(self): + return self.__repr__() + + @property + def is_keyword(self): + return keyword.iskeyword(self._atom.token_string) + + @property + def is_string(self): + return self._atom.token_type == tokenize.STRING + + @property + def is_name(self): + return self._atom.token_type == tokenize.NAME + + @property + def is_number(self): + return self._atom.token_type == tokenize.NUMBER + + @property + def is_comma(self): + return self._atom.token_string == ',' + + @property + def is_colon(self): + return self._atom.token_string == ':' + + @property + def size(self): + return len(self._atom.token_string) + + +class Container(object): + + """Base class for all container types.""" + + def __init__(self, items): + self._items = items + + def __repr__(self): + string = '' + last_was_keyword = False + + for item in self._items: + if item.is_comma: + string += ', ' + elif item.is_colon: + string += ': ' + else: + item_string = unicode(item) + if ( + string and + (last_was_keyword or + (not string.endswith(tuple('([{,.:}]) ')) and + not item_string.startswith(tuple('([{,.:}])')))) + ): + string += ' ' + string += item_string + + last_was_keyword = item.is_keyword + return string + + def __iter__(self): + for element in self._items: + yield element + + def __getitem__(self, idx): + return self._items[idx] + + def reflow(self, reflowed_lines, continued_indent, + break_after_open_bracket=False): + last_was_container = False + for (index, item) in enumerate(self._items): + next_item = get_item(self._items, index + 1) + + if isinstance(item, Atom): + is_list_comp_or_if_expr = ( + isinstance(self, (ListComprehension, IfExpression))) + item.reflow(reflowed_lines, continued_indent, + self._get_extent(index), + is_list_comp_or_if_expr=is_list_comp_or_if_expr, + next_is_dot=(next_item and + unicode(next_item) == '.')) + if last_was_container and item.is_comma: + reflowed_lines.add_line_break(continued_indent) + last_was_container = False + else: # isinstance(item, Container) + reflowed_lines.add(item, len(continued_indent), + break_after_open_bracket) + last_was_container = not isinstance(item, (ListComprehension, + IfExpression)) + + if ( + break_after_open_bracket and index == 0 and + # Prefer to keep empty containers together instead of + # separating them. 
+ unicode(item) == self.open_bracket and + (not next_item or unicode(next_item) != self.close_bracket) and + (len(self._items) != 3 or not isinstance(next_item, Atom)) + ): + reflowed_lines.add_line_break(continued_indent) + break_after_open_bracket = False + else: + next_next_item = get_item(self._items, index + 2) + if ( + unicode(item) not in ['.', '%', 'in'] and + next_item and not isinstance(next_item, Container) and + unicode(next_item) != ':' and + next_next_item and (not isinstance(next_next_item, Atom) or + unicode(next_item) == 'not') and + not reflowed_lines.line_empty() and + not reflowed_lines.fits_on_current_line( + self._get_extent(index + 1) + 2) + ): + reflowed_lines.add_line_break(continued_indent) + + def _get_extent(self, index): + """The extent of the full element. + + E.g., the length of a function call or keyword. + + """ + extent = 0 + prev_item = get_item(self._items, index - 1) + seen_dot = prev_item and unicode(prev_item) == '.' + while index < len(self._items): + item = get_item(self._items, index) + index += 1 + + if isinstance(item, (ListComprehension, IfExpression)): + break + + if isinstance(item, Container): + if prev_item and prev_item.is_name: + if seen_dot: + extent += 1 + else: + extent += item.size + + prev_item = item + continue + elif (unicode(item) not in ['.', '=', ':', 'not'] and + not item.is_name and not item.is_string): + break + + if unicode(item) == '.': + seen_dot = True + + extent += item.size + prev_item = item + + return extent + + @property + def is_string(self): + return False + + @property + def size(self): + return len(self.__repr__()) + + @property + def is_keyword(self): + return False + + @property + def is_name(self): + return False + + @property + def is_comma(self): + return False + + @property + def is_colon(self): + return False + + @property + def open_bracket(self): + return None + + @property + def close_bracket(self): + return None + + +class Tuple(Container): + + """A high-level representation of a tuple.""" + + @property + def open_bracket(self): + return '(' + + @property + def close_bracket(self): + return ')' + + +class List(Container): + + """A high-level representation of a list.""" + + @property + def open_bracket(self): + return '[' + + @property + def close_bracket(self): + return ']' + + +class DictOrSet(Container): + + """A high-level representation of a dictionary or set.""" + + @property + def open_bracket(self): + return '{' + + @property + def close_bracket(self): + return '}' + + +class ListComprehension(Container): + + """A high-level representation of a list comprehension.""" + + @property + def size(self): + length = 0 + for item in self._items: + if isinstance(item, IfExpression): + break + length += item.size + return length + + +class IfExpression(Container): + + """A high-level representation of an if-expression.""" + + +def _parse_container(tokens, index, for_or_if=None): + """Parse a high-level container, such as a list, tuple, etc.""" + + # Store the opening bracket. + items = [Atom(Token(*tokens[index]))] + index += 1 + + num_tokens = len(tokens) + while index < num_tokens: + tok = Token(*tokens[index]) + + if tok.token_string in ',)]}': + # First check if we're at the end of a list comprehension or + # if-expression. Don't add the ending token as part of the list + # comprehension or if-expression, because they aren't part of those + # constructs. 
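# ----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of the vendored autopep8 source):
# _parse_tokens()/_parse_container() feed the experimental reflow path,
# which is reached end to end via fix_code() with experimental enabled.
# The sample line and the max_line_length value are arbitrary.
import autopep8

src = ("result = {'alpha': 1, 'beta': 2, 'gamma': 3, "
       "'delta': 4, 'epsilon': 5}\n")

# With the 40-column limit the dict literal no longer fits, so the reflow
# machinery typically splits it across continuation lines.
print(autopep8.fix_code(src, options={'experimental': True,
                                      'max_line_length': 40}))
# ----------------------------------------------------------------------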
+ if for_or_if == 'for': + return (ListComprehension(items), index - 1) + + elif for_or_if == 'if': + return (IfExpression(items), index - 1) + + # We've reached the end of a container. + items.append(Atom(tok)) + + # If not, then we are at the end of a container. + if tok.token_string == ')': + # The end of a tuple. + return (Tuple(items), index) + + elif tok.token_string == ']': + # The end of a list. + return (List(items), index) + + elif tok.token_string == '}': + # The end of a dictionary or set. + return (DictOrSet(items), index) + + elif tok.token_string in '([{': + # A sub-container is being defined. + (container, index) = _parse_container(tokens, index) + items.append(container) + + elif tok.token_string == 'for': + (container, index) = _parse_container(tokens, index, 'for') + items.append(container) + + elif tok.token_string == 'if': + (container, index) = _parse_container(tokens, index, 'if') + items.append(container) + + else: + items.append(Atom(tok)) + + index += 1 + + return (None, None) + + +def _parse_tokens(tokens): + """Parse the tokens. + + This converts the tokens into a form where we can manipulate them + more easily. + + """ + + index = 0 + parsed_tokens = [] + + num_tokens = len(tokens) + while index < num_tokens: + tok = Token(*tokens[index]) + + assert tok.token_type != token.INDENT + if tok.token_type == tokenize.NEWLINE: + # There's only one newline and it's at the end. + break + + if tok.token_string in '([{': + (container, index) = _parse_container(tokens, index) + if not container: + return None + parsed_tokens.append(container) + else: + parsed_tokens.append(Atom(tok)) + + index += 1 + + return parsed_tokens + + +def _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line): + """Reflow the lines so that it looks nice.""" + + if unicode(parsed_tokens[0]) == 'def': + # A function definition gets indented a bit more. + continued_indent = indentation + ' ' * 2 * DEFAULT_INDENT_SIZE + else: + continued_indent = indentation + ' ' * DEFAULT_INDENT_SIZE + + break_after_open_bracket = not start_on_prefix_line + + lines = ReformattedLines(max_line_length) + lines.add_indent(len(indentation.lstrip('\r\n'))) + + if not start_on_prefix_line: + # If splitting after the opening bracket will cause the first element + # to be aligned weirdly, don't try it. + first_token = get_item(parsed_tokens, 0) + second_token = get_item(parsed_tokens, 1) + + if ( + first_token and second_token and + unicode(second_token)[0] == '(' and + len(indentation) + len(first_token) + 1 == len(continued_indent) + ): + return None + + for item in parsed_tokens: + lines.add_space_if_needed(unicode(item), equal=True) + + save_continued_indent = continued_indent + if start_on_prefix_line and isinstance(item, Container): + start_on_prefix_line = False + continued_indent = ' ' * (lines.current_size() + 1) + + item.reflow(lines, continued_indent, break_after_open_bracket) + continued_indent = save_continued_indent + + return lines.emit() + + +def _shorten_line_at_tokens_new(tokens, source, indentation, + max_line_length): + """Shorten the line taking its length into account. + + The input is expected to be free of newlines except for inside + multiline strings and at the end. + + """ + # Yield the original source so to see if it's a better choice than the + # shortened candidate lines we generate here. + yield indentation + source + + parsed_tokens = _parse_tokens(tokens) + + if parsed_tokens: + # Perform two reflows. The first one starts on the same line as the + # prefix. 
The second starts on the line after the prefix. + fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line=True) + if fixed and check_syntax(normalize_multiline(fixed.lstrip())): + yield fixed + + fixed = _reflow_lines(parsed_tokens, indentation, max_line_length, + start_on_prefix_line=False) + if fixed and check_syntax(normalize_multiline(fixed.lstrip())): + yield fixed + + +def _shorten_line_at_tokens(tokens, source, indentation, indent_word, + key_token_strings, aggressive): + """Separate line by breaking at tokens in key_token_strings. + + The input is expected to be free of newlines except for inside + multiline strings and at the end. + + """ + offsets = [] + for (index, _t) in enumerate(token_offsets(tokens)): + (token_type, + token_string, + start_offset, + end_offset) = _t + + assert token_type != token.INDENT + + if token_string in key_token_strings: + # Do not break in containers with zero or one items. + unwanted_next_token = { + '(': ')', + '[': ']', + '{': '}'}.get(token_string) + if unwanted_next_token: + if ( + get_item(tokens, + index + 1, + default=[None, None])[1] == unwanted_next_token or + get_item(tokens, + index + 2, + default=[None, None])[1] == unwanted_next_token + ): + continue + + if ( + index > 2 and token_string == '(' and + tokens[index - 1][1] in ',(%[' + ): + # Don't split after a tuple start, or before a tuple start if + # the tuple is in a list. + continue + + if end_offset < len(source) - 1: + # Don't split right before newline. + offsets.append(end_offset) + else: + # Break at adjacent strings. These were probably meant to be on + # separate lines in the first place. + previous_token = get_item(tokens, index - 1) + if ( + token_type == tokenize.STRING and + previous_token and previous_token[0] == tokenize.STRING + ): + offsets.append(start_offset) + + current_indent = None + fixed = None + for line in split_at_offsets(source, offsets): + if fixed: + fixed += '\n' + current_indent + line + + for symbol in '([{': + if line.endswith(symbol): + current_indent += indent_word + else: + # First line. + fixed = line + assert not current_indent + current_indent = indent_word + + assert fixed is not None + + if check_syntax(normalize_multiline(fixed) + if aggressive > 1 else fixed): + return indentation + fixed + else: + return None + + +def token_offsets(tokens): + """Yield tokens and offsets.""" + end_offset = 0 + previous_end_row = 0 + previous_end_column = 0 + for t in tokens: + token_type = t[0] + token_string = t[1] + (start_row, start_column) = t[2] + (end_row, end_column) = t[3] + + # Account for the whitespace between tokens. + end_offset += start_column + if previous_end_row == start_row: + end_offset -= previous_end_column + + # Record the start offset of the token. + start_offset = end_offset + + # Account for the length of the token itself. + end_offset += len(token_string) + + yield (token_type, + token_string, + start_offset, + end_offset) + + previous_end_row = end_row + previous_end_column = end_column + + +def normalize_multiline(line): + """Normalize multiline-related code that will cause syntax error. + + This is for purposes of checking syntax. 
+ + """ + if line.startswith('def ') and line.rstrip().endswith(':'): + return line + ' pass' + elif line.startswith('return '): + return 'def _(): ' + line + elif line.startswith('@'): + return line + 'def _(): pass' + elif line.startswith('class '): + return line + ' pass' + elif line.startswith(('if ', 'elif ', 'for ', 'while ')): + return line + ' pass' + else: + return line + + +def fix_whitespace(line, offset, replacement): + """Replace whitespace at offset and return fixed line.""" + # Replace escaped newlines too + left = line[:offset].rstrip('\n\r \t\\') + right = line[offset:].lstrip('\n\r \t\\') + if right.startswith('#'): + return line + else: + return left + replacement + right + + +def _execute_pep8(pep8_options, source): + """Execute pycodestyle via python method calls.""" + class QuietReport(pycodestyle.BaseReport): + + """Version of checker that does not print.""" + + def __init__(self, options): + super(QuietReport, self).__init__(options) + self.__full_error_results = [] + + def error(self, line_number, offset, text, check): + """Collect errors.""" + code = super(QuietReport, self).error(line_number, + offset, + text, + check) + if code: + self.__full_error_results.append( + {'id': code, + 'line': line_number, + 'column': offset + 1, + 'info': text}) + + def full_error_results(self): + """Return error results in detail. + + Results are in the form of a list of dictionaries. Each + dictionary contains 'id', 'line', 'column', and 'info'. + + """ + return self.__full_error_results + + checker = pycodestyle.Checker('', lines=source, reporter=QuietReport, + **pep8_options) + checker.check_all() + return checker.report.full_error_results() + + +def _remove_leading_and_normalize(line): + return line.lstrip().rstrip(CR + LF) + '\n' + + +class Reindenter(object): + + """Reindents badly-indented code to uniformly use four-space indentation. + + Released to the public domain, by Tim Peters, 03 October 2000. + + """ + + def __init__(self, input_text): + sio = io.StringIO(input_text) + source_lines = sio.readlines() + + self.string_content_line_numbers = multiline_string_lines(input_text) + + # File lines, rstripped & tab-expanded. Dummy at start is so + # that we can use tokenize's 1-based line numbering easily. + # Note that a line is all-blank iff it is a newline. + self.lines = [] + for line_number, line in enumerate(source_lines, start=1): + # Do not modify if inside a multiline string. + if line_number in self.string_content_line_numbers: + self.lines.append(line) + else: + # Only expand leading tabs. + self.lines.append(_get_indentation(line).expandtabs() + + _remove_leading_and_normalize(line)) + + self.lines.insert(0, None) + self.index = 1 # index into self.lines of next line + self.input_text = input_text + + def run(self, indent_size=DEFAULT_INDENT_SIZE): + """Fix indentation and return modified line numbers. + + Line numbers are indexed at 1. + + """ + if indent_size < 1: + return self.input_text + + try: + stats = _reindent_stats(tokenize.generate_tokens(self.getline)) + except (SyntaxError, tokenize.TokenError): + return self.input_text + # Remove trailing empty lines. + lines = self.lines + # Sentinel. + stats.append((len(lines), 0)) + # Map count of leading spaces to # we want. + have2want = {} + # Program after transformation. + after = [] + # Copy over initial empty lines -- there's nothing to do until + # we see a line with *something* on it. 
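# ----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of the vendored autopep8 source):
# Reindenter is normally reached through the module-level reindent()
# helper defined earlier in this file.  The badly indented sample is an
# assumption for the demo.
import autopep8

badly_indented = (
    'def greet(name):\n'
    '  if name:\n'
    '        return "hi " + name\n'
    '  return "hi"\n'
)

# run() maps each indentation level onto multiples of indent_size while
# leaving lines inside multiline strings untouched.
print(autopep8.reindent(badly_indented, indent_size=4))
# ----------------------------------------------------------------------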
+ i = stats[0][0] + after.extend(lines[1:i]) + for i in range(len(stats) - 1): + thisstmt, thislevel = stats[i] + nextstmt = stats[i + 1][0] + have = _leading_space_count(lines[thisstmt]) + want = thislevel * indent_size + if want < 0: + # A comment line. + if have: + # An indented comment line. If we saw the same + # indentation before, reuse what it most recently + # mapped to. + want = have2want.get(have, -1) + if want < 0: + # Then it probably belongs to the next real stmt. + for j in range(i + 1, len(stats) - 1): + jline, jlevel = stats[j] + if jlevel >= 0: + if have == _leading_space_count(lines[jline]): + want = jlevel * indent_size + break + if want < 0: # Maybe it's a hanging + # comment like this one, + # in which case we should shift it like its base + # line got shifted. + for j in range(i - 1, -1, -1): + jline, jlevel = stats[j] + if jlevel >= 0: + want = (have + _leading_space_count( + after[jline - 1]) - + _leading_space_count(lines[jline])) + break + if want < 0: + # Still no luck -- leave it alone. + want = have + else: + want = 0 + assert want >= 0 + have2want[have] = want + diff = want - have + if diff == 0 or have == 0: + after.extend(lines[thisstmt:nextstmt]) + else: + for line_number, line in enumerate(lines[thisstmt:nextstmt], + start=thisstmt): + if line_number in self.string_content_line_numbers: + after.append(line) + elif diff > 0: + if line == '\n': + after.append(line) + else: + after.append(' ' * diff + line) + else: + remove = min(_leading_space_count(line), -diff) + after.append(line[remove:]) + + return ''.join(after) + + def getline(self): + """Line-getter for tokenize.""" + if self.index >= len(self.lines): + line = '' + else: + line = self.lines[self.index] + self.index += 1 + return line + + +def _reindent_stats(tokens): + """Return list of (lineno, indentlevel) pairs. + + One for each stmt and comment line. indentlevel is -1 for comment + lines, as a signal that tokenize doesn't know what to do about them; + indeed, they're our headache! + + """ + find_stmt = 1 # Next token begins a fresh stmt? + level = 0 # Current indent level. + stats = [] + + for t in tokens: + token_type = t[0] + sline = t[2][0] + line = t[4] + + if token_type == tokenize.NEWLINE: + # A program statement, or ENDMARKER, will eventually follow, + # after some (possibly empty) run of tokens of the form + # (NL | COMMENT)* (INDENT | DEDENT+)? + find_stmt = 1 + + elif token_type == tokenize.INDENT: + find_stmt = 1 + level += 1 + + elif token_type == tokenize.DEDENT: + find_stmt = 1 + level -= 1 + + elif token_type == tokenize.COMMENT: + if find_stmt: + stats.append((sline, -1)) + # But we're still looking for a new stmt, so leave + # find_stmt alone. + + elif token_type == tokenize.NL: + pass + + elif find_stmt: + # This is the first "real token" following a NEWLINE, so it + # must be the first token of the next program statement, or an + # ENDMARKER. + find_stmt = 0 + if line: # Not endmarker. + stats.append((sline, level)) + + return stats + + +def _leading_space_count(line): + """Return number of leading spaces in line.""" + i = 0 + while i < len(line) and line[i] == ' ': + i += 1 + return i + + +def refactor_with_2to3(source_text, fixer_names, filename=''): + """Use lib2to3 to refactor the source. + + Return the refactored source code. 
+ + """ + from lib2to3.refactor import RefactoringTool + fixers = ['lib2to3.fixes.fix_' + name for name in fixer_names] + tool = RefactoringTool(fixer_names=fixers, explicit=fixers) + + from lib2to3.pgen2 import tokenize as lib2to3_tokenize + try: + # The name parameter is necessary particularly for the "import" fixer. + return unicode(tool.refactor_string(source_text, name=filename)) + except lib2to3_tokenize.TokenError: + return source_text + + +def check_syntax(code): + """Return True if syntax is okay.""" + try: + return compile(code, '', 'exec') + except (SyntaxError, TypeError, UnicodeDecodeError): + return False + + +def filter_results(source, results, aggressive): + """Filter out spurious reports from pycodestyle. + + If aggressive is True, we allow possibly unsafe fixes (E711, E712). + + """ + non_docstring_string_line_numbers = multiline_string_lines( + source, include_docstrings=False) + all_string_line_numbers = multiline_string_lines( + source, include_docstrings=True) + + commented_out_code_line_numbers = commented_out_code_lines(source) + + has_e901 = any(result['id'].lower() == 'e901' for result in results) + + for r in results: + issue_id = r['id'].lower() + + if r['line'] in non_docstring_string_line_numbers: + if issue_id.startswith(('e1', 'e501', 'w191')): + continue + + if r['line'] in all_string_line_numbers: + if issue_id in ['e501']: + continue + + # We must offset by 1 for lines that contain the trailing contents of + # multiline strings. + if not aggressive and (r['line'] + 1) in all_string_line_numbers: + # Do not modify multiline strings in non-aggressive mode. Remove + # trailing whitespace could break doctests. + if issue_id.startswith(('w29', 'w39')): + continue + + if aggressive <= 0: + if issue_id.startswith(('e711', 'e72', 'w6')): + continue + + if aggressive <= 1: + if issue_id.startswith(('e712', 'e713', 'e714', 'w5')): + continue + + if aggressive <= 2: + if issue_id.startswith(('e704', 'w5')): + continue + + if r['line'] in commented_out_code_line_numbers: + if issue_id.startswith(('e26', 'e501')): + continue + + # Do not touch indentation if there is a token error caused by + # incomplete multi-line statement. Otherwise, we risk screwing up the + # indentation. + if has_e901: + if issue_id.startswith(('e1', 'e7')): + continue + + yield r + + +def multiline_string_lines(source, include_docstrings=False): + """Return line numbers that are within multiline strings. + + The line numbers are indexed at 1. + + Docstrings are ignored. + + """ + line_numbers = set() + previous_token_type = '' + try: + for t in generate_tokens(source): + token_type = t[0] + start_row = t[2][0] + end_row = t[3][0] + + if token_type == tokenize.STRING and start_row != end_row: + if ( + include_docstrings or + previous_token_type != tokenize.INDENT + ): + # We increment by one since we want the contents of the + # string. + line_numbers |= set(range(1 + start_row, 1 + end_row)) + + previous_token_type = token_type + except (SyntaxError, tokenize.TokenError): + pass + + return line_numbers + + +def commented_out_code_lines(source): + """Return line numbers of comments that are likely code. + + Commented-out code is bad practice, but modifying it just adds even + more clutter. + + """ + line_numbers = [] + try: + for t in generate_tokens(source): + token_type = t[0] + token_string = t[1] + start_row = t[2][0] + line = t[4] + + # Ignore inline comments. 
+ if not line.lstrip().startswith('#'): + continue + + if token_type == tokenize.COMMENT: + stripped_line = token_string.lstrip('#').strip() + if ( + ' ' in stripped_line and + '#' not in stripped_line and + check_syntax(stripped_line) + ): + line_numbers.append(start_row) + except (SyntaxError, tokenize.TokenError): + pass + + return line_numbers + + +def shorten_comment(line, max_line_length, last_comment=False): + """Return trimmed or split long comment line. + + If there are no comments immediately following it, do a text wrap. + Doing this wrapping on all comments in general would lead to jagged + comment text. + + """ + assert len(line) > max_line_length + line = line.rstrip() + + # PEP 8 recommends 72 characters for comment text. + indentation = _get_indentation(line) + '# ' + max_line_length = min(max_line_length, + len(indentation) + 72) + + MIN_CHARACTER_REPEAT = 5 + if ( + len(line) - len(line.rstrip(line[-1])) >= MIN_CHARACTER_REPEAT and + not line[-1].isalnum() + ): + # Trim comments that end with things like --------- + return line[:max_line_length] + '\n' + elif last_comment and re.match(r'\s*#+\s*\w+', line): + split_lines = textwrap.wrap(line.lstrip(' \t#'), + initial_indent=indentation, + subsequent_indent=indentation, + width=max_line_length, + break_long_words=False, + break_on_hyphens=False) + return '\n'.join(split_lines) + '\n' + else: + return line + '\n' + + +def normalize_line_endings(lines, newline): + """Return fixed line endings. + + All lines will be modified to use the most common line ending. + + """ + return [line.rstrip('\n\r') + newline for line in lines] + + +def mutual_startswith(a, b): + return b.startswith(a) or a.startswith(b) + + +def code_match(code, select, ignore): + if ignore: + assert not isinstance(ignore, unicode) + for ignored_code in [c.strip() for c in ignore]: + if mutual_startswith(code.lower(), ignored_code.lower()): + return False + + if select: + assert not isinstance(select, unicode) + for selected_code in [c.strip() for c in select]: + if mutual_startswith(code.lower(), selected_code.lower()): + return True + return False + + return True + + +def fix_code(source, options=None, encoding=None, apply_config=False): + """Return fixed source code. + + "encoding" will be used to decode "source" if it is a byte string. + + """ + options = _get_options(options, apply_config) + + if not isinstance(source, unicode): + source = source.decode(encoding or get_encoding()) + + sio = io.StringIO(source) + return fix_lines(sio.readlines(), options=options) + + +def _get_options(raw_options, apply_config): + """Return parsed options.""" + if not raw_options: + return parse_args([''], apply_config=apply_config) + + if isinstance(raw_options, dict): + options = parse_args([''], apply_config=apply_config) + for name, value in raw_options.items(): + if not hasattr(options, name): + raise ValueError("No such option '{}'".format(name)) + + # Check for very basic type errors. + expected_type = type(getattr(options, name)) + if not isinstance(expected_type, (str, unicode)): + if isinstance(value, (str, unicode)): + raise ValueError( + "Option '{}' should not be a string".format(name)) + setattr(options, name, value) + else: + options = raw_options + + return options + + +def fix_lines(source_lines, options, filename=''): + """Return fixed source code.""" + # Transform everything to line feed. Then change them back to original + # before returning fixed source code. 
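# ----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of the vendored autopep8 source):
# code_match() above treats --select/--ignore entries as mutual prefixes,
# and an ignore match wins before select is consulted.  The values below
# are made up for the demo.
import autopep8

print(autopep8.code_match('E501', select=['E5'], ignore=[]))    # True
print(autopep8.code_match('E501', select=[], ignore=['E501']))  # False
print(autopep8.code_match('W291', select=['E'], ignore=[]))     # False
# ----------------------------------------------------------------------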
+ original_newline = find_newline(source_lines) + tmp_source = ''.join(normalize_line_endings(source_lines, '\n')) + + # Keep a history to break out of cycles. + previous_hashes = set() + + if options.line_range: + # Disable "apply_local_fixes()" for now due to issue #175. + fixed_source = tmp_source + else: + # Apply global fixes only once (for efficiency). + fixed_source = apply_global_fixes(tmp_source, + options, + filename=filename) + + passes = 0 + long_line_ignore_cache = set() + while hash(fixed_source) not in previous_hashes: + if options.pep8_passes >= 0 and passes > options.pep8_passes: + break + passes += 1 + + previous_hashes.add(hash(fixed_source)) + + tmp_source = copy.copy(fixed_source) + + fix = FixPEP8( + filename, + options, + contents=tmp_source, + long_line_ignore_cache=long_line_ignore_cache) + + fixed_source = fix.fix() + + sio = io.StringIO(fixed_source) + return ''.join(normalize_line_endings(sio.readlines(), original_newline)) + + +def fix_file(filename, options=None, output=None, apply_config=False): + if not options: + options = parse_args([filename], apply_config=apply_config) + + original_source = readlines_from_file(filename) + + fixed_source = original_source + + if options.in_place or output: + encoding = detect_encoding(filename) + + if output: + output = LineEndingWrapper(wrap_output(output, encoding=encoding)) + + fixed_source = fix_lines(fixed_source, options, filename=filename) + + if options.diff: + new = io.StringIO(fixed_source) + new = new.readlines() + diff = get_diff_text(original_source, new, filename) + if output: + output.write(diff) + output.flush() + else: + return diff + elif options.in_place: + fp = open_with_encoding(filename, encoding=encoding, mode='w') + fp.write(fixed_source) + fp.close() + else: + if output: + output.write(fixed_source) + output.flush() + else: + return fixed_source + + +def global_fixes(): + """Yield multiple (code, function) tuples.""" + for function in list(globals().values()): + if inspect.isfunction(function): + arguments = _get_parameters(function) + if arguments[:1] != ['source']: + continue + + code = extract_code_from_function(function) + if code: + yield (code, function) + + +def _get_parameters(function): + # pylint: disable=deprecated-method + if sys.version_info >= (3, 3): + # We need to match "getargspec()", which includes "self" as the first + # value for methods. + # https://bugs.python.org/issue17481#msg209469 + if inspect.ismethod(function): + function = function.__func__ + + return list(inspect.signature(function).parameters) + else: + return inspect.getargspec(function)[0] + + +def apply_global_fixes(source, options, where='global', filename=''): + """Run global fixes on source code. + + These are fixes that only need be done once (unlike those in + FixPEP8, which are dependent on pycodestyle). 
+ + """ + if any(code_match(code, select=options.select, ignore=options.ignore) + for code in ['E101', 'E111']): + source = reindent(source, + indent_size=options.indent_size) + + for (code, function) in global_fixes(): + if code_match(code, select=options.select, ignore=options.ignore): + if options.verbose: + print('---> Applying {0} fix for {1}'.format(where, + code.upper()), + file=sys.stderr) + source = function(source, + aggressive=options.aggressive) + + source = fix_2to3(source, + aggressive=options.aggressive, + select=options.select, + ignore=options.ignore, + filename=filename) + + return source + + +def extract_code_from_function(function): + """Return code handled by function.""" + if not function.__name__.startswith('fix_'): + return None + + code = re.sub('^fix_', '', function.__name__) + if not code: + return None + + try: + int(code[1:]) + except ValueError: + return None + + return code + + +def _get_package_version(): + packages = ["pycodestyle: {0}".format(pycodestyle.__version__)] + return ", ".join(packages) + + +def create_parser(): + """Return command-line parser.""" + # Do import locally to be friendly to those who use autopep8 as a library + # and are supporting Python 2.6. + import argparse + + parser = argparse.ArgumentParser(description=docstring_summary(__doc__), + prog='autopep8') + parser.add_argument('--version', action='version', + version='%(prog)s {0} ({1})'.format( + __version__, _get_package_version())) + parser.add_argument('-v', '--verbose', action='count', + default=0, + help='print verbose messages; ' + 'multiple -v result in more verbose messages') + parser.add_argument('-d', '--diff', action='store_true', + help='print the diff for the fixed source') + parser.add_argument('-i', '--in-place', action='store_true', + help='make changes to files in place') + parser.add_argument('--global-config', metavar='filename', + default=DEFAULT_CONFIG, + help='path to a global pep8 config file; if this file ' + 'does not exist then this is ignored ' + '(default: {0})'.format(DEFAULT_CONFIG)) + parser.add_argument('--ignore-local-config', action='store_true', + help="don't look for and apply local config files; " + 'if not passed, defaults are updated with any ' + "config files in the project's root directory") + parser.add_argument('-r', '--recursive', action='store_true', + help='run recursively over directories; ' + 'must be used with --in-place or --diff') + parser.add_argument('-j', '--jobs', type=int, metavar='n', default=1, + help='number of parallel jobs; ' + 'match CPU count if value is less than 1') + parser.add_argument('-p', '--pep8-passes', metavar='n', + default=-1, type=int, + help='maximum number of additional pep8 passes ' + '(default: infinite)') + parser.add_argument('-a', '--aggressive', action='count', default=0, + help='enable non-whitespace changes; ' + 'multiple -a result in more aggressive changes') + parser.add_argument('--experimental', action='store_true', + help='enable experimental fixes') + parser.add_argument('--exclude', metavar='globs', + help='exclude file/directory names that match these ' + 'comma-separated globs') + parser.add_argument('--list-fixes', action='store_true', + help='list codes for fixes; ' + 'used by --ignore and --select') + parser.add_argument('--ignore', metavar='errors', default='', + help='do not fix these errors/warnings ' + '(default: {0})'.format(DEFAULT_IGNORE)) + parser.add_argument('--select', metavar='errors', default='', + help='fix only these errors/warnings (e.g. 
E4,W)') + parser.add_argument('--max-line-length', metavar='n', default=79, type=int, + help='set maximum allowed line length ' + '(default: %(default)s)') + parser.add_argument('--line-range', '--range', metavar='line', + default=None, type=int, nargs=2, + help='only fix errors found within this inclusive ' + 'range of line numbers (e.g. 1 99); ' + 'line numbers are indexed at 1') + parser.add_argument('--indent-size', default=DEFAULT_INDENT_SIZE, + type=int, help=argparse.SUPPRESS) + parser.add_argument('files', nargs='*', + help="files to format or '-' for standard in") + + return parser + + +def parse_args(arguments, apply_config=False): + """Parse command-line options.""" + parser = create_parser() + args = parser.parse_args(arguments) + + if not args.files and not args.list_fixes: + parser.error('incorrect number of arguments') + + args.files = [decode_filename(name) for name in args.files] + + if apply_config: + parser = read_config(args, parser) + args = parser.parse_args(arguments) + args.files = [decode_filename(name) for name in args.files] + + if '-' in args.files: + if len(args.files) > 1: + parser.error('cannot mix stdin and regular files') + + if args.diff: + parser.error('--diff cannot be used with standard input') + + if args.in_place: + parser.error('--in-place cannot be used with standard input') + + if args.recursive: + parser.error('--recursive cannot be used with standard input') + + if len(args.files) > 1 and not (args.in_place or args.diff): + parser.error('autopep8 only takes one filename as argument ' + 'unless the "--in-place" or "--diff" args are ' + 'used') + + if args.recursive and not (args.in_place or args.diff): + parser.error('--recursive must be used with --in-place or --diff') + + if args.in_place and args.diff: + parser.error('--in-place and --diff are mutually exclusive') + + if args.max_line_length <= 0: + parser.error('--max-line-length must be greater than 0') + + if args.select: + args.select = _split_comma_separated(args.select) + + if args.ignore: + args.ignore = _split_comma_separated(args.ignore) + elif not args.select: + if args.aggressive: + # Enable everything by default if aggressive. + args.select = set(['E', 'W']) + else: + args.ignore = _split_comma_separated(DEFAULT_IGNORE) + + if args.exclude: + args.exclude = _split_comma_separated(args.exclude) + else: + args.exclude = set([]) + + if args.jobs < 1: + # Do not import multiprocessing globally in case it is not supported + # on the platform. 
+ import multiprocessing + args.jobs = multiprocessing.cpu_count() + + if args.jobs > 1 and not args.in_place: + parser.error('parallel jobs requires --in-place') + + if args.line_range: + if args.line_range[0] <= 0: + parser.error('--range must be positive numbers') + if args.line_range[0] > args.line_range[1]: + parser.error('First value of --range should be less than or equal ' + 'to the second') + + return args + + +def read_config(args, parser): + """Read both user configuration and local configuration.""" + try: + from configparser import ConfigParser as SafeConfigParser + from configparser import Error + except ImportError: + from ConfigParser import SafeConfigParser + from ConfigParser import Error + + config = SafeConfigParser() + + try: + config.read(args.global_config) + + if not args.ignore_local_config: + parent = tail = args.files and os.path.abspath( + os.path.commonprefix(args.files)) + while tail: + if config.read([os.path.join(parent, fn) + for fn in PROJECT_CONFIG]): + break + (parent, tail) = os.path.split(parent) + + defaults = dict() + option_list = dict([(o.dest, o.type or type(o.default)) + for o in parser._actions]) + + for section in ['pep8', 'pycodestyle']: + if not config.has_section(section): + continue + for k, v in config.items(section): + norm_opt = k.lstrip('-').replace('-', '_') + opt_type = option_list[norm_opt] + if opt_type is int: + value = config.getint(section, k) + elif opt_type is bool: + value = config.getboolean(section, k) + else: + value = config.get(section, k) + defaults[norm_opt] = value + + parser.set_defaults(**defaults) + except Error: + # Ignore for now. + pass + + return parser + + +def _split_comma_separated(string): + """Return a set of strings.""" + return set(text.strip() for text in string.split(',') if text.strip()) + + +def decode_filename(filename): + """Return Unicode filename.""" + if isinstance(filename, unicode): + return filename + else: + return filename.decode(sys.getfilesystemencoding()) + + +def supported_fixes(): + """Yield pep8 error codes that autopep8 fixes. + + Each item we yield is a tuple of the code followed by its + description. + + """ + yield ('E101', docstring_summary(reindent.__doc__)) + + instance = FixPEP8(filename=None, options=None, contents='') + for attribute in dir(instance): + code = re.match('fix_([ew][0-9][0-9][0-9])', attribute) + if code: + yield ( + code.group(1).upper(), + re.sub(r'\s+', ' ', + docstring_summary(getattr(instance, attribute).__doc__)) + ) + + for (code, function) in sorted(global_fixes()): + yield (code.upper() + (4 - len(code)) * ' ', + re.sub(r'\s+', ' ', docstring_summary(function.__doc__))) + + for code in sorted(CODE_TO_2TO3): + yield (code.upper() + (4 - len(code)) * ' ', + re.sub(r'\s+', ' ', docstring_summary(fix_2to3.__doc__))) + + +def docstring_summary(docstring): + """Return summary of docstring.""" + return docstring.split('\n')[0] if docstring else '' + + +def line_shortening_rank(candidate, indent_word, max_line_length, + experimental=False): + """Return rank of candidate. + + This is for sorting candidates. + + """ + if not candidate.strip(): + return 0 + + rank = 0 + lines = candidate.rstrip().split('\n') + + offset = 0 + if ( + not lines[0].lstrip().startswith('#') and + lines[0].rstrip()[-1] not in '([{' + ): + for (opening, closing) in ('()', '[]', '{}'): + # Don't penalize empty containers that aren't split up. Things like + # this "foo(\n )" aren't particularly good. 
+ opening_loc = lines[0].find(opening) + closing_loc = lines[0].find(closing) + if opening_loc >= 0: + if closing_loc < 0 or closing_loc != opening_loc + 1: + offset = max(offset, 1 + opening_loc) + + current_longest = max(offset + len(x.strip()) for x in lines) + + rank += 4 * max(0, current_longest - max_line_length) + + rank += len(lines) + + # Too much variation in line length is ugly. + rank += 2 * standard_deviation(len(line) for line in lines) + + bad_staring_symbol = { + '(': ')', + '[': ']', + '{': '}'}.get(lines[0][-1]) + + if len(lines) > 1: + if ( + bad_staring_symbol and + lines[1].lstrip().startswith(bad_staring_symbol) + ): + rank += 20 + + for lineno, current_line in enumerate(lines): + current_line = current_line.strip() + + if current_line.startswith('#'): + continue + + for bad_start in ['.', '%', '+', '-', '/']: + if current_line.startswith(bad_start): + rank += 100 + + # Do not tolerate operators on their own line. + if current_line == bad_start: + rank += 1000 + + if ( + current_line.endswith(('.', '%', '+', '-', '/')) and + "': " in current_line + ): + rank += 1000 + + if current_line.endswith(('(', '[', '{', '.')): + # Avoid lonely opening. They result in longer lines. + if len(current_line) <= len(indent_word): + rank += 100 + + # Avoid the ugliness of ", (\n". + if ( + current_line.endswith('(') and + current_line[:-1].rstrip().endswith(',') + ): + rank += 100 + + # Avoid the ugliness of "something[\n" and something[index][\n. + if ( + current_line.endswith('[') and + len(current_line) > 1 and + (current_line[-2].isalnum() or current_line[-2] in ']') + ): + rank += 300 + + # Also avoid the ugliness of "foo.\nbar" + if current_line.endswith('.'): + rank += 100 + + if has_arithmetic_operator(current_line): + rank += 100 + + # Avoid breaking at unary operators. + if re.match(r'.*[(\[{]\s*[\-\+~]$', current_line.rstrip('\\ ')): + rank += 1000 + + if re.match(r'.*lambda\s*\*$', current_line.rstrip('\\ ')): + rank += 1000 + + if current_line.endswith(('%', '(', '[', '{')): + rank -= 20 + + # Try to break list comprehensions at the "for". + if current_line.startswith('for '): + rank -= 50 + + if current_line.endswith('\\'): + # If a line ends in \-newline, it may be part of a + # multiline string. In that case, we would like to know + # how long that line is without the \-newline. If it's + # longer than the maximum, or has comments, then we assume + # that the \-newline is an okay candidate and only + # penalize it a bit. + total_len = len(current_line) + lineno += 1 + while lineno < len(lines): + total_len += len(lines[lineno]) + + if lines[lineno].lstrip().startswith('#'): + total_len = max_line_length + break + + if not lines[lineno].endswith('\\'): + break + + lineno += 1 + + if total_len < max_line_length: + rank += 10 + else: + rank += 100 if experimental else 1 + + # Prefer breaking at commas rather than colon. + if ',' in current_line and current_line.endswith(':'): + rank += 10 + + # Avoid splitting dictionaries between key and value. 
+ if current_line.endswith(':'): + rank += 100 + + rank += 10 * count_unbalanced_brackets(current_line) + + return max(0, rank) + + +def standard_deviation(numbers): + """Return standard devation.""" + numbers = list(numbers) + if not numbers: + return 0 + mean = sum(numbers) / len(numbers) + return (sum((n - mean) ** 2 for n in numbers) / + len(numbers)) ** .5 + + +def has_arithmetic_operator(line): + """Return True if line contains any arithmetic operators.""" + for operator in pycodestyle.ARITHMETIC_OP: + if operator in line: + return True + + return False + + +def count_unbalanced_brackets(line): + """Return number of unmatched open/close brackets.""" + count = 0 + for opening, closing in ['()', '[]', '{}']: + count += abs(line.count(opening) - line.count(closing)) + + return count + + +def split_at_offsets(line, offsets): + """Split line at offsets. + + Return list of strings. + + """ + result = [] + + previous_offset = 0 + current_offset = 0 + for current_offset in sorted(offsets): + if current_offset < len(line) and previous_offset != current_offset: + result.append(line[previous_offset:current_offset].strip()) + previous_offset = current_offset + + result.append(line[current_offset:]) + + return result + + +class LineEndingWrapper(object): + + r"""Replace line endings to work with sys.stdout. + + It seems that sys.stdout expects only '\n' as the line ending, no matter + the platform. Otherwise, we get repeated line endings. + + """ + + def __init__(self, output): + self.__output = output + + def write(self, s): + self.__output.write(s.replace('\r\n', '\n').replace('\r', '\n')) + + def flush(self): + self.__output.flush() + + +def match_file(filename, exclude): + """Return True if file is okay for modifying/recursing.""" + base_name = os.path.basename(filename) + + if base_name.startswith('.'): + return False + + for pattern in exclude: + if fnmatch.fnmatch(base_name, pattern): + return False + if fnmatch.fnmatch(filename, pattern): + return False + + if not os.path.isdir(filename) and not is_python_file(filename): + return False + + return True + + +def find_files(filenames, recursive, exclude): + """Yield filenames.""" + while filenames: + name = filenames.pop(0) + if recursive and os.path.isdir(name): + for root, directories, children in os.walk(name): + filenames += [os.path.join(root, f) for f in children + if match_file(os.path.join(root, f), + exclude)] + directories[:] = [d for d in directories + if match_file(os.path.join(root, d), + exclude)] + else: + yield name + + +def _fix_file(parameters): + """Helper function for optionally running fix_file() in parallel.""" + if parameters[1].verbose: + print('[file:{0}]'.format(parameters[0]), file=sys.stderr) + try: + fix_file(*parameters) + except IOError as error: + print(unicode(error), file=sys.stderr) + + +def fix_multiple_files(filenames, options, output=None): + """Fix list of files. + + Optionally fix files recursively. 
+ + """ + filenames = find_files(filenames, options.recursive, options.exclude) + if options.jobs > 1: + import multiprocessing + pool = multiprocessing.Pool(options.jobs) + pool.map(_fix_file, + [(name, options) for name in filenames]) + else: + for name in filenames: + _fix_file((name, options, output)) + + +def is_python_file(filename): + """Return True if filename is Python file.""" + if filename.endswith('.py'): + return True + + try: + with open_with_encoding( + filename, + limit_byte_check=MAX_PYTHON_FILE_DETECTION_BYTES) as f: + text = f.read(MAX_PYTHON_FILE_DETECTION_BYTES) + if not text: + return False + first_line = text.splitlines()[0] + except (IOError, IndexError): + return False + + if not PYTHON_SHEBANG_REGEX.match(first_line): + return False + + return True + + +def is_probably_part_of_multiline(line): + """Return True if line is likely part of a multiline string. + + When multiline strings are involved, pep8 reports the error as being + at the start of the multiline string, which doesn't work for us. + + """ + return ( + '"""' in line or + "'''" in line or + line.rstrip().endswith('\\') + ) + + +def wrap_output(output, encoding): + """Return output with specified encoding.""" + return codecs.getwriter(encoding)(output.buffer + if hasattr(output, 'buffer') + else output) + + +def get_encoding(): + """Return preferred encoding.""" + return locale.getpreferredencoding() or sys.getdefaultencoding() + + +def main(argv=None, apply_config=True): + """Command-line entry.""" + if argv is None: + argv = sys.argv + + try: + # Exit on broken pipe. + signal.signal(signal.SIGPIPE, signal.SIG_DFL) + except AttributeError: # pragma: no cover + # SIGPIPE is not available on Windows. + pass + + try: + args = parse_args(argv[1:], apply_config=apply_config) + + if args.list_fixes: + for code, description in sorted(supported_fixes()): + print('{code} - {description}'.format( + code=code, description=description)) + return 0 + + if args.files == ['-']: + assert not args.in_place + + encoding = sys.stdin.encoding or get_encoding() + + # LineEndingWrapper is unnecessary here due to the symmetry between + # standard in and standard out. + wrap_output(sys.stdout, encoding=encoding).write( + fix_code(sys.stdin.read(), args, encoding=encoding)) + else: + if args.in_place or args.diff: + args.files = list(set(args.files)) + else: + assert len(args.files) == 1 + assert not args.recursive + + fix_multiple_files(args.files, args, sys.stdout) + except KeyboardInterrupt: + return 1 # pragma: no cover + + +class CachedTokenizer(object): + + """A one-element cache around tokenize.generate_tokens(). + + Original code written by Ned Batchelder, in coverage.py. 
+ + """ + + def __init__(self): + self.last_text = None + self.last_tokens = None + + def generate_tokens(self, text): + """A stand-in for tokenize.generate_tokens().""" + if text != self.last_text: + string_io = io.StringIO(text) + self.last_tokens = list( + tokenize.generate_tokens(string_io.readline) + ) + self.last_text = text + return self.last_tokens + + +_cached_tokenizer = CachedTokenizer() +generate_tokens = _cached_tokenizer.generate_tokens + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/.gitignore b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/.gitignore new file mode 100644 index 00000000..1c45ce5b --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/.gitignore @@ -0,0 +1 @@ +*.pickle diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/Grammar.txt b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/Grammar.txt new file mode 100644 index 00000000..1e1f24cf --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/Grammar.txt @@ -0,0 +1,158 @@ +# Grammar for 2to3. This grammar supports Python 2.x and 3.x. + +# Note: Changing the grammar specified in this file will most likely +# require corresponding changes in the parser module +# (../Modules/parsermodule.c). If you can't make the changes to +# that module yourself, please co-ordinate the required changes +# with someone who can; ask around on python-dev for help. Fred +# Drake will probably be listening there. + +# NOTE WELL: You should also follow all the steps listed in PEP 306, +# "How to Change Python's Grammar" + +# Commands for Kees Blom's railroad program +#diagram:token NAME +#diagram:token NUMBER +#diagram:token STRING +#diagram:token NEWLINE +#diagram:token ENDMARKER +#diagram:token INDENT +#diagram:output\input python.bla +#diagram:token DEDENT +#diagram:output\textwidth 20.04cm\oddsidemargin 0.0cm\evensidemargin 0.0cm +#diagram:rules + +# Start symbols for the grammar: +# file_input is a module or sequence of commands read from an input file; +# single_input is a single interactive statement; +# eval_input is the input for the eval() and input() functions. +# NB: compound_stmt in single_input is followed by extra NEWLINE! 
+file_input: (NEWLINE | stmt)* ENDMARKER +single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE +eval_input: testlist NEWLINE* ENDMARKER + +decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE +decorators: decorator+ +decorated: decorators (classdef | funcdef) +funcdef: 'def' NAME parameters ['->' test] ':' suite +parameters: '(' [typedargslist] ')' +typedargslist: ((tfpdef ['=' test] ',')* + ('*' [tname] (',' tname ['=' test])* [',' '**' tname] | '**' tname) + | tfpdef ['=' test] (',' tfpdef ['=' test])* [',']) +tname: NAME [':' test] +tfpdef: tname | '(' tfplist ')' +tfplist: tfpdef (',' tfpdef)* [','] +varargslist: ((vfpdef ['=' test] ',')* + ('*' [vname] (',' vname ['=' test])* [',' '**' vname] | '**' vname) + | vfpdef ['=' test] (',' vfpdef ['=' test])* [',']) +vname: NAME +vfpdef: vname | '(' vfplist ')' +vfplist: vfpdef (',' vfpdef)* [','] + +stmt: simple_stmt | compound_stmt +simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE +small_stmt: (expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt | + import_stmt | global_stmt | exec_stmt | assert_stmt) +expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) | + ('=' (yield_expr|testlist_star_expr))*) +testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] +augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' | + '<<=' | '>>=' | '**=' | '//=') +# For normal assignments, additional restrictions enforced by the interpreter +print_stmt: 'print' ( [ test (',' test)* [','] ] | + '>>' test [ (',' test)+ [','] ] ) +del_stmt: 'del' exprlist +pass_stmt: 'pass' +flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt | yield_stmt +break_stmt: 'break' +continue_stmt: 'continue' +return_stmt: 'return' [testlist] +yield_stmt: yield_expr +raise_stmt: 'raise' [test ['from' test | ',' test [',' test]]] +import_stmt: import_name | import_from +import_name: 'import' dotted_as_names +import_from: ('from' ('.'* dotted_name | '.'+) + 'import' ('*' | '(' import_as_names ')' | import_as_names)) +import_as_name: NAME ['as' NAME] +dotted_as_name: dotted_name ['as' NAME] +import_as_names: import_as_name (',' import_as_name)* [','] +dotted_as_names: dotted_as_name (',' dotted_as_name)* +dotted_name: NAME ('.' 
NAME)* +global_stmt: ('global' | 'nonlocal') NAME (',' NAME)* +exec_stmt: 'exec' expr ['in' test [',' test]] +assert_stmt: 'assert' test [',' test] + +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated +if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] +while_stmt: 'while' test ':' suite ['else' ':' suite] +for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] +try_stmt: ('try' ':' suite + ((except_clause ':' suite)+ + ['else' ':' suite] + ['finally' ':' suite] | + 'finally' ':' suite)) +with_stmt: 'with' with_item (',' with_item)* ':' suite +with_item: test ['as' expr] +with_var: 'as' expr +# NB compile.c makes sure that the default except clause is last +except_clause: 'except' [test [(',' | 'as') test]] +suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT + +# Backward compatibility cruft to support: +# [ x for x in lambda: True, lambda: False if x() ] +# even while also allowing: +# lambda x: 5 if x else 2 +# (But not a mix of the two) +testlist_safe: old_test [(',' old_test)+ [',']] +old_test: or_test | old_lambdef +old_lambdef: 'lambda' [varargslist] ':' old_test + +test: or_test ['if' or_test 'else' test] | lambdef +or_test: and_test ('or' and_test)* +and_test: not_test ('and' not_test)* +not_test: 'not' not_test | comparison +comparison: expr (comp_op expr)* +comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' +star_expr: '*' expr +expr: xor_expr ('|' xor_expr)* +xor_expr: and_expr ('^' and_expr)* +and_expr: shift_expr ('&' shift_expr)* +shift_expr: arith_expr (('<<'|'>>') arith_expr)* +arith_expr: term (('+'|'-') term)* +term: factor (('*'|'/'|'%'|'//') factor)* +factor: ('+'|'-'|'~') factor | power +power: atom trailer* ['**' factor] +atom: ('(' [yield_expr|testlist_gexp] ')' | + '[' [listmaker] ']' | + '{' [dictsetmaker] '}' | + '`' testlist1 '`' | + NAME | NUMBER | STRING+ | '.' '.' '.') +listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) +lambdef: 'lambda' [varargslist] ':' test +trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME +subscriptlist: subscript (',' subscript)* [','] +subscript: test | [test] ':' [test] [sliceop] +sliceop: ':' [test] +exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] +testlist: test (',' test)* [','] +dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) | + (test (comp_for | (',' test)* [','])) ) + +classdef: 'class' NAME ['(' [arglist] ')'] ':' suite + +arglist: (argument ',')* (argument [','] + |'*' test (',' argument)* [',' '**' test] + |'**' test) +argument: test [comp_for] | test '=' test # Really [keyword '='] test + +comp_iter: comp_for | comp_if +comp_for: 'for' exprlist 'in' testlist_safe [comp_iter] +comp_if: 'if' old_test [comp_iter] + +testlist1: test (',' test)* + +# not used in grammar, but may appear in "node" passed from Parser to Compiler +encoding_decl: NAME + +yield_expr: 'yield' [testlist] diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/PatternGrammar.txt b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/PatternGrammar.txt new file mode 100644 index 00000000..36bf8148 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/PatternGrammar.txt @@ -0,0 +1,28 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# A grammar to describe tree matching patterns. 
+# Not shown here: +# - 'TOKEN' stands for any token (leaf node) +# - 'any' stands for any node (leaf or interior) +# With 'any' we can still specify the sub-structure. + +# The start symbol is 'Matcher'. + +Matcher: Alternatives ENDMARKER + +Alternatives: Alternative ('|' Alternative)* + +Alternative: (Unit | NegatedUnit)+ + +Unit: [NAME '='] ( STRING [Repeater] + | NAME [Details] [Repeater] + | '(' Alternatives ')' [Repeater] + | '[' Alternatives ']' + ) + +NegatedUnit: 'not' (STRING | NAME [Details] | '(' Alternatives ')') + +Repeater: '*' | '+' | '{' NUMBER [',' NUMBER] '}' + +Details: '<' Alternatives '>' diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/__init__.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/__init__.py new file mode 100644 index 00000000..ea30561d --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/__init__.py @@ -0,0 +1 @@ +#empty diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/__main__.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/__main__.py new file mode 100644 index 00000000..80688baf --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/__main__.py @@ -0,0 +1,4 @@ +import sys +from .main import main + +sys.exit(main("lib2to3.fixes")) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/btm_matcher.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/btm_matcher.py new file mode 100644 index 00000000..736ba2b9 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/btm_matcher.py @@ -0,0 +1,168 @@ +"""A bottom-up tree matching algorithm implementation meant to speed +up 2to3's matching process. After the tree patterns are reduced to +their rarest linear path, a linear Aho-Corasick automaton is +created. The linear automaton traverses the linear paths from the +leaves to the root of the AST and returns a set of nodes for further +matching. This reduces significantly the number of candidate nodes.""" + +__author__ = "George Boutsioukis " + +import logging +import itertools +from collections import defaultdict + +from . import pytree +from .btm_utils import reduce_tree + +class BMNode(object): + """Class for a node of the Aho-Corasick automaton used in matching""" + count = itertools.count() + def __init__(self): + self.transition_table = {} + self.fixers = [] + self.id = next(BMNode.count) + self.content = '' + +class BottomMatcher(object): + """The main matcher class. After instantiating the patterns should + be added using the add_fixer method""" + + def __init__(self): + self.match = set() + self.root = BMNode() + self.nodes = [self.root] + self.fixers = [] + self.logger = logging.getLogger("RefactoringTool") + + def add_fixer(self, fixer): + """Reduces a fixer's pattern tree to a linear path and adds it + to the matcher(a common Aho-Corasick automaton). 
The fixer is + appended on the matching states and called when they are + reached""" + self.fixers.append(fixer) + tree = reduce_tree(fixer.pattern_tree) + linear = tree.get_linear_subpattern() + match_nodes = self.add(linear, start=self.root) + for match_node in match_nodes: + match_node.fixers.append(fixer) + + def add(self, pattern, start): + "Recursively adds a linear pattern to the AC automaton" + #print("adding pattern", pattern, "to", start) + if not pattern: + #print("empty pattern") + return [start] + if isinstance(pattern[0], tuple): + #alternatives + #print("alternatives") + match_nodes = [] + for alternative in pattern[0]: + #add all alternatives, and add the rest of the pattern + #to each end node + end_nodes = self.add(alternative, start=start) + for end in end_nodes: + match_nodes.extend(self.add(pattern[1:], end)) + return match_nodes + else: + #single token + #not last + if pattern[0] not in start.transition_table: + #transition did not exist, create new + next_node = BMNode() + start.transition_table[pattern[0]] = next_node + else: + #transition exists already, follow + next_node = start.transition_table[pattern[0]] + + if pattern[1:]: + end_nodes = self.add(pattern[1:], start=next_node) + else: + end_nodes = [next_node] + return end_nodes + + def run(self, leaves): + """The main interface with the bottom matcher. The tree is + traversed from the bottom using the constructed + automaton. Nodes are only checked once as the tree is + retraversed. When the automaton fails, we give it one more + shot(in case the above tree matches as a whole with the + rejected leaf), then we break for the next leaf. There is the + special case of multiple arguments(see code comments) where we + recheck the nodes + + Args: + The leaves of the AST tree to be matched + + Returns: + A dictionary of node matches with fixers as the keys + """ + current_ac_node = self.root + results = defaultdict(list) + for leaf in leaves: + current_ast_node = leaf + while current_ast_node: + current_ast_node.was_checked = True + for child in current_ast_node.children: + # multiple statements, recheck + if isinstance(child, pytree.Leaf) and child.value == u";": + current_ast_node.was_checked = False + break + if current_ast_node.type == 1: + #name + node_token = current_ast_node.value + else: + node_token = current_ast_node.type + + if node_token in current_ac_node.transition_table: + #token matches + current_ac_node = current_ac_node.transition_table[node_token] + for fixer in current_ac_node.fixers: + if not fixer in results: + results[fixer] = [] + results[fixer].append(current_ast_node) + + else: + #matching failed, reset automaton + current_ac_node = self.root + if (current_ast_node.parent is not None + and current_ast_node.parent.was_checked): + #the rest of the tree upwards has been checked, next leaf + break + + #recheck the rejected node once from the root + if node_token in current_ac_node.transition_table: + #token matches + current_ac_node = current_ac_node.transition_table[node_token] + for fixer in current_ac_node.fixers: + if not fixer in results.keys(): + results[fixer] = [] + results[fixer].append(current_ast_node) + + current_ast_node = current_ast_node.parent + return results + + def print_ac(self): + "Prints a graphviz diagram of the BM automaton(for debugging)" + print("digraph g{") + def print_node(node): + for subnode_key in node.transition_table.keys(): + subnode = node.transition_table[subnode_key] + print("%d -> %d [label=%s] //%s" % + (node.id, subnode.id, type_repr(subnode_key), 
str(subnode.fixers))) + if subnode_key == 1: + print(subnode.content) + print_node(subnode) + print_node(self.root) + print("}") + +# taken from pytree.py for debugging; only used by print_ac +_type_reprs = {} +def type_repr(type_num): + global _type_reprs + if not _type_reprs: + from .pygram import python_symbols + # printing tokens is possible but not as useful + # from .pgen2 import token // token.__dict__.items(): + for name, val in python_symbols.__dict__.items(): + if type(val) == int: _type_reprs[val] = name + return _type_reprs.setdefault(type_num, type_num) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/btm_utils.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/btm_utils.py new file mode 100644 index 00000000..2276dc9e --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/btm_utils.py @@ -0,0 +1,283 @@ +"Utility functions used by the btm_matcher module" + +from . import pytree +from .pgen2 import grammar, token +from .pygram import pattern_symbols, python_symbols + +syms = pattern_symbols +pysyms = python_symbols +tokens = grammar.opmap +token_labels = token + +TYPE_ANY = -1 +TYPE_ALTERNATIVES = -2 +TYPE_GROUP = -3 + +class MinNode(object): + """This class serves as an intermediate representation of the + pattern tree during the conversion to sets of leaf-to-root + subpatterns""" + + def __init__(self, type=None, name=None): + self.type = type + self.name = name + self.children = [] + self.leaf = False + self.parent = None + self.alternatives = [] + self.group = [] + + def __repr__(self): + return str(self.type) + ' ' + str(self.name) + + def leaf_to_root(self): + """Internal method. Returns a characteristic path of the + pattern tree. This method must be run for all leaves until the + linear subpatterns are merged into a single""" + node = self + subp = [] + while node: + if node.type == TYPE_ALTERNATIVES: + node.alternatives.append(subp) + if len(node.alternatives) == len(node.children): + #last alternative + subp = [tuple(node.alternatives)] + node.alternatives = [] + node = node.parent + continue + else: + node = node.parent + subp = None + break + + if node.type == TYPE_GROUP: + node.group.append(subp) + #probably should check the number of leaves + if len(node.group) == len(node.children): + subp = get_characteristic_subpattern(node.group) + node.group = [] + node = node.parent + continue + else: + node = node.parent + subp = None + break + + if node.type == token_labels.NAME and node.name: + #in case of type=name, use the name instead + subp.append(node.name) + else: + subp.append(node.type) + + node = node.parent + return subp + + def get_linear_subpattern(self): + """Drives the leaf_to_root method. The reason that + leaf_to_root must be run multiple times is because we need to + reject 'group' matches; for example the alternative form + (a | b c) creates a group [b c] that needs to be matched. Since + matching multiple linear patterns overcomes the automaton's + capabilities, leaf_to_root merges each group into a single + choice based on 'characteristic'ity, + + i.e. (a|b c) -> (a|b) if b more characteristic than c + + Returns: The most 'characteristic'(as defined by + get_characteristic_subpattern) path for the compiled pattern + tree. 
+ """ + + for l in self.leaves(): + subp = l.leaf_to_root() + if subp: + return subp + + def leaves(self): + "Generator that returns the leaves of the tree" + for child in self.children: + for x in child.leaves(): + yield x + if not self.children: + yield self + +def reduce_tree(node, parent=None): + """ + Internal function. Reduces a compiled pattern tree to an + intermediate representation suitable for feeding the + automaton. This also trims off any optional pattern elements(like + [a], a*). + """ + + new_node = None + #switch on the node type + if node.type == syms.Matcher: + #skip + node = node.children[0] + + if node.type == syms.Alternatives : + #2 cases + if len(node.children) <= 2: + #just a single 'Alternative', skip this node + new_node = reduce_tree(node.children[0], parent) + else: + #real alternatives + new_node = MinNode(type=TYPE_ALTERNATIVES) + #skip odd children('|' tokens) + for child in node.children: + if node.children.index(child)%2: + continue + reduced = reduce_tree(child, new_node) + if reduced is not None: + new_node.children.append(reduced) + elif node.type == syms.Alternative: + if len(node.children) > 1: + + new_node = MinNode(type=TYPE_GROUP) + for child in node.children: + reduced = reduce_tree(child, new_node) + if reduced: + new_node.children.append(reduced) + if not new_node.children: + # delete the group if all of the children were reduced to None + new_node = None + + else: + new_node = reduce_tree(node.children[0], parent) + + elif node.type == syms.Unit: + if (isinstance(node.children[0], pytree.Leaf) and + node.children[0].value == '('): + #skip parentheses + return reduce_tree(node.children[1], parent) + if ((isinstance(node.children[0], pytree.Leaf) and + node.children[0].value == '[') + or + (len(node.children)>1 and + hasattr(node.children[1], "value") and + node.children[1].value == '[')): + #skip whole unit if its optional + return None + + leaf = True + details_node = None + alternatives_node = None + has_repeater = False + repeater_node = None + has_variable_name = False + + for child in node.children: + if child.type == syms.Details: + leaf = False + details_node = child + elif child.type == syms.Repeater: + has_repeater = True + repeater_node = child + elif child.type == syms.Alternatives: + alternatives_node = child + if hasattr(child, 'value') and child.value == '=': # variable name + has_variable_name = True + + #skip variable name + if has_variable_name: + #skip variable name, '=' + name_leaf = node.children[2] + if hasattr(name_leaf, 'value') and name_leaf.value == '(': + # skip parenthesis + name_leaf = node.children[3] + else: + name_leaf = node.children[0] + + #set node type + if name_leaf.type == token_labels.NAME: + #(python) non-name or wildcard + if name_leaf.value == 'any': + new_node = MinNode(type=TYPE_ANY) + else: + if hasattr(token_labels, name_leaf.value): + new_node = MinNode(type=getattr(token_labels, name_leaf.value)) + else: + new_node = MinNode(type=getattr(pysyms, name_leaf.value)) + + elif name_leaf.type == token_labels.STRING: + #(python) name or character; remove the apostrophes from + #the string value + name = name_leaf.value.strip("'") + if name in tokens: + new_node = MinNode(type=tokens[name]) + else: + new_node = MinNode(type=token_labels.NAME, name=name) + elif name_leaf.type == syms.Alternatives: + new_node = reduce_tree(alternatives_node, parent) + + #handle repeaters + if has_repeater: + if repeater_node.children[0].value == '*': + #reduce to None + new_node = None + elif repeater_node.children[0].value 
== '+': + #reduce to a single occurence i.e. do nothing + pass + else: + #TODO: handle {min, max} repeaters + raise NotImplementedError + pass + + #add children + if details_node and new_node is not None: + for child in details_node.children[1:-1]: + #skip '<', '>' markers + reduced = reduce_tree(child, new_node) + if reduced is not None: + new_node.children.append(reduced) + if new_node: + new_node.parent = parent + return new_node + + +def get_characteristic_subpattern(subpatterns): + """Picks the most characteristic from a list of linear patterns + Current order used is: + names > common_names > common_chars + """ + if not isinstance(subpatterns, list): + return subpatterns + if len(subpatterns)==1: + return subpatterns[0] + + # first pick out the ones containing variable names + subpatterns_with_names = [] + subpatterns_with_common_names = [] + common_names = ['in', 'for', 'if' , 'not', 'None'] + subpatterns_with_common_chars = [] + common_chars = "[]().,:" + for subpattern in subpatterns: + if any(rec_test(subpattern, lambda x: type(x) is str)): + if any(rec_test(subpattern, + lambda x: isinstance(x, str) and x in common_chars)): + subpatterns_with_common_chars.append(subpattern) + elif any(rec_test(subpattern, + lambda x: isinstance(x, str) and x in common_names)): + subpatterns_with_common_names.append(subpattern) + + else: + subpatterns_with_names.append(subpattern) + + if subpatterns_with_names: + subpatterns = subpatterns_with_names + elif subpatterns_with_common_names: + subpatterns = subpatterns_with_common_names + elif subpatterns_with_common_chars: + subpatterns = subpatterns_with_common_chars + # of the remaining subpatterns pick out the longest one + return max(subpatterns, key=len) + +def rec_test(sequence, test_func): + """Tests test_func on all items of sequence and items of included + sub-iterables""" + for x in sequence: + if isinstance(x, (list, tuple)): + for y in rec_test(x, test_func): + yield y + else: + yield test_func(x) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixer_base.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixer_base.py new file mode 100644 index 00000000..f6421ba3 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixer_base.py @@ -0,0 +1,189 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Base class for fixers (optional, but recommended).""" + +# Python imports +import logging +import itertools + +# Local imports +from .patcomp import PatternCompiler +from . import pygram +from .fixer_util import does_tree_import + +class BaseFix(object): + + """Optional base class for fixers. + + The subclass name must be FixFooBar where FooBar is the result of + removing underscores and capitalizing the words of the fix name. + For example, the class name for a fixer named 'has_key' should be + FixHasKey. + """ + + PATTERN = None # Most subclasses should override with a string literal + pattern = None # Compiled pattern, set by compile_pattern() + pattern_tree = None # Tree representation of the pattern + options = None # Options object passed to initializer + filename = None # The filename (set by set_filename) + logger = None # A logger (set by set_filename) + numbers = itertools.count(1) # For new_name() + used_names = set() # A set of all used NAMEs + order = "post" # Does the fixer prefer pre- or post-order traversal + explicit = False # Is this ignored by refactor.py -f all? 
+ run_order = 5 # Fixers will be sorted by run order before execution + # Lower numbers will be run first. + _accept_type = None # [Advanced and not public] This tells RefactoringTool + # which node type to accept when there's not a pattern. + + keep_line_order = False # For the bottom matcher: match with the + # original line order + BM_compatible = False # Compatibility with the bottom matching + # module; every fixer should set this + # manually + + # Shortcut for access to Python grammar symbols + syms = pygram.python_symbols + + def __init__(self, options, log): + """Initializer. Subclass may override. + + Args: + options: an dict containing the options passed to RefactoringTool + that could be used to customize the fixer through the command line. + log: a list to append warnings and other messages to. + """ + self.options = options + self.log = log + self.compile_pattern() + + def compile_pattern(self): + """Compiles self.PATTERN into self.pattern. + + Subclass may override if it doesn't want to use + self.{pattern,PATTERN} in .match(). + """ + if self.PATTERN is not None: + PC = PatternCompiler() + self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN, + with_tree=True) + + def set_filename(self, filename): + """Set the filename, and a logger derived from it. + + The main refactoring tool should call this. + """ + self.filename = filename + self.logger = logging.getLogger(filename) + + def match(self, node): + """Returns match for a given parse tree node. + + Should return a true or false object (not necessarily a bool). + It may return a non-empty dict of matching sub-nodes as + returned by a matching pattern. + + Subclass may override. + """ + results = {"node": node} + return self.pattern.match(node, results) and results + + def transform(self, node, results): + """Returns the transformation for a given parse tree node. + + Args: + node: the root of the parse tree that matched the fixer. + results: a dict mapping symbolic names to part of the match. + + Returns: + None, or a node that is a modified copy of the + argument node. The node argument may also be modified in-place to + effect the same change. + + Subclass *must* override. + """ + raise NotImplementedError() + + def new_name(self, template=u"xxx_todo_changeme"): + """Return a string suitable for use as an identifier + + The new name is guaranteed not to conflict with other identifiers. + """ + name = template + while name in self.used_names: + name = template + unicode(self.numbers.next()) + self.used_names.add(name) + return name + + def log_message(self, message): + if self.first_log: + self.first_log = False + self.log.append("### In file %s ###" % self.filename) + self.log.append(message) + + def cannot_convert(self, node, reason=None): + """Warn the user that a given chunk of code is not valid Python 3, + but that it cannot be converted automatically. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. + """ + lineno = node.get_lineno() + for_output = node.clone() + for_output.prefix = u"" + msg = "Line %d: could not convert: %s" + self.log_message(msg % (lineno, for_output)) + if reason: + self.log_message(reason) + + def warning(self, node, reason): + """Used for warning the user about possible uncertainty in the + translation. + + First argument is the top-level node for the code in question. + Optional second argument is why it can't be converted. 
+ """ + lineno = node.get_lineno() + self.log_message("Line %d: %s" % (lineno, reason)) + + def start_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. + This method is called once, at the start of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + self.used_names = tree.used_names + self.set_filename(filename) + self.numbers = itertools.count(1) + self.first_log = True + + def finish_tree(self, tree, filename): + """Some fixers need to maintain tree-wide state. + This method is called once, at the conclusion of tree fix-up. + + tree - the root node of the tree to be processed. + filename - the name of the file the tree came from. + """ + pass + + +class ConditionalFix(BaseFix): + """ Base class for fixers which not execute if an import is found. """ + + # This is the name of the import which, if found, will cause the test to be skipped + skip_on = None + + def start_tree(self, *args): + super(ConditionalFix, self).start_tree(*args) + self._should_skip = None + + def should_skip(self, node): + if self._should_skip is not None: + return self._should_skip + pkg = self.skip_on.split(".") + name = pkg[-1] + pkg = ".".join(pkg[:-1]) + self._should_skip = does_tree_import(pkg, name, node) + return self._should_skip diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixer_util.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixer_util.py new file mode 100644 index 00000000..78fdf26d --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixer_util.py @@ -0,0 +1,432 @@ +"""Utility functions, node construction macros, etc.""" +# Author: Collin Winter + +from itertools import islice + +# Local imports +from .pgen2 import token +from .pytree import Leaf, Node +from .pygram import python_symbols as syms +from . import patcomp + + +########################################################### +### Common node-construction "macros" +########################################################### + +def KeywordArg(keyword, value): + return Node(syms.argument, + [keyword, Leaf(token.EQUAL, u"="), value]) + +def LParen(): + return Leaf(token.LPAR, u"(") + +def RParen(): + return Leaf(token.RPAR, u")") + +def Assign(target, source): + """Build an assignment statement""" + if not isinstance(target, list): + target = [target] + if not isinstance(source, list): + source.prefix = u" " + source = [source] + + return Node(syms.atom, + target + [Leaf(token.EQUAL, u"=", prefix=u" ")] + source) + +def Name(name, prefix=None): + """Return a NAME leaf""" + return Leaf(token.NAME, name, prefix=prefix) + +def Attr(obj, attr): + """A node tuple for obj.attr""" + return [obj, Node(syms.trailer, [Dot(), attr])] + +def Comma(): + """A comma leaf""" + return Leaf(token.COMMA, u",") + +def Dot(): + """A period (.) 
leaf""" + return Leaf(token.DOT, u".") + +def ArgList(args, lparen=LParen(), rparen=RParen()): + """A parenthesised argument list, used by Call()""" + node = Node(syms.trailer, [lparen.clone(), rparen.clone()]) + if args: + node.insert_child(1, Node(syms.arglist, args)) + return node + +def Call(func_name, args=None, prefix=None): + """A function call""" + node = Node(syms.power, [func_name, ArgList(args)]) + if prefix is not None: + node.prefix = prefix + return node + +def Newline(): + """A newline literal""" + return Leaf(token.NEWLINE, u"\n") + +def BlankLine(): + """A blank line""" + return Leaf(token.NEWLINE, u"") + +def Number(n, prefix=None): + return Leaf(token.NUMBER, n, prefix=prefix) + +def Subscript(index_node): + """A numeric or string subscript""" + return Node(syms.trailer, [Leaf(token.LBRACE, u"["), + index_node, + Leaf(token.RBRACE, u"]")]) + +def String(string, prefix=None): + """A string leaf""" + return Leaf(token.STRING, string, prefix=prefix) + +def ListComp(xp, fp, it, test=None): + """A list comprehension of the form [xp for fp in it if test]. + + If test is None, the "if test" part is omitted. + """ + xp.prefix = u"" + fp.prefix = u" " + it.prefix = u" " + for_leaf = Leaf(token.NAME, u"for") + for_leaf.prefix = u" " + in_leaf = Leaf(token.NAME, u"in") + in_leaf.prefix = u" " + inner_args = [for_leaf, fp, in_leaf, it] + if test: + test.prefix = u" " + if_leaf = Leaf(token.NAME, u"if") + if_leaf.prefix = u" " + inner_args.append(Node(syms.comp_if, [if_leaf, test])) + inner = Node(syms.listmaker, [xp, Node(syms.comp_for, inner_args)]) + return Node(syms.atom, + [Leaf(token.LBRACE, u"["), + inner, + Leaf(token.RBRACE, u"]")]) + +def FromImport(package_name, name_leafs): + """ Return an import statement in the form: + from package import name_leafs""" + # XXX: May not handle dotted imports properly (eg, package_name='foo.bar') + #assert package_name == '.' or '.' not in package_name, "FromImport has "\ + # "not been tested with dotted package names -- use at your own "\ + # "peril!" 
+ + for leaf in name_leafs: + # Pull the leaves out of their old tree + leaf.remove() + + children = [Leaf(token.NAME, u"from"), + Leaf(token.NAME, package_name, prefix=u" "), + Leaf(token.NAME, u"import", prefix=u" "), + Node(syms.import_as_names, name_leafs)] + imp = Node(syms.import_from, children) + return imp + + +########################################################### +### Determine whether a node represents a given literal +########################################################### + +def is_tuple(node): + """Does the node represent a tuple literal?""" + if isinstance(node, Node) and node.children == [LParen(), RParen()]: + return True + return (isinstance(node, Node) + and len(node.children) == 3 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[1], Node) + and isinstance(node.children[2], Leaf) + and node.children[0].value == u"(" + and node.children[2].value == u")") + +def is_list(node): + """Does the node represent a list literal?""" + return (isinstance(node, Node) + and len(node.children) > 1 + and isinstance(node.children[0], Leaf) + and isinstance(node.children[-1], Leaf) + and node.children[0].value == u"[" + and node.children[-1].value == u"]") + + +########################################################### +### Misc +########################################################### + +def parenthesize(node): + return Node(syms.atom, [LParen(), node, RParen()]) + + +consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum", + "min", "max", "enumerate"]) + +def attr_chain(obj, attr): + """Follow an attribute chain. + + If you have a chain of objects where a.foo -> b, b.foo-> c, etc, + use this to iterate over all objects in the chain. Iteration is + terminated by getattr(x, attr) is None. + + Args: + obj: the starting object + attr: the name of the chaining attribute + + Yields: + Each successive object in the chain. + """ + next = getattr(obj, attr) + while next: + yield next + next = getattr(next, attr) + +p0 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ +p1 = """ +power< + ( 'iter' | 'list' | 'tuple' | 'sorted' | 'set' | 'sum' | + 'any' | 'all' | 'enumerate' | (any* trailer< '.' 'join' >) ) + trailer< '(' node=any ')' > + any* +> +""" +p2 = """ +power< + ( 'sorted' | 'enumerate' ) + trailer< '(' arglist ')' > + any* +> +""" +pats_built = False +def in_special_context(node): + """ Returns true if node is in an environment where all that is required + of it is being iterable (ie, it doesn't matter if it returns a list + or an iterator). + See test_map_nochange in test_fixers.py for some examples and tests. + """ + global p0, p1, p2, pats_built + if not pats_built: + p0 = patcomp.compile_pattern(p0) + p1 = patcomp.compile_pattern(p1) + p2 = patcomp.compile_pattern(p2) + pats_built = True + patterns = [p0, p1, p2] + for pattern, parent in zip(patterns, attr_chain(node, "parent")): + results = {} + if pattern.match(parent, results) and results["node"] is node: + return True + return False + +def is_probably_builtin(node): + """ + Check that something isn't an attribute or function name etc. + """ + prev = node.prev_sibling + if prev is not None and prev.type == token.DOT: + # Attribute lookup. + return False + parent = node.parent + if parent.type in (syms.funcdef, syms.classdef): + return False + if parent.type == syms.expr_stmt and parent.children[0] is node: + # Assignment. 
+ return False + if parent.type == syms.parameters or \ + (parent.type == syms.typedargslist and ( + (prev is not None and prev.type == token.COMMA) or + parent.children[0] is node + )): + # The name of an argument. + return False + return True + +def find_indentation(node): + """Find the indentation of *node*.""" + while node is not None: + if node.type == syms.suite and len(node.children) > 2: + indent = node.children[1] + if indent.type == token.INDENT: + return indent.value + node = node.parent + return u"" + +########################################################### +### The following functions are to find bindings in a suite +########################################################### + +def make_suite(node): + if node.type == syms.suite: + return node + node = node.clone() + parent, node.parent = node.parent, None + suite = Node(syms.suite, [node]) + suite.parent = parent + return suite + +def find_root(node): + """Find the top level namespace.""" + # Scamper up to the top level namespace + while node.type != syms.file_input: + node = node.parent + if not node: + raise ValueError("root found before file_input node was found.") + return node + +def does_tree_import(package, name, node): + """ Returns true if name is imported from package at the + top level of the tree which node belongs to. + To cover the case of an import like 'import foo', use + None for the package and 'foo' for the name. """ + binding = find_binding(name, find_root(node), package) + return bool(binding) + +def is_import(node): + """Returns true if the node is an import statement.""" + return node.type in (syms.import_name, syms.import_from) + +def touch_import(package, name, node): + """ Works like `does_tree_import` but adds an import statement + if it was not imported. """ + def is_import_stmt(node): + return (node.type == syms.simple_stmt and node.children and + is_import(node.children[0])) + + root = find_root(node) + + if does_tree_import(package, name, root): + return + + # figure out where to insert the new import. First try to find + # the first import and then skip to the last one. + insert_pos = offset = 0 + for idx, node in enumerate(root.children): + if not is_import_stmt(node): + continue + for offset, node2 in enumerate(root.children[idx:]): + if not is_import_stmt(node2): + break + insert_pos = idx + offset + break + + # if there are no imports where we can insert, find the docstring. + # if that also fails, we stick to the beginning of the file + if insert_pos == 0: + for idx, node in enumerate(root.children): + if (node.type == syms.simple_stmt and node.children and + node.children[0].type == token.STRING): + insert_pos = idx + 1 + break + + if package is None: + import_ = Node(syms.import_name, [ + Leaf(token.NAME, u"import"), + Leaf(token.NAME, name, prefix=u" ") + ]) + else: + import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u" ")]) + + children = [import_, Newline()] + root.insert_child(insert_pos, Node(syms.simple_stmt, children)) + + +_def_syms = set([syms.classdef, syms.funcdef]) +def find_binding(name, node, package=None): + """ Returns the node which binds variable name, otherwise None. + If optional argument package is supplied, only imports will + be returned. 
+ See test cases for examples.""" + for child in node.children: + ret = None + if child.type == syms.for_stmt: + if _find(name, child.children[1]): + return child + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type in (syms.if_stmt, syms.while_stmt): + n = find_binding(name, make_suite(child.children[-1]), package) + if n: ret = n + elif child.type == syms.try_stmt: + n = find_binding(name, make_suite(child.children[2]), package) + if n: + ret = n + else: + for i, kid in enumerate(child.children[3:]): + if kid.type == token.COLON and kid.value == ":": + # i+3 is the colon, i+4 is the suite + n = find_binding(name, make_suite(child.children[i+4]), package) + if n: ret = n + elif child.type in _def_syms and child.children[1].value == name: + ret = child + elif _is_import_binding(child, name, package): + ret = child + elif child.type == syms.simple_stmt: + ret = find_binding(name, child, package) + elif child.type == syms.expr_stmt: + if _find(name, child.children[0]): + ret = child + + if ret: + if not package: + return ret + if is_import(ret): + return ret + return None + +_block_syms = set([syms.funcdef, syms.classdef, syms.trailer]) +def _find(name, node): + nodes = [node] + while nodes: + node = nodes.pop() + if node.type > 256 and node.type not in _block_syms: + nodes.extend(node.children) + elif node.type == token.NAME and node.value == name: + return node + return None + +def _is_import_binding(node, name, package=None): + """ Will reuturn node if node will import name, or node + will import * from package. None is returned otherwise. + See test cases for examples. """ + + if node.type == syms.import_name and not package: + imp = node.children[1] + if imp.type == syms.dotted_as_names: + for child in imp.children: + if child.type == syms.dotted_as_name: + if child.children[2].value == name: + return node + elif child.type == token.NAME and child.value == name: + return node + elif imp.type == syms.dotted_as_name: + last = imp.children[-1] + if last.type == token.NAME and last.value == name: + return node + elif imp.type == token.NAME and imp.value == name: + return node + elif node.type == syms.import_from: + # unicode(...) is used to make life easier here, because + # from a.b import parses to ['import', ['a', '.', 'b'], ...] + if package and unicode(node.children[1]).strip() != package: + return None + n = node.children[3] + if package and _find(u"as", n): + # See test_from_import_as for explanation + return None + elif n.type == syms.import_as_names and _find(name, n): + return node + elif n.type == syms.import_as_name: + child = n.children[2] + if child.type == token.NAME and child.value == name: + return node + elif n.type == token.NAME and n.value == name: + return node + elif package and n.type == token.STAR: + return node + return None diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/__init__.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/__init__.py new file mode 100644 index 00000000..b93054b3 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/__init__.py @@ -0,0 +1 @@ +# Dummy file to make this directory a package. diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_apply.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_apply.py new file mode 100644 index 00000000..a7dc3a04 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_apply.py @@ -0,0 +1,59 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. 
+# Licensed to PSF under a Contributor Agreement. + +"""Fixer for apply(). + +This converts apply(func, v, k) into (func)(*v, **k).""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Call, Comma, parenthesize + +class FixApply(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'apply' + trailer< + '(' + arglist< + (not argument + ')' + > + > + """ + + def transform(self, node, results): + syms = self.syms + assert results + func = results["func"] + args = results["args"] + kwds = results.get("kwds") + prefix = node.prefix + func = func.clone() + if (func.type not in (token.NAME, syms.atom) and + (func.type != syms.power or + func.children[-2].type == token.DOUBLESTAR)): + # Need to parenthesize + func = parenthesize(func) + func.prefix = "" + args = args.clone() + args.prefix = "" + if kwds is not None: + kwds = kwds.clone() + kwds.prefix = "" + l_newargs = [pytree.Leaf(token.STAR, u"*"), args] + if kwds is not None: + l_newargs.extend([Comma(), + pytree.Leaf(token.DOUBLESTAR, u"**"), + kwds]) + l_newargs[-2].prefix = u" " # that's the ** token + # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t) + # can be translated into f(x, y, *t) instead of f(*(x, y) + t) + #new = pytree.Node(syms.power, (func, ArgList(l_newargs))) + return Call(func, l_newargs, prefix=prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_basestring.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_basestring.py new file mode 100644 index 00000000..a3c9a436 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_basestring.py @@ -0,0 +1,14 @@ +"""Fixer for basestring -> str.""" +# Author: Christian Heimes + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixBasestring(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = "'basestring'" + + def transform(self, node, results): + return Name(u"str", prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_buffer.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_buffer.py new file mode 100644 index 00000000..c6b09280 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_buffer.py @@ -0,0 +1,22 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes buffer(...) into memoryview(...).""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixBuffer(fixer_base.BaseFix): + BM_compatible = True + + explicit = True # The user must ask for this fixer + + PATTERN = """ + power< name='buffer' trailer< '(' [any] ')' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name(u"memoryview", prefix=name.prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_callable.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_callable.py new file mode 100644 index 00000000..df33d614 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_callable.py @@ -0,0 +1,37 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for callable(). 
+ +This converts callable(obj) into isinstance(obj, collections.Callable), adding a +collections import if needed.""" + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Call, Name, String, Attr, touch_import + +class FixCallable(fixer_base.BaseFix): + BM_compatible = True + + order = "pre" + + # Ignore callable(*args) or use of keywords. + # Either could be a hint that the builtin callable() is not being used. + PATTERN = """ + power< 'callable' + trailer< lpar='(' + ( not(arglist | argument) any ','> ) + rpar=')' > + after=any* + > + """ + + def transform(self, node, results): + func = results['func'] + + touch_import(None, u'collections', node=node) + + args = [func.clone(), String(u', ')] + args.extend(Attr(Name(u'collections'), Name(u'Callable'))) + return Call(Name(u'isinstance'), args, prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_dict.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_dict.py new file mode 100644 index 00000000..f681e4d7 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_dict.py @@ -0,0 +1,107 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for dict methods. + +d.keys() -> list(d.keys()) +d.items() -> list(d.items()) +d.values() -> list(d.values()) + +d.iterkeys() -> iter(d.keys()) +d.iteritems() -> iter(d.items()) +d.itervalues() -> iter(d.values()) + +d.viewkeys() -> d.keys() +d.viewitems() -> d.items() +d.viewvalues() -> d.values() + +Except in certain very specific contexts: the iter() can be dropped +when the context is list(), sorted(), iter() or for...in; the list() +can be dropped when the context is list() or sorted() (but not iter() +or for...in!). Special contexts that apply to both: list(), sorted(), tuple() +set(), any(), all(), sum(). + +Note: iter(d.keys()) could be written as iter(d) but since the +original d.iterkeys() was also redundant we don't fix this. And there +are (rare) contexts where it makes a difference (e.g. when passing it +as an argument to a function that introspects the argument). +""" + +# Local imports +from .. import pytree +from .. import patcomp +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot +from .. import fixer_util + + +iter_exempt = fixer_util.consuming_calls | set(["iter"]) + + +class FixDict(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< head=any+ + trailer< '.' 
method=('keys'|'items'|'values'| + 'iterkeys'|'iteritems'|'itervalues'| + 'viewkeys'|'viewitems'|'viewvalues') > + parens=trailer< '(' ')' > + tail=any* + > + """ + + def transform(self, node, results): + head = results["head"] + method = results["method"][0] # Extract node for method name + tail = results["tail"] + syms = self.syms + method_name = method.value + isiter = method_name.startswith(u"iter") + isview = method_name.startswith(u"view") + if isiter or isview: + method_name = method_name[4:] + assert method_name in (u"keys", u"items", u"values"), repr(method) + head = [n.clone() for n in head] + tail = [n.clone() for n in tail] + special = not tail and self.in_special_context(node, isiter) + args = head + [pytree.Node(syms.trailer, + [Dot(), + Name(method_name, + prefix=method.prefix)]), + results["parens"].clone()] + new = pytree.Node(syms.power, args) + if not (special or isview): + new.prefix = u"" + new = Call(Name(u"iter" if isiter else u"list"), [new]) + if tail: + new = pytree.Node(syms.power, [new] + tail) + new.prefix = node.prefix + return new + + P1 = "power< func=NAME trailer< '(' node=any ')' > any* >" + p1 = patcomp.compile_pattern(P1) + + P2 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + """ + p2 = patcomp.compile_pattern(P2) + + def in_special_context(self, node, isiter): + if node.parent is None: + return False + results = {} + if (node.parent.parent is not None and + self.p1.match(node.parent.parent, results) and + results["node"] is node): + if isiter: + # iter(d.iterkeys()) -> iter(d.keys()), etc. + return results["func"].value in iter_exempt + else: + # list(d.keys()) -> list(d.keys()), etc. + return results["func"].value in fixer_util.consuming_calls + if not isiter: + return False + # for ... in d.iterkeys() -> for ... in d.keys(), etc. + return self.p2.match(node.parent, results) and results["node"] is node diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py new file mode 100644 index 00000000..e324718f --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_except.py @@ -0,0 +1,93 @@ +"""Fixer for except statements with named exceptions. + +The following cases will be converted: + +- "except E, T:" where T is a name: + + except E as T: + +- "except E, T:" where T is not a name, tuple or list: + + except E as t: + T = t + + This is done because the target of an "except" clause must be a + name. + +- "except E, T:" where T is a tuple or list literal: + + except E as t: + T = t.args +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. 
import fixer_base +from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms + +def find_excepts(nodes): + for i, n in enumerate(nodes): + if n.type == syms.except_clause: + if n.children[0].value == u'except': + yield (n, nodes[i+2]) + +class FixExcept(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + try_stmt< 'try' ':' (simple_stmt | suite) + cleanup=(except_clause ':' (simple_stmt | suite))+ + tail=(['except' ':' (simple_stmt | suite)] + ['else' ':' (simple_stmt | suite)] + ['finally' ':' (simple_stmt | suite)]) > + """ + + def transform(self, node, results): + syms = self.syms + + tail = [n.clone() for n in results["tail"]] + + try_cleanup = [ch.clone() for ch in results["cleanup"]] + for except_clause, e_suite in find_excepts(try_cleanup): + if len(except_clause.children) == 4: + (E, comma, N) = except_clause.children[1:4] + comma.replace(Name(u"as", prefix=u" ")) + + if N.type != token.NAME: + # Generate a new N for the except clause + new_N = Name(self.new_name(), prefix=u" ") + target = N.clone() + target.prefix = u"" + N.replace(new_N) + new_N = new_N.clone() + + # Insert "old_N = new_N" as the first statement in + # the except body. This loop skips leading whitespace + # and indents + #TODO(cwinter) suite-cleanup + suite_stmts = e_suite.children + for i, stmt in enumerate(suite_stmts): + if isinstance(stmt, pytree.Node): + break + + # The assignment is different if old_N is a tuple or list + # In that case, the assignment is old_N = new_N.args + if is_tuple(N) or is_list(N): + assign = Assign(target, Attr(new_N, Name(u'args'))) + else: + assign = Assign(target, new_N) + + #TODO(cwinter) stopgap until children becomes a smart list + for child in reversed(suite_stmts[:i]): + e_suite.insert_child(0, child) + e_suite.insert_child(i, assign) + elif N.prefix == u"": + # No space after a comma is legal; no space after "as", + # not so much. + N.prefix = u" " + + #TODO(cwinter) fix this when children becomes a smart list + children = [c.clone() for c in node.children[:3]] + try_cleanup + tail + return pytree.Node(node.type, children) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_exec.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_exec.py new file mode 100644 index 00000000..50e18544 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_exec.py @@ -0,0 +1,40 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for exec. + +This converts usages of the exec statement into calls to a built-in +exec() function. + +exec code in ns1, ns2 -> exec(code, ns1, ns2) +""" + +# Local imports +from .. import pytree +from .. 
import fixer_base +from ..fixer_util import Comma, Name, Call + + +class FixExec(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + exec_stmt< 'exec' a=any 'in' b=any [',' c=any] > + | + exec_stmt< 'exec' (not atom<'(' [any] ')'>) a=any > + """ + + def transform(self, node, results): + assert results + syms = self.syms + a = results["a"] + b = results.get("b") + c = results.get("c") + args = [a.clone()] + args[0].prefix = "" + if b is not None: + args.extend([Comma(), b.clone()]) + if c is not None: + args.extend([Comma(), c.clone()]) + + return Call(Name(u"exec"), args, prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_execfile.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_execfile.py new file mode 100644 index 00000000..2f29d3b2 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_execfile.py @@ -0,0 +1,52 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for execfile. + +This converts usages of the execfile function into calls to the built-in +exec() function. +""" + +from .. import fixer_base +from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node, + ArgList, String, syms) + + +class FixExecfile(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > + | + power< 'execfile' trailer< '(' filename=any ')' > > + """ + + def transform(self, node, results): + assert results + filename = results["filename"] + globals = results.get("globals") + locals = results.get("locals") + + # Copy over the prefix from the right parentheses end of the execfile + # call. + execfile_paren = node.children[-1].children[-1].clone() + # Construct open().read(). + open_args = ArgList([filename.clone()], rparen=execfile_paren) + open_call = Node(syms.power, [Name(u"open"), open_args]) + read = [Node(syms.trailer, [Dot(), Name(u'read')]), + Node(syms.trailer, [LParen(), RParen()])] + open_expr = [open_call] + read + # Wrap the open call in a compile call. This is so the filename will be + # preserved in the execed code. + filename_arg = filename.clone() + filename_arg.prefix = u" " + exec_str = String(u"'exec'", u" ") + compile_args = open_expr + [Comma(), filename_arg, Comma(), exec_str] + compile_call = Call(Name(u"compile"), compile_args, u"") + # Finally, replace the execfile call with an exec call. + args = [compile_call] + if globals is not None: + args.extend([Comma(), globals.clone()]) + if locals is not None: + args.extend([Comma(), locals.clone()]) + return Call(Name(u"exec"), args, prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_exitfunc.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_exitfunc.py new file mode 100644 index 00000000..89fb3db5 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_exitfunc.py @@ -0,0 +1,72 @@ +""" +Convert use of sys.exitfunc to use the atexit module. +""" + +# Author: Benjamin Peterson + +from lib2to3 import pytree, fixer_base +from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms + + +class FixExitfunc(fixer_base.BaseFix): + keep_line_order = True + BM_compatible = True + + PATTERN = """ + ( + sys_import=import_name<'import' + ('sys' + | + dotted_as_names< (any ',')* 'sys' (',' any)* > + ) + > + | + expr_stmt< + power< 'sys' trailer< '.' 
'exitfunc' > > + '=' func=any > + ) + """ + + def __init__(self, *args): + super(FixExitfunc, self).__init__(*args) + + def start_tree(self, tree, filename): + super(FixExitfunc, self).start_tree(tree, filename) + self.sys_import = None + + def transform(self, node, results): + # First, find a the sys import. We'll just hope it's global scope. + if "sys_import" in results: + if self.sys_import is None: + self.sys_import = results["sys_import"] + return + + func = results["func"].clone() + func.prefix = u"" + register = pytree.Node(syms.power, + Attr(Name(u"atexit"), Name(u"register")) + ) + call = Call(register, [func], node.prefix) + node.replace(call) + + if self.sys_import is None: + # That's interesting. + self.warning(node, "Can't find sys import; Please add an atexit " + "import at the top of your file.") + return + + # Now add an atexit import after the sys import. + names = self.sys_import.children[1] + if names.type == syms.dotted_as_names: + names.append_child(Comma()) + names.append_child(Name(u"atexit", u" ")) + else: + containing_stmt = self.sys_import.parent + position = containing_stmt.children.index(self.sys_import) + stmt_container = containing_stmt.parent + new_import = pytree.Node(syms.import_name, + [Name(u"import"), Name(u"atexit", u" ")] + ) + new = pytree.Node(syms.simple_stmt, [new_import]) + containing_stmt.insert_child(position + 1, Newline()) + containing_stmt.insert_child(position + 2, new) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_filter.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_filter.py new file mode 100644 index 00000000..18ee2ffc --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_filter.py @@ -0,0 +1,76 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes filter(F, X) into list(filter(F, X)). + +We avoid the transformation if the filter() call is directly contained +in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or +for V in <>:. + +NOTE: This is still not correct if the original code was depending on +filter(F, X) to return a string if X is a string and a tuple if X is a +tuple. That would require type inference, which we don't do. Let +Python 2.6 figure it out. +""" + +# Local imports +from ..pgen2 import token +from .. 
import fixer_base +from ..fixer_util import Name, Call, ListComp, in_special_context + +class FixFilter(fixer_base.ConditionalFix): + BM_compatible = True + + PATTERN = """ + filter_lambda=power< + 'filter' + trailer< + '(' + arglist< + lambdef< 'lambda' + (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any + > + ',' + it=any + > + ')' + > + > + | + power< + 'filter' + trailer< '(' arglist< none='None' ',' seq=any > ')' > + > + | + power< + 'filter' + args=trailer< '(' [any] ')' > + > + """ + + skip_on = "future_builtins.filter" + + def transform(self, node, results): + if self.should_skip(node): + return + + if "filter_lambda" in results: + new = ListComp(results.get("fp").clone(), + results.get("fp").clone(), + results.get("it").clone(), + results.get("xp").clone()) + + elif "none" in results: + new = ListComp(Name(u"_f"), + Name(u"_f"), + results["seq"].clone(), + Name(u"_f")) + + else: + if in_special_context(node): + return None + new = node.clone() + new.prefix = u"" + new = Call(Name(u"list"), [new]) + new.prefix = node.prefix + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_funcattrs.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_funcattrs.py new file mode 100644 index 00000000..9e45c028 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_funcattrs.py @@ -0,0 +1,21 @@ +"""Fix function attribute names (f.func_x -> f.__x__).""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixFuncattrs(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' + | 'func_name' | 'func_defaults' | 'func_code' + | 'func_dict') > any* > + """ + + def transform(self, node, results): + attr = results["attr"][0] + attr.replace(Name((u"__%s__" % attr.value[5:]), + prefix=attr.prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_future.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_future.py new file mode 100644 index 00000000..fbcb86af --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_future.py @@ -0,0 +1,22 @@ +"""Remove __future__ imports + +from __future__ import foo is replaced with an empty line. +""" +# Author: Christian Heimes + +# Local imports +from .. import fixer_base +from ..fixer_util import BlankLine + +class FixFuture(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """import_from< 'from' module_name="__future__" 'import' any >""" + + # This should be run last -- some things check for the import + run_order = 10 + + def transform(self, node, results): + new = BlankLine() + new.prefix = node.prefix + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_getcwdu.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_getcwdu.py new file mode 100644 index 00000000..82233c89 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_getcwdu.py @@ -0,0 +1,19 @@ +""" +Fixer that changes os.getcwdu() to os.getcwd(). +""" +# Author: Victor Stinner + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixGetcwdu(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + power< 'os' trailer< dot='.' 
name='getcwdu' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name(u"getcwd", prefix=name.prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_has_key.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_has_key.py new file mode 100644 index 00000000..bead4cb5 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_has_key.py @@ -0,0 +1,110 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for has_key(). + +Calls to .has_key() methods are expressed in terms of the 'in' +operator: + + d.has_key(k) -> k in d + +CAVEATS: +1) While the primary target of this fixer is dict.has_key(), the + fixer will change any has_key() method call, regardless of its + class. + +2) Cases like this will not be converted: + + m = d.has_key + if m(k): + ... + + Only *calls* to has_key() are converted. While it is possible to + convert the above to something like + + m = d.__contains__ + if m(k): + ... + + this is currently not done. +""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, parenthesize + + +class FixHasKey(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + anchor=power< + before=any+ + trailer< '.' 'has_key' > + trailer< + '(' + ( not(arglist | argument) arg=any ','> + ) + ')' + > + after=any* + > + | + negation=not_test< + 'not' + anchor=power< + before=any+ + trailer< '.' 'has_key' > + trailer< + '(' + ( not(arglist | argument) arg=any ','> + ) + ')' + > + > + > + """ + + def transform(self, node, results): + assert results + syms = self.syms + if (node.parent.type == syms.not_test and + self.pattern.match(node.parent)): + # Don't transform a node matching the first alternative of the + # pattern when its parent matches the second alternative + return None + negation = results.get("negation") + anchor = results["anchor"] + prefix = node.prefix + before = [n.clone() for n in results["before"]] + arg = results["arg"].clone() + after = results.get("after") + if after: + after = [n.clone() for n in after] + if arg.type in (syms.comparison, syms.not_test, syms.and_test, + syms.or_test, syms.test, syms.lambdef, syms.argument): + arg = parenthesize(arg) + if len(before) == 1: + before = before[0] + else: + before = pytree.Node(syms.power, before) + before.prefix = u" " + n_op = Name(u"in", prefix=u" ") + if negation: + n_not = Name(u"not", prefix=u" ") + n_op = pytree.Node(syms.comp_op, (n_not, n_op)) + new = pytree.Node(syms.comparison, (arg, n_op, before)) + if after: + new = parenthesize(new) + new = pytree.Node(syms.power, (new,) + tuple(after)) + if node.parent.type in (syms.comparison, syms.expr, syms.xor_expr, + syms.and_expr, syms.shift_expr, + syms.arith_expr, syms.term, + syms.factor, syms.power): + new = parenthesize(new) + new.prefix = prefix + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_idioms.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_idioms.py new file mode 100644 index 00000000..37b6eefa --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_idioms.py @@ -0,0 +1,152 @@ +"""Adjust some old Python 2 idioms to their modern counterparts. 
+ +* Change some type comparisons to isinstance() calls: + type(x) == T -> isinstance(x, T) + type(x) is T -> isinstance(x, T) + type(x) != T -> not isinstance(x, T) + type(x) is not T -> not isinstance(x, T) + +* Change "while 1:" into "while True:". + +* Change both + + v = list(EXPR) + v.sort() + foo(v) + +and the more general + + v = EXPR + v.sort() + foo(v) + +into + + v = sorted(EXPR) + foo(v) +""" +# Author: Jacques Frechet, Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Comma, Name, Node, BlankLine, syms + +CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)" +TYPE = "power< 'type' trailer< '(' x=any ')' > >" + +class FixIdioms(fixer_base.BaseFix): + explicit = True # The user must ask for this fixer + + PATTERN = r""" + isinstance=comparison< %s %s T=any > + | + isinstance=comparison< T=any %s %s > + | + while_stmt< 'while' while='1' ':' any+ > + | + sorted=any< + any* + simple_stmt< + expr_stmt< id1=any '=' + power< list='list' trailer< '(' (not arglist) any ')' > > + > + '\n' + > + sort= + simple_stmt< + power< id2=any + trailer< '.' 'sort' > trailer< '(' ')' > + > + '\n' + > + next=any* + > + | + sorted=any< + any* + simple_stmt< expr_stmt< id1=any '=' expr=any > '\n' > + sort= + simple_stmt< + power< id2=any + trailer< '.' 'sort' > trailer< '(' ')' > + > + '\n' + > + next=any* + > + """ % (TYPE, CMP, CMP, TYPE) + + def match(self, node): + r = super(FixIdioms, self).match(node) + # If we've matched one of the sort/sorted subpatterns above, we + # want to reject matches where the initial assignment and the + # subsequent .sort() call involve different identifiers. + if r and "sorted" in r: + if r["id1"] == r["id2"]: + return r + return None + return r + + def transform(self, node, results): + if "isinstance" in results: + return self.transform_isinstance(node, results) + elif "while" in results: + return self.transform_while(node, results) + elif "sorted" in results: + return self.transform_sort(node, results) + else: + raise RuntimeError("Invalid match") + + def transform_isinstance(self, node, results): + x = results["x"].clone() # The thing inside of type() + T = results["T"].clone() # The type being compared against + x.prefix = u"" + T.prefix = u" " + test = Call(Name(u"isinstance"), [x, Comma(), T]) + if "n" in results: + test.prefix = u" " + test = Node(syms.not_test, [Name(u"not"), test]) + test.prefix = node.prefix + return test + + def transform_while(self, node, results): + one = results["while"] + one.replace(Name(u"True", prefix=one.prefix)) + + def transform_sort(self, node, results): + sort_stmt = results["sort"] + next_stmt = results["next"] + list_call = results.get("list") + simple_expr = results.get("expr") + + if list_call: + list_call.replace(Name(u"sorted", prefix=list_call.prefix)) + elif simple_expr: + new = simple_expr.clone() + new.prefix = u"" + simple_expr.replace(Call(Name(u"sorted"), [new], + prefix=simple_expr.prefix)) + else: + raise RuntimeError("should not have reached here") + sort_stmt.remove() + + btwn = sort_stmt.prefix + # Keep any prefix lines between the sort_stmt and the list_call and + # shove them right after the sorted() call. + if u"\n" in btwn: + if next_stmt: + # The new prefix should be everything from the sort_stmt's + # prefix up to the last newline, then the old prefix after a new + # line. 
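+                # (btwn.rpartition(u"\n")[0] is everything in the removed
+                # statement's prefix before its last newline.)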
+ prefix_lines = (btwn.rpartition(u"\n")[0], next_stmt[0].prefix) + next_stmt[0].prefix = u"\n".join(prefix_lines) + else: + assert list_call.parent + assert list_call.next_sibling is None + # Put a blank line after list_call and set its prefix. + end_line = BlankLine() + list_call.parent.append_child(end_line) + assert list_call.next_sibling is end_line + # The new prefix should be everything up to the first new line + # of sort_stmt's prefix. + end_line.prefix = btwn.rpartition(u"\n")[0] diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_import.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_import.py new file mode 100644 index 00000000..201e811e --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_import.py @@ -0,0 +1,99 @@ +"""Fixer for import statements. +If spam is being imported from the local directory, this import: + from spam import eggs +Becomes: + from .spam import eggs + +And this import: + import spam +Becomes: + from . import spam +""" + +# Local imports +from .. import fixer_base +from os.path import dirname, join, exists, sep +from ..fixer_util import FromImport, syms, token + + +def traverse_imports(names): + """ + Walks over all the names imported in a dotted_as_names node. + """ + pending = [names] + while pending: + node = pending.pop() + if node.type == token.NAME: + yield node.value + elif node.type == syms.dotted_name: + yield "".join([ch.value for ch in node.children]) + elif node.type == syms.dotted_as_name: + pending.append(node.children[0]) + elif node.type == syms.dotted_as_names: + pending.extend(node.children[::-2]) + else: + raise AssertionError("unkown node type") + + +class FixImport(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + import_from< 'from' imp=any 'import' ['('] any [')'] > + | + import_name< 'import' imp=any > + """ + + def start_tree(self, tree, name): + super(FixImport, self).start_tree(tree, name) + self.skip = "absolute_import" in tree.future_features + + def transform(self, node, results): + if self.skip: + return + imp = results['imp'] + + if node.type == syms.import_from: + # Some imps are top-level (eg: 'import ham') + # some are first level (eg: 'import ham.eggs') + # some are third level (eg: 'import ham.eggs as spam') + # Hence, the loop + while not hasattr(imp, 'value'): + imp = imp.children[0] + if self.probably_a_local_import(imp.value): + imp.value = u"." + imp.value + imp.changed() + else: + have_local = False + have_absolute = False + for mod_name in traverse_imports(imp): + if self.probably_a_local_import(mod_name): + have_local = True + else: + have_absolute = True + if have_absolute: + if have_local: + # We won't handle both sibling and absolute imports in the + # same statement at the moment. + self.warning(node, "absolute and local imports together") + return + + new = FromImport(u".", [imp]) + new.prefix = node.prefix + return new + + def probably_a_local_import(self, imp_name): + if imp_name.startswith(u"."): + # Relative imports are certainly not local imports. + return False + imp_name = imp_name.split(u".", 1)[0] + base_path = dirname(self.filename) + base_path = join(base_path, imp_name) + # If there is no __init__.py next to the file its not in a package + # so can't be a relative import. 
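+        # (base_path is "<directory of the file being fixed>/<top-level module>".)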
+ if not exists(join(dirname(base_path), "__init__.py")): + return False + for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]: + if exists(base_path + ext): + return True + return False diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports.py new file mode 100644 index 00000000..93c9e678 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports.py @@ -0,0 +1,145 @@ +"""Fix incompatible imports and module references.""" +# Authors: Collin Winter, Nick Edds + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, attr_chain + +MAPPING = {'StringIO': 'io', + 'cStringIO': 'io', + 'cPickle': 'pickle', + '__builtin__' : 'builtins', + 'copy_reg': 'copyreg', + 'Queue': 'queue', + 'SocketServer': 'socketserver', + 'ConfigParser': 'configparser', + 'repr': 'reprlib', + 'FileDialog': 'tkinter.filedialog', + 'tkFileDialog': 'tkinter.filedialog', + 'SimpleDialog': 'tkinter.simpledialog', + 'tkSimpleDialog': 'tkinter.simpledialog', + 'tkColorChooser': 'tkinter.colorchooser', + 'tkCommonDialog': 'tkinter.commondialog', + 'Dialog': 'tkinter.dialog', + 'Tkdnd': 'tkinter.dnd', + 'tkFont': 'tkinter.font', + 'tkMessageBox': 'tkinter.messagebox', + 'ScrolledText': 'tkinter.scrolledtext', + 'Tkconstants': 'tkinter.constants', + 'Tix': 'tkinter.tix', + 'ttk': 'tkinter.ttk', + 'Tkinter': 'tkinter', + 'markupbase': '_markupbase', + '_winreg': 'winreg', + 'thread': '_thread', + 'dummy_thread': '_dummy_thread', + # anydbm and whichdb are handled by fix_imports2 + 'dbhash': 'dbm.bsd', + 'dumbdbm': 'dbm.dumb', + 'dbm': 'dbm.ndbm', + 'gdbm': 'dbm.gnu', + 'xmlrpclib': 'xmlrpc.client', + 'DocXMLRPCServer': 'xmlrpc.server', + 'SimpleXMLRPCServer': 'xmlrpc.server', + 'httplib': 'http.client', + 'htmlentitydefs' : 'html.entities', + 'HTMLParser' : 'html.parser', + 'Cookie': 'http.cookies', + 'cookielib': 'http.cookiejar', + 'BaseHTTPServer': 'http.server', + 'SimpleHTTPServer': 'http.server', + 'CGIHTTPServer': 'http.server', + #'test.test_support': 'test.support', + 'commands': 'subprocess', + 'UserString' : 'collections', + 'UserList' : 'collections', + 'urlparse' : 'urllib.parse', + 'robotparser' : 'urllib.robotparser', +} + + +def alternates(members): + return "(" + "|".join(map(repr, members)) + ")" + + +def build_pattern(mapping=MAPPING): + mod_list = ' | '.join(["module_name='%s'" % key for key in mapping]) + bare_names = alternates(mapping.keys()) + + yield """name_import=import_name< 'import' ((%s) | + multiple_imports=dotted_as_names< any* (%s) any* >) > + """ % (mod_list, mod_list) + yield """import_from< 'from' (%s) 'import' ['('] + ( any | import_as_name< any 'as' any > | + import_as_names< any* >) [')'] > + """ % mod_list + yield """import_name< 'import' (dotted_as_name< (%s) 'as' any > | + multiple_imports=dotted_as_names< + any* dotted_as_name< (%s) 'as' any > any* >) > + """ % (mod_list, mod_list) + + # Find usages of module members in code e.g. thread.foo(bar) + yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names + + +class FixImports(fixer_base.BaseFix): + + BM_compatible = True + keep_line_order = True + # This is overridden in fix_imports2. + mapping = MAPPING + + # We want to run this fixer late, so fix_import doesn't try to make stdlib + # renames into relative imports. 
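+    # (Fixers run in ascending run_order; BaseFix defaults to 5.)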
+ run_order = 6 + + def build_pattern(self): + return "|".join(build_pattern(self.mapping)) + + def compile_pattern(self): + # We override this, so MAPPING can be pragmatically altered and the + # changes will be reflected in PATTERN. + self.PATTERN = self.build_pattern() + super(FixImports, self).compile_pattern() + + # Don't match the node if it's within another match. + def match(self, node): + match = super(FixImports, self).match + results = match(node) + if results: + # Module usage could be in the trailer of an attribute lookup, so we + # might have nested matches when "bare_with_attr" is present. + if "bare_with_attr" not in results and \ + any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + def start_tree(self, tree, filename): + super(FixImports, self).start_tree(tree, filename) + self.replace = {} + + def transform(self, node, results): + import_mod = results.get("module_name") + if import_mod: + mod_name = import_mod.value + new_name = unicode(self.mapping[mod_name]) + import_mod.replace(Name(new_name, prefix=import_mod.prefix)) + if "name_import" in results: + # If it's not a "from x import x, y" or "import x as y" import, + # marked its usage to be replaced. + self.replace[mod_name] = new_name + if "multiple_imports" in results: + # This is a nasty hack to fix multiple imports on a line (e.g., + # "import StringIO, urlparse"). The problem is that I can't + # figure out an easy way to make a pattern recognize the keys of + # MAPPING randomly sprinkled in an import statement. + results = self.match(node) + if results: + self.transform(node, results) + else: + # Replace usage of the module. + bare_name = results["bare_with_attr"][0] + new_name = self.replace.get(bare_name.value) + if new_name: + bare_name.replace(Name(new_name, prefix=bare_name.prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports2.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports2.py new file mode 100644 index 00000000..9a33c67b --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_imports2.py @@ -0,0 +1,16 @@ +"""Fix incompatible imports and module references that must be fixed after +fix_imports.""" +from . import fix_imports + + +MAPPING = { + 'whichdb': 'dbm', + 'anydbm': 'dbm', + } + + +class FixImports2(fix_imports.FixImports): + + run_order = 7 + + mapping = MAPPING diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_input.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_input.py new file mode 100644 index 00000000..fbf4c72f --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_input.py @@ -0,0 +1,26 @@ +"""Fixer that changes input(...) into eval(input(...)).""" +# Author: Andre Roberge + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Name +from .. import patcomp + + +context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >") + + +class FixInput(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< 'input' args=trailer< '(' [any] ')' > > + """ + + def transform(self, node, results): + # If we're already wrapped in a eval() call, we're done. 
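+        # (node is the input(...) power node; two parents up is the node that
+        # would be the enclosing eval(...) call.)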
+ if context.match(node.parent.parent): + return + + new = node.clone() + new.prefix = u"" + return Call(Name(u"eval"), [new], prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_intern.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_intern.py new file mode 100644 index 00000000..e7bb5052 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_intern.py @@ -0,0 +1,46 @@ +# Copyright 2006 Georg Brandl. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for intern(). + +intern(s) -> sys.intern(s)""" + +# Local imports +from .. import pytree +from .. import fixer_base +from ..fixer_util import Name, Attr, touch_import + + +class FixIntern(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'intern' + trailer< lpar='(' + ( not(arglist | argument) any ','> ) + rpar=')' > + after=any* + > + """ + + def transform(self, node, results): + syms = self.syms + obj = results["obj"].clone() + if obj.type == syms.arglist: + newarglist = obj.clone() + else: + newarglist = pytree.Node(syms.arglist, [obj.clone()]) + after = results["after"] + if after: + after = [n.clone() for n in after] + new = pytree.Node(syms.power, + Attr(Name(u"sys"), Name(u"intern")) + + [pytree.Node(syms.trailer, + [results["lpar"].clone(), + newarglist, + results["rpar"].clone()])] + after) + new.prefix = node.prefix + touch_import(None, u'sys', node) + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_isinstance.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_isinstance.py new file mode 100644 index 00000000..4b04c8fd --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_isinstance.py @@ -0,0 +1,52 @@ +# Copyright 2008 Armin Ronacher. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that cleans up a tuple argument to isinstance after the tokens +in it were fixed. This is mainly used to remove double occurrences of +tokens as a leftover of the long -> int / unicode -> str conversion. + +eg. isinstance(x, (int, long)) -> isinstance(x, (int, int)) + -> isinstance(x, int) +""" + +from .. 
import fixer_base +from ..fixer_util import token + + +class FixIsinstance(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< + 'isinstance' + trailer< '(' arglist< any ',' atom< '(' + args=testlist_gexp< any+ > + ')' > > ')' > + > + """ + + run_order = 6 + + def transform(self, node, results): + names_inserted = set() + testlist = results["args"] + args = testlist.children + new_args = [] + iterator = enumerate(args) + for idx, arg in iterator: + if arg.type == token.NAME and arg.value in names_inserted: + if idx < len(args) - 1 and args[idx + 1].type == token.COMMA: + iterator.next() + continue + else: + new_args.append(arg) + if arg.type == token.NAME: + names_inserted.add(arg.value) + if new_args and new_args[-1].type == token.COMMA: + del new_args[-1] + if len(new_args) == 1: + atom = testlist.parent + new_args[0].prefix = atom.prefix + atom.replace(new_args[0]) + else: + args[:] = new_args + node.changed() diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools.py new file mode 100644 index 00000000..067641b8 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools.py @@ -0,0 +1,43 @@ +""" Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and + itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363) + + imports from itertools are fixed in fix_itertools_import.py + + If itertools is imported as something else (ie: import itertools as it; + it.izip(spam, eggs)) method calls will not get fixed. + """ + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixItertools(fixer_base.BaseFix): + BM_compatible = True + it_funcs = "('imap'|'ifilter'|'izip'|'izip_longest'|'ifilterfalse')" + PATTERN = """ + power< it='itertools' + trailer< + dot='.' 
func=%(it_funcs)s > trailer< '(' [any] ')' > > + | + power< func=%(it_funcs)s trailer< '(' [any] ')' > > + """ %(locals()) + + # Needs to be run after fix_(map|zip|filter) + run_order = 6 + + def transform(self, node, results): + prefix = None + func = results['func'][0] + if ('it' in results and + func.value not in (u'ifilterfalse', u'izip_longest')): + dot, it = (results['dot'], results['it']) + # Remove the 'itertools' + prefix = it.prefix + it.remove() + # Replace the node which contains ('.', 'function') with the + # function (to be consistent with the second part of the pattern) + dot.remove() + func.parent.replace(func) + + prefix = prefix or func.prefix + func.replace(Name(func.value[1:], prefix=prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools_imports.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools_imports.py new file mode 100644 index 00000000..28610cfc --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_itertools_imports.py @@ -0,0 +1,57 @@ +""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """ + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import BlankLine, syms, token + + +class FixItertoolsImports(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + import_from< 'from' 'itertools' 'import' imports=any > + """ %(locals()) + + def transform(self, node, results): + imports = results['imports'] + if imports.type == syms.import_as_name or not imports.children: + children = [imports] + else: + children = imports.children + for child in children[::2]: + if child.type == token.NAME: + member = child.value + name_node = child + elif child.type == token.STAR: + # Just leave the import as is. + return + else: + assert child.type == syms.import_as_name + name_node = child.children[0] + member_name = name_node.value + if member_name in (u'imap', u'izip', u'ifilter'): + child.value = None + child.remove() + elif member_name in (u'ifilterfalse', u'izip_longest'): + node.changed() + name_node.value = (u'filterfalse' if member_name[1] == u'f' + else u'zip_longest') + + # Make sure the import statement is still sane + children = imports.children[:] or [imports] + remove_comma = True + for child in children: + if remove_comma and child.type == token.COMMA: + child.remove() + else: + remove_comma ^= True + + while children and children[-1].type == token.COMMA: + children.pop().remove() + + # If there are no imports left, just get rid of the entire statement + if (not (imports.children or getattr(imports, 'value', None)) or + imports.parent is None): + p = node.prefix + node = BlankLine() + node.prefix = p + return node diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_long.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_long.py new file mode 100644 index 00000000..5dddde0d --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_long.py @@ -0,0 +1,19 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that turns 'long' into 'int' everywhere. 
+""" + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import is_probably_builtin + + +class FixLong(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "'long'" + + def transform(self, node, results): + if is_probably_builtin(node): + node.value = u"int" + node.changed() diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_map.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_map.py new file mode 100644 index 00000000..7a7d0dbc --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_map.py @@ -0,0 +1,91 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there +exists a 'from future_builtins import map' statement in the top-level +namespace. + +As a special case, map(None, X) is changed into list(X). (This is +necessary because the semantics are changed in this case -- the new +map(None, X) is equivalent to [(x,) for x in X].) + +We avoid the transformation (except for the special case mentioned +above) if the map() call is directly contained in iter(<>), list(<>), +tuple(<>), sorted(<>), ...join(<>), or for V in <>:. + +NOTE: This is still not correct if the original code was depending on +map(F, X, Y, ...) to go on until the longest argument is exhausted, +substituting None for missing values -- like zip(), it now stops as +soon as the shortest argument is exhausted. +""" + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, ListComp, in_special_context +from ..pygram import python_symbols as syms + +class FixMap(fixer_base.ConditionalFix): + BM_compatible = True + + PATTERN = """ + map_none=power< + 'map' + trailer< '(' arglist< 'None' ',' arg=any [','] > ')' > + > + | + map_lambda=power< + 'map' + trailer< + '(' + arglist< + lambdef< 'lambda' + (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any + > + ',' + it=any + > + ')' + > + > + | + power< + 'map' trailer< '(' [arglist=any] ')' > + > + """ + + skip_on = 'future_builtins.map' + + def transform(self, node, results): + if self.should_skip(node): + return + + if node.parent.type == syms.simple_stmt: + self.warning(node, "You should use a for loop here") + new = node.clone() + new.prefix = u"" + new = Call(Name(u"list"), [new]) + elif "map_lambda" in results: + new = ListComp(results["xp"].clone(), + results["fp"].clone(), + results["it"].clone()) + else: + if "map_none" in results: + new = results["arg"].clone() + else: + if "arglist" in results: + args = results["arglist"] + if args.type == syms.arglist and \ + args.children[0].type == token.NAME and \ + args.children[0].value == "None": + self.warning(node, "cannot convert map(None, ...) " + "with multiple arguments because map() " + "now truncates to the shortest sequence") + return + if in_special_context(node): + return None + new = node.clone() + new.prefix = u"" + new = Call(Name(u"list"), [new]) + new.prefix = node.prefix + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_metaclass.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_metaclass.py new file mode 100644 index 00000000..4f5593c5 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_metaclass.py @@ -0,0 +1,228 @@ +"""Fixer for __metaclass__ = X -> (metaclass=X) methods. 
+ + The various forms of classef (inherits nothing, inherits once, inherints + many) don't parse the same in the CST so we look at ALL classes for + a __metaclass__ and if we find one normalize the inherits to all be + an arglist. + + For one-liner classes ('class X: pass') there is no indent/dedent so + we normalize those into having a suite. + + Moving the __metaclass__ into the classdef can also cause the class + body to be empty so there is some special casing for that as well. + + This fixer also tries very hard to keep original indenting and spacing + in all those corner cases. + +""" +# Author: Jack Diederich + +# Local imports +from .. import fixer_base +from ..pygram import token +from ..fixer_util import Name, syms, Node, Leaf + + +def has_metaclass(parent): + """ we have to check the cls_node without changing it. + There are two possiblities: + 1) clsdef => suite => simple_stmt => expr_stmt => Leaf('__meta') + 2) clsdef => simple_stmt => expr_stmt => Leaf('__meta') + """ + for node in parent.children: + if node.type == syms.suite: + return has_metaclass(node) + elif node.type == syms.simple_stmt and node.children: + expr_node = node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + left_side = expr_node.children[0] + if isinstance(left_side, Leaf) and \ + left_side.value == '__metaclass__': + return True + return False + + +def fixup_parse_tree(cls_node): + """ one-line classes don't get a suite in the parse tree so we add + one to normalize the tree + """ + for node in cls_node.children: + if node.type == syms.suite: + # already in the preferred format, do nothing + return + + # !%@#! oneliners have no suite node, we have to fake one up + for i, node in enumerate(cls_node.children): + if node.type == token.COLON: + break + else: + raise ValueError("No class suite and no ':'!") + + # move everything into a suite node + suite = Node(syms.suite, []) + while cls_node.children[i+1:]: + move_node = cls_node.children[i+1] + suite.append_child(move_node.clone()) + move_node.remove() + cls_node.append_child(suite) + node = suite + + +def fixup_simple_stmt(parent, i, stmt_node): + """ if there is a semi-colon all the parts count as part of the same + simple_stmt. 
We just want the __metaclass__ part so we move + everything after the semi-colon into its own simple_stmt node + """ + for semi_ind, node in enumerate(stmt_node.children): + if node.type == token.SEMI: # *sigh* + break + else: + return + + node.remove() # kill the semicolon + new_expr = Node(syms.expr_stmt, []) + new_stmt = Node(syms.simple_stmt, [new_expr]) + while stmt_node.children[semi_ind:]: + move_node = stmt_node.children[semi_ind] + new_expr.append_child(move_node.clone()) + move_node.remove() + parent.insert_child(i, new_stmt) + new_leaf1 = new_stmt.children[0].children[0] + old_leaf1 = stmt_node.children[0].children[0] + new_leaf1.prefix = old_leaf1.prefix + + +def remove_trailing_newline(node): + if node.children and node.children[-1].type == token.NEWLINE: + node.children[-1].remove() + + +def find_metas(cls_node): + # find the suite node (Mmm, sweet nodes) + for node in cls_node.children: + if node.type == syms.suite: + break + else: + raise ValueError("No class suite!") + + # look for simple_stmt[ expr_stmt[ Leaf('__metaclass__') ] ] + for i, simple_node in list(enumerate(node.children)): + if simple_node.type == syms.simple_stmt and simple_node.children: + expr_node = simple_node.children[0] + if expr_node.type == syms.expr_stmt and expr_node.children: + # Check if the expr_node is a simple assignment. + left_node = expr_node.children[0] + if isinstance(left_node, Leaf) and \ + left_node.value == u'__metaclass__': + # We found a assignment to __metaclass__. + fixup_simple_stmt(node, i, simple_node) + remove_trailing_newline(simple_node) + yield (node, i, simple_node) + + +def fixup_indent(suite): + """ If an INDENT is followed by a thing with a prefix then nuke the prefix + Otherwise we get in trouble when removing __metaclass__ at suite start + """ + kids = suite.children[::-1] + # find the first indent + while kids: + node = kids.pop() + if node.type == token.INDENT: + break + + # find the first Leaf + while kids: + node = kids.pop() + if isinstance(node, Leaf) and node.type != token.DEDENT: + if node.prefix: + node.prefix = u'' + return + else: + kids.extend(node.children[::-1]) + + +class FixMetaclass(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + classdef + """ + + def transform(self, node, results): + if not has_metaclass(node): + return + + fixup_parse_tree(node) + + # find metaclasses, keep the last one + last_metaclass = None + for suite, i, stmt in find_metas(node): + last_metaclass = stmt + stmt.remove() + + text_type = node.children[0].type # always Leaf(nnn, 'class') + + # figure out what kind of classdef we have + if len(node.children) == 7: + # Node(classdef, ['class', 'name', '(', arglist, ')', ':', suite]) + # 0 1 2 3 4 5 6 + if node.children[3].type == syms.arglist: + arglist = node.children[3] + # Node(classdef, ['class', 'name', '(', 'Parent', ')', ':', suite]) + else: + parent = node.children[3].clone() + arglist = Node(syms.arglist, [parent]) + node.set_child(3, arglist) + elif len(node.children) == 6: + # Node(classdef, ['class', 'name', '(', ')', ':', suite]) + # 0 1 2 3 4 5 + arglist = Node(syms.arglist, []) + node.insert_child(3, arglist) + elif len(node.children) == 4: + # Node(classdef, ['class', 'name', ':', suite]) + # 0 1 2 3 + arglist = Node(syms.arglist, []) + node.insert_child(2, Leaf(token.RPAR, u')')) + node.insert_child(2, arglist) + node.insert_child(2, Leaf(token.LPAR, u'(')) + else: + raise ValueError("Unexpected class definition") + + # now stick the metaclass in the arglist + meta_txt = 
last_metaclass.children[0].children[0] + meta_txt.value = 'metaclass' + orig_meta_prefix = meta_txt.prefix + + if arglist.children: + arglist.append_child(Leaf(token.COMMA, u',')) + meta_txt.prefix = u' ' + else: + meta_txt.prefix = u'' + + # compact the expression "metaclass = Meta" -> "metaclass=Meta" + expr_stmt = last_metaclass.children[0] + assert expr_stmt.type == syms.expr_stmt + expr_stmt.children[1].prefix = u'' + expr_stmt.children[2].prefix = u'' + + arglist.append_child(last_metaclass) + + fixup_indent(suite) + + # check for empty suite + if not suite.children: + # one-liner that was just __metaclass_ + suite.remove() + pass_leaf = Leaf(text_type, u'pass') + pass_leaf.prefix = orig_meta_prefix + node.append_child(pass_leaf) + node.append_child(Leaf(token.NEWLINE, u'\n')) + + elif len(suite.children) > 1 and \ + (suite.children[-2].type == token.INDENT and + suite.children[-1].type == token.DEDENT): + # there was only one line in the class body and it was __metaclass__ + pass_leaf = Leaf(text_type, u'pass') + suite.insert_child(-1, pass_leaf) + suite.insert_child(-1, Leaf(token.NEWLINE, u'\n')) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_methodattrs.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_methodattrs.py new file mode 100644 index 00000000..f3c1ecfe --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_methodattrs.py @@ -0,0 +1,24 @@ +"""Fix bound method attributes (method.im_? -> method.__?__). +""" +# Author: Christian Heimes + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +MAP = { + "im_func" : "__func__", + "im_self" : "__self__", + "im_class" : "__self__.__class__" + } + +class FixMethodattrs(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* > + """ + + def transform(self, node, results): + attr = results["attr"][0] + new = unicode(MAP[attr.value]) + attr.replace(Name(new, prefix=attr.prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_ne.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_ne.py new file mode 100644 index 00000000..7025980b --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_ne.py @@ -0,0 +1,23 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that turns <> into !=.""" + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base + + +class FixNe(fixer_base.BaseFix): + # This is so simple that we don't need the pattern compiler. + + _accept_type = token.NOTEQUAL + + def match(self, node): + # Override + return node.value == u"<>" + + def transform(self, node, results): + new = pytree.Leaf(token.NOTEQUAL, u"!=", prefix=node.prefix) + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_next.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_next.py new file mode 100644 index 00000000..f021a9bd --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_next.py @@ -0,0 +1,103 @@ +"""Fixer for it.next() -> next(it), per PEP 3114.""" +# Author: Collin Winter + +# Things that currently aren't covered: +# - listcomp "next" names aren't warned +# - "with" statement targets aren't checked + +# Local imports +from ..pgen2 import token +from ..pygram import python_symbols as syms +from .. 
import fixer_base +from ..fixer_util import Name, Call, find_binding + +bind_warning = "Calls to builtin next() possibly shadowed by global binding" + + +class FixNext(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > + | + power< head=any+ trailer< '.' attr='next' > not trailer< '(' ')' > > + | + classdef< 'class' any+ ':' + suite< any* + funcdef< 'def' + name='next' + parameters< '(' NAME ')' > any+ > + any* > > + | + global=global_stmt< 'global' any* 'next' any* > + """ + + order = "pre" # Pre-order tree traversal + + def start_tree(self, tree, filename): + super(FixNext, self).start_tree(tree, filename) + + n = find_binding(u'next', tree) + if n: + self.warning(n, bind_warning) + self.shadowed_next = True + else: + self.shadowed_next = False + + def transform(self, node, results): + assert results + + base = results.get("base") + attr = results.get("attr") + name = results.get("name") + + if base: + if self.shadowed_next: + attr.replace(Name(u"__next__", prefix=attr.prefix)) + else: + base = [n.clone() for n in base] + base[0].prefix = u"" + node.replace(Call(Name(u"next", prefix=node.prefix), base)) + elif name: + n = Name(u"__next__", prefix=name.prefix) + name.replace(n) + elif attr: + # We don't do this transformation if we're assigning to "x.next". + # Unfortunately, it doesn't seem possible to do this in PATTERN, + # so it's being done here. + if is_assign_target(node): + head = results["head"] + if "".join([str(n) for n in head]).strip() == u'__builtin__': + self.warning(node, bind_warning) + return + attr.replace(Name(u"__next__")) + elif "global" in results: + self.warning(node, bind_warning) + self.shadowed_next = True + + +### The following functions help test if node is part of an assignment +### target. + +def is_assign_target(node): + assign = find_assign(node) + if assign is None: + return False + + for child in assign.children: + if child.type == token.EQUAL: + return False + elif is_subtree(child, node): + return True + return False + +def find_assign(node): + if node.type == syms.expr_stmt: + return node + if node.type == syms.simple_stmt or node.parent is None: + return None + return find_assign(node.parent) + +def is_subtree(root, node): + if root == node: + return True + return any(is_subtree(c, node) for c in root.children) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_nonzero.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_nonzero.py new file mode 100644 index 00000000..ba83478f --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_nonzero.py @@ -0,0 +1,21 @@ +"""Fixer for __nonzero__ -> __bool__ methods.""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, syms + +class FixNonzero(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + classdef< 'class' any+ ':' + suite< any* + funcdef< 'def' name='__nonzero__' + parameters< '(' NAME ')' > any+ > + any* > > + """ + + def transform(self, node, results): + name = results["name"] + new = Name(u"__bool__", prefix=name.prefix) + name.replace(new) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_numliterals.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_numliterals.py new file mode 100644 index 00000000..b0c23f80 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_numliterals.py @@ -0,0 +1,28 @@ +"""Fixer that turns 1L into 1, 0755 into 0o755. 
+""" +# Copyright 2007 Georg Brandl. +# Licensed to PSF under a Contributor Agreement. + +# Local imports +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Number + + +class FixNumliterals(fixer_base.BaseFix): + # This is so simple that we don't need the pattern compiler. + + _accept_type = token.NUMBER + + def match(self, node): + # Override + return (node.value.startswith(u"0") or node.value[-1] in u"Ll") + + def transform(self, node, results): + val = node.value + if val[-1] in u'Ll': + val = val[:-1] + elif val.startswith(u'0') and val.isdigit() and len(set(val)) > 1: + val = u"0o" + val[1:] + + return Number(val, prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_operator.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_operator.py new file mode 100644 index 00000000..7bf2c0dd --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_operator.py @@ -0,0 +1,96 @@ +"""Fixer for operator functions. + +operator.isCallable(obj) -> hasattr(obj, '__call__') +operator.sequenceIncludes(obj) -> operator.contains(obj) +operator.isSequenceType(obj) -> isinstance(obj, collections.Sequence) +operator.isMappingType(obj) -> isinstance(obj, collections.Mapping) +operator.isNumberType(obj) -> isinstance(obj, numbers.Number) +operator.repeat(obj, n) -> operator.mul(obj, n) +operator.irepeat(obj, n) -> operator.imul(obj, n) +""" + +# Local imports +from lib2to3 import fixer_base +from lib2to3.fixer_util import Call, Name, String, touch_import + + +def invocation(s): + def dec(f): + f.invocation = s + return f + return dec + + +class FixOperator(fixer_base.BaseFix): + BM_compatible = True + order = "pre" + + methods = """ + method=('isCallable'|'sequenceIncludes' + |'isSequenceType'|'isMappingType'|'isNumberType' + |'repeat'|'irepeat') + """ + obj = "'(' obj=any ')'" + PATTERN = """ + power< module='operator' + trailer< '.' 
%(methods)s > trailer< %(obj)s > > + | + power< %(methods)s trailer< %(obj)s > > + """ % dict(methods=methods, obj=obj) + + def transform(self, node, results): + method = self._check_method(node, results) + if method is not None: + return method(node, results) + + @invocation("operator.contains(%s)") + def _sequenceIncludes(self, node, results): + return self._handle_rename(node, results, u"contains") + + @invocation("hasattr(%s, '__call__')") + def _isCallable(self, node, results): + obj = results["obj"] + args = [obj.clone(), String(u", "), String(u"'__call__'")] + return Call(Name(u"hasattr"), args, prefix=node.prefix) + + @invocation("operator.mul(%s)") + def _repeat(self, node, results): + return self._handle_rename(node, results, u"mul") + + @invocation("operator.imul(%s)") + def _irepeat(self, node, results): + return self._handle_rename(node, results, u"imul") + + @invocation("isinstance(%s, collections.Sequence)") + def _isSequenceType(self, node, results): + return self._handle_type2abc(node, results, u"collections", u"Sequence") + + @invocation("isinstance(%s, collections.Mapping)") + def _isMappingType(self, node, results): + return self._handle_type2abc(node, results, u"collections", u"Mapping") + + @invocation("isinstance(%s, numbers.Number)") + def _isNumberType(self, node, results): + return self._handle_type2abc(node, results, u"numbers", u"Number") + + def _handle_rename(self, node, results, name): + method = results["method"][0] + method.value = name + method.changed() + + def _handle_type2abc(self, node, results, module, abc): + touch_import(None, module, node) + obj = results["obj"] + args = [obj.clone(), String(u", " + u".".join([module, abc]))] + return Call(Name(u"isinstance"), args, prefix=node.prefix) + + def _check_method(self, node, results): + method = getattr(self, "_" + results["method"][0].value.encode("ascii")) + if callable(method): + if "module" in results: + return method + else: + sub = (unicode(results["obj"]),) + invocation_str = unicode(method.invocation) % sub + self.warning(node, u"You should use '%s' here." % invocation_str) + return None diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_paren.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_paren.py new file mode 100644 index 00000000..8650cd90 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_paren.py @@ -0,0 +1,44 @@ +"""Fixer that addes parentheses where they are required + +This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``.""" + +# By Taek Joo Kim and Benjamin Peterson + +# Local imports +from .. 
import fixer_base +from ..fixer_util import LParen, RParen + +# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2] +class FixParen(fixer_base.BaseFix): + BM_compatible = True + + PATTERN = """ + atom< ('[' | '(') + (listmaker< any + comp_for< + 'for' NAME 'in' + target=testlist_safe< any (',' any)+ [','] + > + [any] + > + > + | + testlist_gexp< any + comp_for< + 'for' NAME 'in' + target=testlist_safe< any (',' any)+ [','] + > + [any] + > + >) + (']' | ')') > + """ + + def transform(self, node, results): + target = results["target"] + + lparen = LParen() + lparen.prefix = target.prefix + target.prefix = u"" # Make it hug the parentheses + target.insert_child(0, lparen) + target.append_child(RParen()) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_print.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_print.py new file mode 100644 index 00000000..98786b3e --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_print.py @@ -0,0 +1,87 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for print. + +Change: + 'print' into 'print()' + 'print ...' into 'print(...)' + 'print ... ,' into 'print(..., end=" ")' + 'print >>x, ...' into 'print(..., file=x)' + +No changes are applied if print_function is imported from __future__ + +""" + +# Local imports +from .. import patcomp +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, Comma, String, is_tuple + + +parend_expr = patcomp.compile_pattern( + """atom< '(' [atom|STRING|NAME] ')' >""" + ) + + +class FixPrint(fixer_base.BaseFix): + + BM_compatible = True + + PATTERN = """ + simple_stmt< any* bare='print' any* > | print_stmt + """ + + def transform(self, node, results): + assert results + + bare_print = results.get("bare") + + if bare_print: + # Special-case print all by itself + bare_print.replace(Call(Name(u"print"), [], + prefix=bare_print.prefix)) + return + assert node.children[0] == Name(u"print") + args = node.children[1:] + if len(args) == 1 and parend_expr.match(args[0]): + # We don't want to keep sticking parens around an + # already-parenthesised expression. + return + + sep = end = file = None + if args and args[-1] == Comma(): + args = args[:-1] + end = " " + if args and args[0] == pytree.Leaf(token.RIGHTSHIFT, u">>"): + assert len(args) >= 2 + file = args[1].clone() + args = args[3:] # Strip a possible comma after the file expression + # Now synthesize a print(args, sep=..., end=..., file=...) node. 
+ l_args = [arg.clone() for arg in args] + if l_args: + l_args[0].prefix = u"" + if sep is not None or end is not None or file is not None: + if sep is not None: + self.add_kwarg(l_args, u"sep", String(repr(sep))) + if end is not None: + self.add_kwarg(l_args, u"end", String(repr(end))) + if file is not None: + self.add_kwarg(l_args, u"file", file) + n_stmt = Call(Name(u"print"), l_args) + n_stmt.prefix = node.prefix + return n_stmt + + def add_kwarg(self, l_nodes, s_kwd, n_expr): + # XXX All this prefix-setting may lose comments (though rarely) + n_expr.prefix = u"" + n_argument = pytree.Node(self.syms.argument, + (Name(s_kwd), + pytree.Leaf(token.EQUAL, u"="), + n_expr)) + if l_nodes: + l_nodes.append(Comma()) + n_argument.prefix = u" " + l_nodes.append(n_argument) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_raise.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_raise.py new file mode 100644 index 00000000..b958ba01 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_raise.py @@ -0,0 +1,90 @@ +"""Fixer for 'raise E, V, T' + +raise -> raise +raise E -> raise E +raise E, V -> raise E(V) +raise E, V, T -> raise E(V).with_traceback(T) +raise E, None, T -> raise E.with_traceback(T) + +raise (((E, E'), E''), E'''), V -> raise E(V) +raise "foo", V, T -> warns about string exceptions + + +CAVEATS: +1) "raise E, V" will be incorrectly translated if V is an exception + instance. The correct Python 3 idiom is + + raise E from V + + but since we can't detect instance-hood by syntax alone and since + any client code would have to be changed as well, we don't automate + this. +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, Attr, ArgList, is_tuple + +class FixRaise(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type == token.STRING: + msg = "Python 3 does not support string exceptions" + self.cannot_convert(node, msg) + return + + # Python 2 supports + # raise ((((E1, E2), E3), E4), E5), V + # as a synonym for + # raise E1, V + # Since Python 3 will not support this, we recurse down any tuple + # literals, always taking the first element. + if is_tuple(exc): + while is_tuple(exc): + # exc.children[1:-1] is the unparenthesized tuple + # exc.children[1].children[0] is the first element of the tuple + exc = exc.children[1].children[0].clone() + exc.prefix = u" " + + if "val" not in results: + # One-argument raise + new = pytree.Node(syms.raise_stmt, [Name(u"raise"), exc]) + new.prefix = node.prefix + return new + + val = results["val"].clone() + if is_tuple(val): + args = [c.clone() for c in val.children[1:-1]] + else: + val.prefix = u"" + args = [val] + + if "tb" in results: + tb = results["tb"].clone() + tb.prefix = u"" + + e = exc + # If there's a traceback and None is passed as the value, then don't + # add a call, since the user probably just wants to add a + # traceback. See issue #9661. 
+ if val.type != token.NAME or val.value != u"None": + e = Call(exc, args) + with_tb = Attr(e, Name(u'with_traceback')) + [ArgList([tb])] + new = pytree.Node(syms.simple_stmt, [Name(u"raise")] + with_tb) + new.prefix = node.prefix + return new + else: + return pytree.Node(syms.raise_stmt, + [Name(u"raise"), Call(exc, args)], + prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_raw_input.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_raw_input.py new file mode 100644 index 00000000..3a73b818 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_raw_input.py @@ -0,0 +1,17 @@ +"""Fixer that changes raw_input(...) into input(...).""" +# Author: Andre Roberge + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + +class FixRawInput(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + power< name='raw_input' trailer< '(' [any] ')' > any* > + """ + + def transform(self, node, results): + name = results["name"] + name.replace(Name(u"input", prefix=name.prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_reduce.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_reduce.py new file mode 100644 index 00000000..6bd785c1 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_reduce.py @@ -0,0 +1,35 @@ +# Copyright 2008 Armin Ronacher. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for reduce(). + +Makes sure reduce() is imported from the functools module if reduce is +used in that module. +""" + +from lib2to3 import fixer_base +from lib2to3.fixer_util import touch_import + + + +class FixReduce(fixer_base.BaseFix): + + BM_compatible = True + order = "pre" + + PATTERN = """ + power< 'reduce' + trailer< '(' + arglist< ( + (not(argument) any ',' + not(argument + > + """ + + def transform(self, node, results): + touch_import(u'functools', u'reduce', node) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_renames.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_renames.py new file mode 100644 index 00000000..4bcce8c4 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_renames.py @@ -0,0 +1,70 @@ +"""Fix incompatible renames + +Fixes: + * sys.maxint -> sys.maxsize +""" +# Author: Christian Heimes +# based on Collin Winter's fix_import + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, attr_chain + +MAPPING = {"sys": {"maxint" : "maxsize"}, + } +LOOKUP = {} + +def alternates(members): + return "(" + "|".join(map(repr, members)) + ")" + + +def build_pattern(): + #bare = set() + for module, replace in MAPPING.items(): + for old_attr, new_attr in replace.items(): + LOOKUP[(module, old_attr)] = new_attr + #bare.add(module) + #bare.add(old_attr) + #yield """ + # import_name< 'import' (module=%r + # | dotted_as_names< any* module=%r any* >) > + # """ % (module, module) + yield """ + import_from< 'from' module_name=%r 'import' + ( attr_name=%r | import_as_name< attr_name=%r 'as' any >) > + """ % (module, old_attr, old_attr) + yield """ + power< module_name=%r trailer< '.' 
attr_name=%r > any* > + """ % (module, old_attr) + #yield """bare_name=%s""" % alternates(bare) + + +class FixRenames(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "|".join(build_pattern()) + + order = "pre" # Pre-order tree traversal + + # Don't match the node if it's within another match + def match(self, node): + match = super(FixRenames, self).match + results = match(node) + if results: + if any(match(obj) for obj in attr_chain(node, "parent")): + return False + return results + return False + + #def start_tree(self, tree, filename): + # super(FixRenames, self).start_tree(tree, filename) + # self.replace = {} + + def transform(self, node, results): + mod_name = results.get("module_name") + attr_name = results.get("attr_name") + #bare_name = results.get("bare_name") + #import_mod = results.get("module") + + if mod_name and attr_name: + new_attr = unicode(LOOKUP[(mod_name.value, attr_name.value)]) + attr_name.replace(Name(new_attr, prefix=attr_name.prefix)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_repr.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_repr.py new file mode 100644 index 00000000..f3436564 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_repr.py @@ -0,0 +1,23 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that transforms `xyzzy` into repr(xyzzy).""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Call, Name, parenthesize + + +class FixRepr(fixer_base.BaseFix): + + BM_compatible = True + PATTERN = """ + atom < '`' expr=any '`' > + """ + + def transform(self, node, results): + expr = results["expr"].clone() + + if expr.type == self.syms.testlist1: + expr = parenthesize(expr) + return Call(Name(u"repr"), [expr], prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_set_literal.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_set_literal.py new file mode 100644 index 00000000..d3d38ec4 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_set_literal.py @@ -0,0 +1,53 @@ +""" +Optional fixer to transform set() calls to set literals. +""" + +# Author: Benjamin Peterson + +from lib2to3 import fixer_base, pytree +from lib2to3.fixer_util import token, syms + + + +class FixSetLiteral(fixer_base.BaseFix): + + BM_compatible = True + explicit = True + + PATTERN = """power< 'set' trailer< '(' + (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) > + | + single=any) ']' > + | + atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' > + ) + ')' > > + """ + + def transform(self, node, results): + single = results.get("single") + if single: + # Make a fake listmaker + fake = pytree.Node(syms.listmaker, [single.clone()]) + single.replace(fake) + items = fake + else: + items = results["items"] + + # Build the contents of the literal + literal = [pytree.Leaf(token.LBRACE, u"{")] + literal.extend(n.clone() for n in items.children) + literal.append(pytree.Leaf(token.RBRACE, u"}")) + # Set the prefix of the right brace to that of the ')' or ']' + literal[-1].prefix = items.next_sibling.prefix + maker = pytree.Node(syms.dictsetmaker, literal) + maker.prefix = node.prefix + + # If the original was a one tuple, we need to remove the extra comma. + if len(maker.children) == 4: + n = maker.children[2] + n.remove() + maker.children[-1].prefix = n.prefix + + # Finally, replace the set call with our shiny new literal. 
+ return maker diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_standarderror.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_standarderror.py new file mode 100644 index 00000000..6cad5111 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_standarderror.py @@ -0,0 +1,18 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for StandardError -> Exception.""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixStandarderror(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + 'StandardError' + """ + + def transform(self, node, results): + return Name(u"Exception", prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_sys_exc.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_sys_exc.py new file mode 100644 index 00000000..2ecca2b5 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_sys_exc.py @@ -0,0 +1,30 @@ +"""Fixer for sys.exc_{type, value, traceback} + +sys.exc_type -> sys.exc_info()[0] +sys.exc_value -> sys.exc_info()[1] +sys.exc_traceback -> sys.exc_info()[2] +""" + +# By Jeff Balogh and Benjamin Peterson + +# Local imports +from .. import fixer_base +from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms + +class FixSysExc(fixer_base.BaseFix): + # This order matches the ordering of sys.exc_info(). + exc_info = [u"exc_type", u"exc_value", u"exc_traceback"] + BM_compatible = True + PATTERN = """ + power< 'sys' trailer< dot='.' attribute=(%s) > > + """ % '|'.join("'%s'" % e for e in exc_info) + + def transform(self, node, results): + sys_attr = results["attribute"][0] + index = Number(self.exc_info.index(sys_attr.value)) + + call = Call(Name(u"exc_info"), prefix=sys_attr.prefix) + attr = Attr(Name(u"sys"), call) + attr[1].children[0].prefix = results["dot"].prefix + attr.append(Subscript(index)) + return Node(syms.power, attr, prefix=node.prefix) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_throw.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_throw.py new file mode 100644 index 00000000..1468d89a --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_throw.py @@ -0,0 +1,56 @@ +"""Fixer for generator.throw(E, V, T). + +g.throw(E) -> g.throw(E) +g.throw(E, V) -> g.throw(E(V)) +g.throw(E, V, T) -> g.throw(E(V).with_traceback(T)) + +g.throw("foo"[, V[, T]]) will warn about string exceptions.""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Name, Call, ArgList, Attr, is_tuple + +class FixThrow(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< any trailer< '.' 'throw' > + trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > + > + | + power< any trailer< '.' 
'throw' > trailer< '(' exc=any ')' > > + """ + + def transform(self, node, results): + syms = self.syms + + exc = results["exc"].clone() + if exc.type is token.STRING: + self.cannot_convert(node, "Python 3 does not support string exceptions") + return + + # Leave "g.throw(E)" alone + val = results.get(u"val") + if val is None: + return + + val = val.clone() + if is_tuple(val): + args = [c.clone() for c in val.children[1:-1]] + else: + val.prefix = u"" + args = [val] + + throw_args = results["args"] + + if "tb" in results: + tb = results["tb"].clone() + tb.prefix = u"" + + e = Call(exc, args) + with_tb = Attr(e, Name(u'with_traceback')) + [ArgList([tb])] + throw_args.replace(pytree.Node(syms.power, with_tb)) + else: + throw_args.replace(Call(exc, args)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_tuple_params.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_tuple_params.py new file mode 100644 index 00000000..6361717d --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_tuple_params.py @@ -0,0 +1,175 @@ +"""Fixer for function definitions with tuple parameters. + +def func(((a, b), c), d): + ... + + -> + +def func(x, d): + ((a, b), c) = x + ... + +It will also support lambdas: + + lambda (x, y): x + y -> lambda t: t[0] + t[1] + + # The parens are a syntax error in Python 3 + lambda (x): x + y -> lambda x: x + y +""" +# Author: Collin Winter + +# Local imports +from .. import pytree +from ..pgen2 import token +from .. import fixer_base +from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms + +def is_docstring(stmt): + return isinstance(stmt, pytree.Node) and \ + stmt.children[0].type == token.STRING + +class FixTupleParams(fixer_base.BaseFix): + run_order = 4 #use a lower order since lambda is part of other + #patterns + BM_compatible = True + + PATTERN = """ + funcdef< 'def' any parameters< '(' args=any ')' > + ['->' any] ':' suite=any+ > + | + lambda= + lambdef< 'lambda' args=vfpdef< '(' inner=any ')' > + ':' body=any + > + """ + + def transform(self, node, results): + if "lambda" in results: + return self.transform_lambda(node, results) + + new_lines = [] + suite = results["suite"] + args = results["args"] + # This crap is so "def foo(...): x = 5; y = 7" is handled correctly. + # TODO(cwinter): suite-cleanup + if suite[0].children[1].type == token.INDENT: + start = 2 + indent = suite[0].children[1].value + end = Newline() + else: + start = 0 + indent = u"; " + end = pytree.Leaf(token.INDENT, u"") + + # We need access to self for new_name(), and making this a method + # doesn't feel right. Closing over self and new_lines makes the + # code below cleaner. + def handle_tuple(tuple_arg, add_prefix=False): + n = Name(self.new_name()) + arg = tuple_arg.clone() + arg.prefix = u"" + stmt = Assign(arg, n.clone()) + if add_prefix: + n.prefix = u" " + tuple_arg.replace(n) + new_lines.append(pytree.Node(syms.simple_stmt, + [stmt, end.clone()])) + + if args.type == syms.tfpdef: + handle_tuple(args) + elif args.type == syms.typedargslist: + for i, arg in enumerate(args.children): + if arg.type == syms.tfpdef: + # Without add_prefix, the emitted code is correct, + # just ugly. + handle_tuple(arg, add_prefix=(i > 0)) + + if not new_lines: + return + + # This isn't strictly necessary, but it plays nicely with other fixers. 
+ # TODO(cwinter) get rid of this when children becomes a smart list + for line in new_lines: + line.parent = suite[0] + + # TODO(cwinter) suite-cleanup + after = start + if start == 0: + new_lines[0].prefix = u" " + elif is_docstring(suite[0].children[start]): + new_lines[0].prefix = indent + after = start + 1 + + for line in new_lines: + line.parent = suite[0] + suite[0].children[after:after] = new_lines + for i in range(after+1, after+len(new_lines)+1): + suite[0].children[i].prefix = indent + suite[0].changed() + + def transform_lambda(self, node, results): + args = results["args"] + body = results["body"] + inner = simplify_args(results["inner"]) + + # Replace lambda ((((x)))): x with lambda x: x + if inner.type == token.NAME: + inner = inner.clone() + inner.prefix = u" " + args.replace(inner) + return + + params = find_params(args) + to_index = map_to_index(params) + tup_name = self.new_name(tuple_name(params)) + + new_param = Name(tup_name, prefix=u" ") + args.replace(new_param.clone()) + for n in body.post_order(): + if n.type == token.NAME and n.value in to_index: + subscripts = [c.clone() for c in to_index[n.value]] + new = pytree.Node(syms.power, + [new_param.clone()] + subscripts) + new.prefix = n.prefix + n.replace(new) + + +### Helper functions for transform_lambda() + +def simplify_args(node): + if node.type in (syms.vfplist, token.NAME): + return node + elif node.type == syms.vfpdef: + # These look like vfpdef< '(' x ')' > where x is NAME + # or another vfpdef instance (leading to recursion). + while node.type == syms.vfpdef: + node = node.children[1] + return node + raise RuntimeError("Received unexpected node %s" % node) + +def find_params(node): + if node.type == syms.vfpdef: + return find_params(node.children[1]) + elif node.type == token.NAME: + return node.value + return [find_params(c) for c in node.children if c.type != token.COMMA] + +def map_to_index(param_list, prefix=[], d=None): + if d is None: + d = {} + for i, obj in enumerate(param_list): + trailer = [Subscript(Number(unicode(i)))] + if isinstance(obj, list): + map_to_index(obj, trailer, d=d) + else: + d[obj] = prefix + trailer + return d + +def tuple_name(param_list): + l = [] + for obj in param_list: + if isinstance(obj, list): + l.append(tuple_name(obj)) + else: + l.append(obj) + return u"_".join(l) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_types.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_types.py new file mode 100644 index 00000000..fc9d4959 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_types.py @@ -0,0 +1,62 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer for removing uses of the types module. + +These work for only the known names in the types module. The forms above +can include types. or not. ie, It is assumed the module is imported either as: + + import types + from types import ... # either * or specific types + +The import statements are not modified. + +There should be another fixer that handles at least the following constants: + + type([]) -> list + type(()) -> tuple + type('') -> str + +""" + +# Local imports +from ..pgen2 import token +from .. 
import fixer_base +from ..fixer_util import Name + +_TYPE_MAPPING = { + 'BooleanType' : 'bool', + 'BufferType' : 'memoryview', + 'ClassType' : 'type', + 'ComplexType' : 'complex', + 'DictType': 'dict', + 'DictionaryType' : 'dict', + 'EllipsisType' : 'type(Ellipsis)', + #'FileType' : 'io.IOBase', + 'FloatType': 'float', + 'IntType': 'int', + 'ListType': 'list', + 'LongType': 'int', + 'ObjectType' : 'object', + 'NoneType': 'type(None)', + 'NotImplementedType' : 'type(NotImplemented)', + 'SliceType' : 'slice', + 'StringType': 'bytes', # XXX ? + 'StringTypes' : 'str', # XXX ? + 'TupleType': 'tuple', + 'TypeType' : 'type', + 'UnicodeType': 'str', + 'XRangeType' : 'range', + } + +_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING] + +class FixTypes(fixer_base.BaseFix): + BM_compatible = True + PATTERN = '|'.join(_pats) + + def transform(self, node, results): + new_value = unicode(_TYPE_MAPPING.get(results["name"].value)) + if new_value: + return Name(new_value, prefix=node.prefix) + return None diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_unicode.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_unicode.py new file mode 100644 index 00000000..2d776f61 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_unicode.py @@ -0,0 +1,42 @@ +r"""Fixer for unicode. + +* Changes unicode to str and unichr to chr. + +* If "...\u..." is not unicode literal change it into "...\\u...". + +* Change u"..." into "...". + +""" + +from ..pgen2 import token +from .. import fixer_base + +_mapping = {u"unichr" : u"chr", u"unicode" : u"str"} + +class FixUnicode(fixer_base.BaseFix): + BM_compatible = True + PATTERN = "STRING | 'unicode' | 'unichr'" + + def start_tree(self, tree, filename): + super(FixUnicode, self).start_tree(tree, filename) + self.unicode_literals = 'unicode_literals' in tree.future_features + + def transform(self, node, results): + if node.type == token.NAME: + new = node.clone() + new.value = _mapping[node.value] + return new + elif node.type == token.STRING: + val = node.value + if not self.unicode_literals and val[0] in u'\'"' and u'\\' in val: + val = ur'\\'.join([ + v.replace(u'\\u', ur'\\u').replace(u'\\U', ur'\\U') + for v in val.split(ur'\\') + ]) + if val[0] in u'uU': + val = val[1:] + if val == node.value: + return node + new = node.clone() + new.value = val + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_urllib.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_urllib.py new file mode 100644 index 00000000..34e1b270 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_urllib.py @@ -0,0 +1,197 @@ +"""Fix changes imports of urllib which are now incompatible. + This is rather similar to fix_imports, but because of the more + complex nature of the fixing for urllib, it has its own fixer. 
+""" +# Author: Nick Edds + +# Local imports +from lib2to3.fixes.fix_imports import alternates, FixImports +from lib2to3 import fixer_base +from lib2to3.fixer_util import (Name, Comma, FromImport, Newline, + find_indentation, Node, syms) + +MAPPING = {"urllib": [ + ("urllib.request", + ["URLopener", "FancyURLopener", "urlretrieve", + "_urlopener", "urlopen", "urlcleanup", + "pathname2url", "url2pathname"]), + ("urllib.parse", + ["quote", "quote_plus", "unquote", "unquote_plus", + "urlencode", "splitattr", "splithost", "splitnport", + "splitpasswd", "splitport", "splitquery", "splittag", + "splittype", "splituser", "splitvalue", ]), + ("urllib.error", + ["ContentTooShortError"])], + "urllib2" : [ + ("urllib.request", + ["urlopen", "install_opener", "build_opener", + "Request", "OpenerDirector", "BaseHandler", + "HTTPDefaultErrorHandler", "HTTPRedirectHandler", + "HTTPCookieProcessor", "ProxyHandler", + "HTTPPasswordMgr", + "HTTPPasswordMgrWithDefaultRealm", + "AbstractBasicAuthHandler", + "HTTPBasicAuthHandler", "ProxyBasicAuthHandler", + "AbstractDigestAuthHandler", + "HTTPDigestAuthHandler", "ProxyDigestAuthHandler", + "HTTPHandler", "HTTPSHandler", "FileHandler", + "FTPHandler", "CacheFTPHandler", + "UnknownHandler"]), + ("urllib.error", + ["URLError", "HTTPError"]), + ] +} + +# Duplicate the url parsing functions for urllib2. +MAPPING["urllib2"].append(MAPPING["urllib"][1]) + + +def build_pattern(): + bare = set() + for old_module, changes in MAPPING.items(): + for change in changes: + new_module, members = change + members = alternates(members) + yield """import_name< 'import' (module=%r + | dotted_as_names< any* module=%r any* >) > + """ % (old_module, old_module) + yield """import_from< 'from' mod_member=%r 'import' + ( member=%s | import_as_name< member=%s 'as' any > | + import_as_names< members=any* >) > + """ % (old_module, members, members) + yield """import_from< 'from' module_star=%r 'import' star='*' > + """ % old_module + yield """import_name< 'import' + dotted_as_name< module_as=%r 'as' any > > + """ % old_module + # bare_with_attr has a special significance for FixImports.match(). + yield """power< bare_with_attr=%r trailer< '.' member=%s > any* > + """ % (old_module, members) + + +class FixUrllib(FixImports): + + def build_pattern(self): + return "|".join(build_pattern()) + + def transform_import(self, node, results): + """Transform for the basic import case. Replaces the old + import name with a comma separated list of its + replacements. + """ + import_mod = results.get("module") + pref = import_mod.prefix + + names = [] + + # create a Node list of the replacement modules + for name in MAPPING[import_mod.value][:-1]: + names.extend([Name(name[0], prefix=pref), Comma()]) + names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref)) + import_mod.replace(names) + + def transform_member(self, node, results): + """Transform for imports of specific module elements. Replaces + the module to be imported from with the appropriate new + module. 
+ """ + mod_member = results.get("mod_member") + pref = mod_member.prefix + member = results.get("member") + + # Simple case with only a single member being imported + if member: + # this may be a list of length one, or just a node + if isinstance(member, list): + member = member[0] + new_name = None + for change in MAPPING[mod_member.value]: + if member.value in change[1]: + new_name = change[0] + break + if new_name: + mod_member.replace(Name(new_name, prefix=pref)) + else: + self.cannot_convert(node, "This is an invalid module element") + + # Multiple members being imported + else: + # a dictionary for replacements, order matters + modules = [] + mod_dict = {} + members = results["members"] + for member in members: + # we only care about the actual members + if member.type == syms.import_as_name: + as_name = member.children[2].value + member_name = member.children[0].value + else: + member_name = member.value + as_name = None + if member_name != u",": + for change in MAPPING[mod_member.value]: + if member_name in change[1]: + if change[0] not in mod_dict: + modules.append(change[0]) + mod_dict.setdefault(change[0], []).append(member) + + new_nodes = [] + indentation = find_indentation(node) + first = True + def handle_name(name, prefix): + if name.type == syms.import_as_name: + kids = [Name(name.children[0].value, prefix=prefix), + name.children[1].clone(), + name.children[2].clone()] + return [Node(syms.import_as_name, kids)] + return [Name(name.value, prefix=prefix)] + for module in modules: + elts = mod_dict[module] + names = [] + for elt in elts[:-1]: + names.extend(handle_name(elt, pref)) + names.append(Comma()) + names.extend(handle_name(elts[-1], pref)) + new = FromImport(module, names) + if not first or node.parent.prefix.endswith(indentation): + new.prefix = indentation + new_nodes.append(new) + first = False + if new_nodes: + nodes = [] + for new_node in new_nodes[:-1]: + nodes.extend([new_node, Newline()]) + nodes.append(new_nodes[-1]) + node.replace(nodes) + else: + self.cannot_convert(node, "All module elements are invalid") + + def transform_dot(self, node, results): + """Transform for calls to module members in code.""" + module_dot = results.get("bare_with_attr") + member = results.get("member") + new_name = None + if isinstance(member, list): + member = member[0] + for change in MAPPING[module_dot.value]: + if member.value in change[1]: + new_name = change[0] + break + if new_name: + module_dot.replace(Name(new_name, + prefix=module_dot.prefix)) + else: + self.cannot_convert(node, "This is an invalid module element") + + def transform(self, node, results): + if results.get("module"): + self.transform_import(node, results) + elif results.get("mod_member"): + self.transform_member(node, results) + elif results.get("bare_with_attr"): + self.transform_dot(node, results) + # Renaming and star imports are not supported for these modules. + elif results.get("module_star"): + self.cannot_convert(node, "Cannot handle star imports.") + elif results.get("module_as"): + self.cannot_convert(node, "This module is now multiple modules") diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_ws_comma.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_ws_comma.py new file mode 100644 index 00000000..37ff6244 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_ws_comma.py @@ -0,0 +1,39 @@ +"""Fixer that changes 'a ,b' into 'a, b'. + +This also changes '{a :b}' into '{a: b}', but does not touch other +uses of colons. 
It does not touch other uses of whitespace. + +""" + +from .. import pytree +from ..pgen2 import token +from .. import fixer_base + +class FixWsComma(fixer_base.BaseFix): + + explicit = True # The user must ask for this fixers + + PATTERN = """ + any<(not(',') any)+ ',' ((not(',') any)+ ',')* [not(',') any]> + """ + + COMMA = pytree.Leaf(token.COMMA, u",") + COLON = pytree.Leaf(token.COLON, u":") + SEPS = (COMMA, COLON) + + def transform(self, node, results): + new = node.clone() + comma = False + for child in new.children: + if child in self.SEPS: + prefix = child.prefix + if prefix.isspace() and u"\n" not in prefix: + child.prefix = u"" + comma = True + else: + if comma: + prefix = child.prefix + if not prefix: + child.prefix = u" " + comma = False + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_xrange.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_xrange.py new file mode 100644 index 00000000..f1436724 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_xrange.py @@ -0,0 +1,73 @@ +# Copyright 2007 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Fixer that changes xrange(...) into range(...).""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, Call, consuming_calls +from .. import patcomp + + +class FixXrange(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< + (name='range'|name='xrange') trailer< '(' args=any ')' > + rest=any* > + """ + + def start_tree(self, tree, filename): + super(FixXrange, self).start_tree(tree, filename) + self.transformed_xranges = set() + + def finish_tree(self, tree, filename): + self.transformed_xranges = None + + def transform(self, node, results): + name = results["name"] + if name.value == u"xrange": + return self.transform_xrange(node, results) + elif name.value == u"range": + return self.transform_range(node, results) + else: + raise ValueError(repr(name)) + + def transform_xrange(self, node, results): + name = results["name"] + name.replace(Name(u"range", prefix=name.prefix)) + # This prevents the new range call from being wrapped in a list later. + self.transformed_xranges.add(id(node)) + + def transform_range(self, node, results): + if (id(node) not in self.transformed_xranges and + not self.in_special_context(node)): + range_call = Call(Name(u"range"), [results["args"].clone()]) + # Encase the range call in list(). + list_call = Call(Name(u"list"), [range_call], + prefix=node.prefix) + # Put things that were after the range() call after the list call. + for n in results["rest"]: + list_call.append_child(n) + return list_call + + P1 = "power< func=NAME trailer< '(' node=any ')' > any* >" + p1 = patcomp.compile_pattern(P1) + + P2 = """for_stmt< 'for' any 'in' node=any ':' any* > + | comp_for< 'for' any 'in' node=any any* > + | comparison< any 'in' node=any any*> + """ + p2 = patcomp.compile_pattern(P2) + + def in_special_context(self, node): + if node.parent is None: + return False + results = {} + if (node.parent.parent is not None and + self.p1.match(node.parent.parent, results) and + results["node"] is node): + # list(d.keys()) -> list(d.keys()), etc. + return results["func"].value in consuming_calls + # for ... in d.iterkeys() -> for ... in d.keys(), etc. 
+ return self.p2.match(node.parent, results) and results["node"] is node diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_xreadlines.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_xreadlines.py new file mode 100644 index 00000000..f50b9a27 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_xreadlines.py @@ -0,0 +1,25 @@ +"""Fix "for x in f.xreadlines()" -> "for x in f". + +This fixer will also convert g(f.xreadlines) into g(f.__iter__).""" +# Author: Collin Winter + +# Local imports +from .. import fixer_base +from ..fixer_util import Name + + +class FixXreadlines(fixer_base.BaseFix): + BM_compatible = True + PATTERN = """ + power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > > + | + power< any+ trailer< '.' no_call='xreadlines' > > + """ + + def transform(self, node, results): + no_call = results.get("no_call") + + if no_call: + no_call.replace(Name(u"__iter__", prefix=no_call.prefix)) + else: + node.replace([x.clone() for x in results["call"]]) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_zip.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_zip.py new file mode 100644 index 00000000..c5d7b66d --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/fixes/fix_zip.py @@ -0,0 +1,35 @@ +""" +Fixer that changes zip(seq0, seq1, ...) into list(zip(seq0, seq1, ...) +unless there exists a 'from future_builtins import zip' statement in the +top-level namespace. + +We avoid the transformation if the zip() call is directly contained in +iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. +""" + +# Local imports +from .. import fixer_base +from ..fixer_util import Name, Call, in_special_context + +class FixZip(fixer_base.ConditionalFix): + + BM_compatible = True + PATTERN = """ + power< 'zip' args=trailer< '(' [any] ')' > + > + """ + + skip_on = "future_builtins.zip" + + def transform(self, node, results): + if self.should_skip(node): + return + + if in_special_context(node): + return None + + new = node.clone() + new.prefix = u"" + new = Call(Name(u"list"), [new]) + new.prefix = node.prefix + return new diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/main.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/main.py new file mode 100644 index 00000000..ad0625e5 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/main.py @@ -0,0 +1,269 @@ +""" +Main program for 2to3. +""" + +from __future__ import with_statement + +import sys +import os +import difflib +import logging +import shutil +import optparse + +from . import refactor + + +def diff_texts(a, b, filename): + """Return a unified diff of two strings.""" + a = a.splitlines() + b = b.splitlines() + return difflib.unified_diff(a, b, filename, filename, + "(original)", "(refactored)", + lineterm="") + + +class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool): + """ + A refactoring tool that can avoid overwriting its input files. + Prints output to stdout. + + Output files can optionally be written to a different directory and or + have an extra file suffix appended to their name for use in situations + where you do not want to replace the input files. + """ + + def __init__(self, fixers, options, explicit, nobackups, show_diffs, + input_base_dir='', output_dir='', append_suffix=''): + """ + Args: + fixers: A list of fixers to import. + options: A dict with RefactoringTool configuration. + explicit: A list of fixers to run even if they are explicit. 
+ nobackups: If true no backup '.bak' files will be created for those + files that are being refactored. + show_diffs: Should diffs of the refactoring be printed to stdout? + input_base_dir: The base directory for all input files. This class + will strip this path prefix off of filenames before substituting + it with output_dir. Only meaningful if output_dir is supplied. + All files processed by refactor() must start with this path. + output_dir: If supplied, all converted files will be written into + this directory tree instead of input_base_dir. + append_suffix: If supplied, all files output by this tool will have + this appended to their filename. Useful for changing .py to + .py3 for example by passing append_suffix='3'. + """ + self.nobackups = nobackups + self.show_diffs = show_diffs + if input_base_dir and not input_base_dir.endswith(os.sep): + input_base_dir += os.sep + self._input_base_dir = input_base_dir + self._output_dir = output_dir + self._append_suffix = append_suffix + super(StdoutRefactoringTool, self).__init__(fixers, options, explicit) + + def log_error(self, msg, *args, **kwargs): + self.errors.append((msg, args, kwargs)) + self.logger.error(msg, *args, **kwargs) + + def write_file(self, new_text, filename, old_text, encoding): + orig_filename = filename + if self._output_dir: + if filename.startswith(self._input_base_dir): + filename = os.path.join(self._output_dir, + filename[len(self._input_base_dir):]) + else: + raise ValueError('filename %s does not start with the ' + 'input_base_dir %s' % ( + filename, self._input_base_dir)) + if self._append_suffix: + filename += self._append_suffix + if orig_filename != filename: + output_dir = os.path.dirname(filename) + if not os.path.isdir(output_dir): + os.makedirs(output_dir) + self.log_message('Writing converted %s to %s.', orig_filename, + filename) + if not self.nobackups: + # Make backup + backup = filename + ".bak" + if os.path.lexists(backup): + try: + os.remove(backup) + except os.error, err: + self.log_message("Can't remove backup %s", backup) + try: + os.rename(filename, backup) + except os.error, err: + self.log_message("Can't rename %s to %s", filename, backup) + # Actually write the new file + write = super(StdoutRefactoringTool, self).write_file + write(new_text, filename, old_text, encoding) + if not self.nobackups: + shutil.copymode(backup, filename) + if orig_filename != filename: + # Preserve the file mode in the new output directory. + shutil.copymode(orig_filename, filename) + + def print_output(self, old, new, filename, equal): + if equal: + self.log_message("No changes to %s", filename) + else: + self.log_message("Refactored %s", filename) + if self.show_diffs: + diff_lines = diff_texts(old, new, filename) + try: + if self.output_lock is not None: + with self.output_lock: + for line in diff_lines: + print line + sys.stdout.flush() + else: + for line in diff_lines: + print line + except UnicodeEncodeError: + warn("couldn't encode %s's diff for your terminal" % + (filename,)) + return + + +def warn(msg): + print >> sys.stderr, "WARNING: %s" % (msg,) + + +def main(fixer_pkg, args=None): + """Main program. + + Args: + fixer_pkg: the name of a package where the fixers are located. + args: optional; a list of command line arguments. If omitted, + sys.argv[1:] is used. + + Returns a suggested exit status (0, 1, 2). 
+ """ + # Set up option parser + parser = optparse.OptionParser(usage="2to3 [options] file|dir ...") + parser.add_option("-d", "--doctests_only", action="store_true", + help="Fix up doctests only") + parser.add_option("-f", "--fix", action="append", default=[], + help="Each FIX specifies a transformation; default: all") + parser.add_option("-j", "--processes", action="store", default=1, + type="int", help="Run 2to3 concurrently") + parser.add_option("-x", "--nofix", action="append", default=[], + help="Prevent a transformation from being run") + parser.add_option("-l", "--list-fixes", action="store_true", + help="List available transformations") + parser.add_option("-p", "--print-function", action="store_true", + help="Modify the grammar so that print() is a function") + parser.add_option("-v", "--verbose", action="store_true", + help="More verbose logging") + parser.add_option("--no-diffs", action="store_true", + help="Don't show diffs of the refactoring") + parser.add_option("-w", "--write", action="store_true", + help="Write back modified files") + parser.add_option("-n", "--nobackups", action="store_true", default=False, + help="Don't write backups for modified files") + parser.add_option("-o", "--output-dir", action="store", type="str", + default="", help="Put output files in this directory " + "instead of overwriting the input files. Requires -n.") + parser.add_option("-W", "--write-unchanged-files", action="store_true", + help="Also write files even if no changes were required" + " (useful with --output-dir); implies -w.") + parser.add_option("--add-suffix", action="store", type="str", default="", + help="Append this string to all output filenames." + " Requires -n if non-empty. " + "ex: --add-suffix='3' will generate .py3 files.") + + # Parse command line arguments + refactor_stdin = False + flags = {} + options, args = parser.parse_args(args) + if options.write_unchanged_files: + flags["write_unchanged_files"] = True + if not options.write: + warn("--write-unchanged-files/-W implies -w.") + options.write = True + # If we allowed these, the original files would be renamed to backup names + # but not replaced. + if options.output_dir and not options.nobackups: + parser.error("Can't use --output-dir/-o without -n.") + if options.add_suffix and not options.nobackups: + parser.error("Can't use --add-suffix without -n.") + + if not options.write and options.no_diffs: + warn("not writing files and not printing diffs; that's not very useful") + if not options.write and options.nobackups: + parser.error("Can't use -n without -w") + if options.list_fixes: + print "Available transformations for the -f/--fix option:" + for fixname in refactor.get_all_fix_names(fixer_pkg): + print fixname + if not args: + return 0 + if not args: + print >> sys.stderr, "At least one file or directory argument required." + print >> sys.stderr, "Use --help to show usage." + return 2 + if "-" in args: + refactor_stdin = True + if options.write: + print >> sys.stderr, "Can't write to stdin." 
+ return 2 + if options.print_function: + flags["print_function"] = True + + # Set up logging handler + level = logging.DEBUG if options.verbose else logging.INFO + logging.basicConfig(format='%(name)s: %(message)s', level=level) + logger = logging.getLogger('lib2to3.main') + + # Initialize the refactoring tool + avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg)) + unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix) + explicit = set() + if options.fix: + all_present = False + for fix in options.fix: + if fix == "all": + all_present = True + else: + explicit.add(fixer_pkg + ".fix_" + fix) + requested = avail_fixes.union(explicit) if all_present else explicit + else: + requested = avail_fixes.union(explicit) + fixer_names = requested.difference(unwanted_fixes) + input_base_dir = os.path.commonprefix(args) + if (input_base_dir and not input_base_dir.endswith(os.sep) + and not os.path.isdir(input_base_dir)): + # One or more similar names were passed, their directory is the base. + # os.path.commonprefix() is ignorant of path elements, this corrects + # for that weird API. + input_base_dir = os.path.dirname(input_base_dir) + if options.output_dir: + input_base_dir = input_base_dir.rstrip(os.sep) + logger.info('Output in %r will mirror the input directory %r layout.', + options.output_dir, input_base_dir) + rt = StdoutRefactoringTool( + sorted(fixer_names), flags, sorted(explicit), + options.nobackups, not options.no_diffs, + input_base_dir=input_base_dir, + output_dir=options.output_dir, + append_suffix=options.add_suffix) + + # Refactor all files and directories passed as arguments + if not rt.errors: + if refactor_stdin: + rt.refactor_stdin() + else: + try: + rt.refactor(args, options.write, options.doctests_only, + options.processes) + except refactor.MultiprocessingUnsupported: + assert options.processes > 1 + print >> sys.stderr, "Sorry, -j isn't " \ + "supported on this platform." + return 1 + rt.summarize() + + # Return error status (0 if rt.errors is zero) + return int(bool(rt.errors)) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/patcomp.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/patcomp.py new file mode 100644 index 00000000..093e5f9f --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/patcomp.py @@ -0,0 +1,205 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Pattern compiler. + +The grammer is taken from PatternGrammar.txt. + +The compiler compiles a pattern to a pytree.*Pattern instance. +""" + +__author__ = "Guido van Rossum " + +# Python imports +import os +import StringIO + +# Fairly local imports +from .pgen2 import driver, literals, token, tokenize, parse, grammar + +# Really local imports +from . import pytree +from . import pygram + +# The pattern grammar file +_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), + "PatternGrammar.txt") + + +class PatternSyntaxError(Exception): + pass + + +def tokenize_wrapper(input): + """Tokenizes a string suppressing significant whitespace.""" + skip = set((token.NEWLINE, token.INDENT, token.DEDENT)) + tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline) + for quintuple in tokens: + type, value, start, end, line_text = quintuple + if type not in skip: + yield quintuple + + +class PatternCompiler(object): + + def __init__(self, grammar_file=_PATTERN_GRAMMAR_FILE): + """Initializer. + + Takes an optional alternative filename for the pattern grammar. 
+ """ + self.grammar = driver.load_grammar(grammar_file) + self.syms = pygram.Symbols(self.grammar) + self.pygrammar = pygram.python_grammar + self.pysyms = pygram.python_symbols + self.driver = driver.Driver(self.grammar, convert=pattern_convert) + + def compile_pattern(self, input, debug=False, with_tree=False): + """Compiles a pattern string to a nested pytree.*Pattern object.""" + tokens = tokenize_wrapper(input) + try: + root = self.driver.parse_tokens(tokens, debug=debug) + except parse.ParseError as e: + raise PatternSyntaxError(str(e)) + if with_tree: + return self.compile_node(root), root + else: + return self.compile_node(root) + + def compile_node(self, node): + """Compiles a node, recursively. + + This is one big switch on the node type. + """ + # XXX Optimize certain Wildcard-containing-Wildcard patterns + # that can be merged + if node.type == self.syms.Matcher: + node = node.children[0] # Avoid unneeded recursion + + if node.type == self.syms.Alternatives: + # Skip the odd children since they are just '|' tokens + alts = [self.compile_node(ch) for ch in node.children[::2]] + if len(alts) == 1: + return alts[0] + p = pytree.WildcardPattern([[a] for a in alts], min=1, max=1) + return p.optimize() + + if node.type == self.syms.Alternative: + units = [self.compile_node(ch) for ch in node.children] + if len(units) == 1: + return units[0] + p = pytree.WildcardPattern([units], min=1, max=1) + return p.optimize() + + if node.type == self.syms.NegatedUnit: + pattern = self.compile_basic(node.children[1:]) + p = pytree.NegatedPattern(pattern) + return p.optimize() + + assert node.type == self.syms.Unit + + name = None + nodes = node.children + if len(nodes) >= 3 and nodes[1].type == token.EQUAL: + name = nodes[0].value + nodes = nodes[2:] + repeat = None + if len(nodes) >= 2 and nodes[-1].type == self.syms.Repeater: + repeat = nodes[-1] + nodes = nodes[:-1] + + # Now we've reduced it to: STRING | NAME [Details] | (...) | [...] + pattern = self.compile_basic(nodes, repeat) + + if repeat is not None: + assert repeat.type == self.syms.Repeater + children = repeat.children + child = children[0] + if child.type == token.STAR: + min = 0 + max = pytree.HUGE + elif child.type == token.PLUS: + min = 1 + max = pytree.HUGE + elif child.type == token.LBRACE: + assert children[-1].type == token.RBRACE + assert len(children) in (3, 5) + min = max = self.get_int(children[1]) + if len(children) == 5: + max = self.get_int(children[3]) + else: + assert False + if min != 1 or max != 1: + pattern = pattern.optimize() + pattern = pytree.WildcardPattern([[pattern]], min=min, max=max) + + if name is not None: + pattern.name = name + return pattern.optimize() + + def compile_basic(self, nodes, repeat=None): + # Compile STRING | NAME [Details] | (...) | [...] 
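compile_pattern() above turns a pattern written in PatternGrammar.txt syntax into nested LeafPattern/NodePattern/WildcardPattern objects that can be matched against a parse tree, binding named sub-patterns into a results dict. A minimal sketch of that round trip; it assumes the standard lib2to3 package layout, and the pattern string and sample source are purely illustrative:

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver
from lib2to3.patcomp import PatternCompiler

pc = PatternCompiler()
# Bind the receiver and the call arguments of any ".method(...)" call.
pattern = pc.compile_pattern(
    "power< obj=any trailer< '.' 'method' > trailer< '(' args=any ')' > >")

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("foo.method(1, 2)\n")
for node in tree.pre_order():
    results = {}
    if pattern.match(node, results):
        print results["obj"].value, unicode(results["args"])   # foo 1, 2
        break

This is the same mechanism the 2to3 fixers rely on for their PATTERN class attributes.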
+ assert len(nodes) >= 1 + node = nodes[0] + if node.type == token.STRING: + value = unicode(literals.evalString(node.value)) + return pytree.LeafPattern(_type_of_literal(value), value) + elif node.type == token.NAME: + value = node.value + if value.isupper(): + if value not in TOKEN_MAP: + raise PatternSyntaxError("Invalid token: %r" % value) + if nodes[1:]: + raise PatternSyntaxError("Can't have details for token") + return pytree.LeafPattern(TOKEN_MAP[value]) + else: + if value == "any": + type = None + elif not value.startswith("_"): + type = getattr(self.pysyms, value, None) + if type is None: + raise PatternSyntaxError("Invalid symbol: %r" % value) + if nodes[1:]: # Details present + content = [self.compile_node(nodes[1].children[1])] + else: + content = None + return pytree.NodePattern(type, content) + elif node.value == "(": + return self.compile_node(nodes[1]) + elif node.value == "[": + assert repeat is None + subpattern = self.compile_node(nodes[1]) + return pytree.WildcardPattern([[subpattern]], min=0, max=1) + assert False, node + + def get_int(self, node): + assert node.type == token.NUMBER + return int(node.value) + + +# Map named tokens to the type value for a LeafPattern +TOKEN_MAP = {"NAME": token.NAME, + "STRING": token.STRING, + "NUMBER": token.NUMBER, + "TOKEN": None} + + +def _type_of_literal(value): + if value[0].isalpha(): + return token.NAME + elif value in grammar.opmap: + return grammar.opmap[value] + else: + return None + + +def pattern_convert(grammar, raw_node_info): + """Converts raw node information to a Node or Leaf instance.""" + type, value, context, children = raw_node_info + if children or type in grammar.number2symbol: + return pytree.Node(type, children, context=context) + else: + return pytree.Leaf(type, value, context=context) + + +def compile_pattern(pattern): + return PatternCompiler().compile_pattern(pattern) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/__init__.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/__init__.py new file mode 100644 index 00000000..af390484 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/__init__.py @@ -0,0 +1,4 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""The pgen2 package.""" diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/conv.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/conv.py new file mode 100644 index 00000000..28fbb0b9 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/conv.py @@ -0,0 +1,257 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Convert graminit.[ch] spit out by pgen to Python code. + +Pgen is the Python parser generator. It is useful to quickly create a +parser from a grammar file in Python's grammar notation. But I don't +want my parsers to be written in C (yet), so I'm translating the +parsing tables to Python data structures and writing a Python parse +engine. + +Note that the token numbers are constants determined by the standard +Python tokenizer. The standard token module defines these numbers and +their names (the names are not used much). The token numbers are +hardcoded into the Python tokenizer and into pgen. A Python +implementation of the Python tokenizer is also available, in the +standard tokenize module. 
+ +On the other hand, symbol numbers (representing the grammar's +non-terminals) are assigned by pgen based on the actual grammar +input. + +Note: this module is pretty much obsolete; the pgen module generates +equivalent grammar tables directly from the Grammar.txt input file +without having to invoke the Python pgen C program. + +""" + +# Python imports +import re + +# Local imports +from pgen2 import grammar, token + + +class Converter(grammar.Grammar): + """Grammar subclass that reads classic pgen output files. + + The run() method reads the tables as produced by the pgen parser + generator, typically contained in two C files, graminit.h and + graminit.c. The other methods are for internal use only. + + See the base class for more documentation. + + """ + + def run(self, graminit_h, graminit_c): + """Load the grammar tables from the text files written by pgen.""" + self.parse_graminit_h(graminit_h) + self.parse_graminit_c(graminit_c) + self.finish_off() + + def parse_graminit_h(self, filename): + """Parse the .h file written by pgen. (Internal) + + This file is a sequence of #define statements defining the + nonterminals of the grammar as numbers. We build two tables + mapping the numbers to names and back. + + """ + try: + f = open(filename) + except IOError, err: + print "Can't open %s: %s" % (filename, err) + return False + self.symbol2number = {} + self.number2symbol = {} + lineno = 0 + for line in f: + lineno += 1 + mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line) + if not mo and line.strip(): + print "%s(%s): can't parse %s" % (filename, lineno, + line.strip()) + else: + symbol, number = mo.groups() + number = int(number) + assert symbol not in self.symbol2number + assert number not in self.number2symbol + self.symbol2number[symbol] = number + self.number2symbol[number] = symbol + return True + + def parse_graminit_c(self, filename): + """Parse the .c file written by pgen. (Internal) + + The file looks as follows. The first two lines are always this: + + #include "pgenheaders.h" + #include "grammar.h" + + After that come four blocks: + + 1) one or more state definitions + 2) a table defining dfas + 3) a table defining labels + 4) a struct defining the grammar + + A state definition has the following form: + - one or more arc arrays, each of the form: + static arc arcs__[] = { + {, }, + ... + }; + - followed by a state array, of the form: + static state states_[] = { + {, arcs__}, + ... + }; + + """ + try: + f = open(filename) + except IOError, err: + print "Can't open %s: %s" % (filename, err) + return False + # The code below essentially uses f's iterator-ness! 
+ lineno = 0 + + # Expect the two #include lines + lineno, line = lineno+1, f.next() + assert line == '#include "pgenheaders.h"\n', (lineno, line) + lineno, line = lineno+1, f.next() + assert line == '#include "grammar.h"\n', (lineno, line) + + # Parse the state definitions + lineno, line = lineno+1, f.next() + allarcs = {} + states = [] + while line.startswith("static arc "): + while line.startswith("static arc "): + mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$", + line) + assert mo, (lineno, line) + n, m, k = map(int, mo.groups()) + arcs = [] + for _ in range(k): + lineno, line = lineno+1, f.next() + mo = re.match(r"\s+{(\d+), (\d+)},$", line) + assert mo, (lineno, line) + i, j = map(int, mo.groups()) + arcs.append((i, j)) + lineno, line = lineno+1, f.next() + assert line == "};\n", (lineno, line) + allarcs[(n, m)] = arcs + lineno, line = lineno+1, f.next() + mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line) + assert mo, (lineno, line) + s, t = map(int, mo.groups()) + assert s == len(states), (lineno, line) + state = [] + for _ in range(t): + lineno, line = lineno+1, f.next() + mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line) + assert mo, (lineno, line) + k, n, m = map(int, mo.groups()) + arcs = allarcs[n, m] + assert k == len(arcs), (lineno, line) + state.append(arcs) + states.append(state) + lineno, line = lineno+1, f.next() + assert line == "};\n", (lineno, line) + lineno, line = lineno+1, f.next() + self.states = states + + # Parse the dfas + dfas = {} + mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line) + assert mo, (lineno, line) + ndfas = int(mo.group(1)) + for i in range(ndfas): + lineno, line = lineno+1, f.next() + mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$', + line) + assert mo, (lineno, line) + symbol = mo.group(2) + number, x, y, z = map(int, mo.group(1, 3, 4, 5)) + assert self.symbol2number[symbol] == number, (lineno, line) + assert self.number2symbol[number] == symbol, (lineno, line) + assert x == 0, (lineno, line) + state = states[z] + assert y == len(state), (lineno, line) + lineno, line = lineno+1, f.next() + mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line) + assert mo, (lineno, line) + first = {} + rawbitset = eval(mo.group(1)) + for i, c in enumerate(rawbitset): + byte = ord(c) + for j in range(8): + if byte & (1<= os.path.getmtime(b) + + +def main(*args): + """Main program, when run as a script: produce grammar pickle files. + + Calls load_grammar for each argument, a path to a grammar text file. + """ + if not args: + args = sys.argv[1:] + logging.basicConfig(level=logging.INFO, stream=sys.stdout, + format='%(message)s') + for gt in args: + load_grammar(gt, save=True, force=True) + return True + +if __name__ == "__main__": + sys.exit(int(not main())) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/grammar.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/grammar.py new file mode 100644 index 00000000..1aa5c432 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/grammar.py @@ -0,0 +1,184 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""This module defines the data structures used to represent a grammar. + +These are a bit arcane because they are derived from the data +structures used by Python's 'pgen' parser generator. 
+ +There's also a table here mapping operators to their names in the +token module; the Python tokenize module reports all operators as the +fallback token code OP, but the parser needs the actual token code. + +""" + +# Python imports +import pickle + +# Local imports +from . import token, tokenize + + +class Grammar(object): + """Pgen parsing tables conversion class. + + Once initialized, this class supplies the grammar tables for the + parsing engine implemented by parse.py. The parsing engine + accesses the instance variables directly. The class here does not + provide initialization of the tables; several subclasses exist to + do this (see the conv and pgen modules). + + The load() method reads the tables from a pickle file, which is + much faster than the other ways offered by subclasses. The pickle + file is written by calling dump() (after loading the grammar + tables using a subclass). The report() method prints a readable + representation of the tables to stdout, for debugging. + + The instance variables are as follows: + + symbol2number -- a dict mapping symbol names to numbers. Symbol + numbers are always 256 or higher, to distinguish + them from token numbers, which are between 0 and + 255 (inclusive). + + number2symbol -- a dict mapping numbers to symbol names; + these two are each other's inverse. + + states -- a list of DFAs, where each DFA is a list of + states, each state is a list of arcs, and each + arc is a (i, j) pair where i is a label and j is + a state number. The DFA number is the index into + this list. (This name is slightly confusing.) + Final states are represented by a special arc of + the form (0, j) where j is its own state number. + + dfas -- a dict mapping symbol numbers to (DFA, first) + pairs, where DFA is an item from the states list + above, and first is a set of tokens that can + begin this grammar rule (represented by a dict + whose values are always 1). + + labels -- a list of (x, y) pairs where x is either a token + number or a symbol number, and y is either None + or a string; the strings are keywords. The label + number is the index in this list; label numbers + are used to mark state transitions (arcs) in the + DFAs. + + start -- the number of the grammar's start symbol. + + keywords -- a dict mapping keyword strings to arc labels. + + tokens -- a dict mapping token numbers to arc labels. + + """ + + def __init__(self): + self.symbol2number = {} + self.number2symbol = {} + self.states = [] + self.dfas = {} + self.labels = [(0, "EMPTY")] + self.keywords = {} + self.tokens = {} + self.symbol2label = {} + self.start = 256 + + def dump(self, filename): + """Dump the grammar tables to a pickle file.""" + f = open(filename, "wb") + pickle.dump(self.__dict__, f, 2) + f.close() + + def load(self, filename): + """Load the grammar tables from a pickle file.""" + f = open(filename, "rb") + d = pickle.load(f) + f.close() + self.__dict__.update(d) + + def copy(self): + """ + Copy the grammar. 
+ """ + new = self.__class__() + for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords", + "tokens", "symbol2label"): + setattr(new, dict_attr, getattr(self, dict_attr).copy()) + new.labels = self.labels[:] + new.states = self.states[:] + new.start = self.start + return new + + def report(self): + """Dump the grammar tables to standard output, for debugging.""" + from pprint import pprint + print "s2n" + pprint(self.symbol2number) + print "n2s" + pprint(self.number2symbol) + print "states" + pprint(self.states) + print "dfas" + pprint(self.dfas) + print "labels" + pprint(self.labels) + print "start", self.start + + +# Map from operator to number (since tokenize doesn't do this) + +opmap_raw = """ +( LPAR +) RPAR +[ LSQB +] RSQB +: COLON +, COMMA +; SEMI ++ PLUS +- MINUS +* STAR +/ SLASH +| VBAR +& AMPER +< LESS +> GREATER += EQUAL +. DOT +% PERCENT +` BACKQUOTE +{ LBRACE +} RBRACE +@ AT +== EQEQUAL +!= NOTEQUAL +<> NOTEQUAL +<= LESSEQUAL +>= GREATEREQUAL +~ TILDE +^ CIRCUMFLEX +<< LEFTSHIFT +>> RIGHTSHIFT +** DOUBLESTAR ++= PLUSEQUAL +-= MINEQUAL +*= STAREQUAL +/= SLASHEQUAL +%= PERCENTEQUAL +&= AMPEREQUAL +|= VBAREQUAL +^= CIRCUMFLEXEQUAL +<<= LEFTSHIFTEQUAL +>>= RIGHTSHIFTEQUAL +**= DOUBLESTAREQUAL +// DOUBLESLASH +//= DOUBLESLASHEQUAL +-> RARROW +""" + +opmap = {} +for line in opmap_raw.splitlines(): + if line: + op, name = line.split() + opmap[op] = getattr(token, name) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/literals.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/literals.py new file mode 100644 index 00000000..0b3948a5 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/literals.py @@ -0,0 +1,60 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Safely evaluate Python string literals without using eval().""" + +import re + +simple_escapes = {"a": "\a", + "b": "\b", + "f": "\f", + "n": "\n", + "r": "\r", + "t": "\t", + "v": "\v", + "'": "'", + '"': '"', + "\\": "\\"} + +def escape(m): + all, tail = m.group(0, 1) + assert all.startswith("\\") + esc = simple_escapes.get(tail) + if esc is not None: + return esc + if tail.startswith("x"): + hexes = tail[1:] + if len(hexes) < 2: + raise ValueError("invalid hex string escape ('\\%s')" % tail) + try: + i = int(hexes, 16) + except ValueError: + raise ValueError("invalid hex string escape ('\\%s')" % tail) + else: + try: + i = int(tail, 8) + except ValueError: + raise ValueError("invalid octal string escape ('\\%s')" % tail) + return chr(i) + +def evalString(s): + assert s.startswith("'") or s.startswith('"'), repr(s[:1]) + q = s[0] + if s[:3] == q*3: + q = q*3 + assert s.endswith(q), repr(s[-len(q):]) + assert len(s) >= 2*len(q) + s = s[len(q):-len(q)] + return re.sub(r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})", escape, s) + +def test(): + for i in range(256): + c = chr(i) + s = repr(c) + e = evalString(s) + if e != c: + print i, c, s, e + + +if __name__ == "__main__": + test() diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/parse.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/parse.py new file mode 100644 index 00000000..6bebdbba --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/parse.py @@ -0,0 +1,201 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Parser engine for the grammar tables generated by pgen. + +The grammar table must be loaded first. 
+ +See Parser/parser.c in the Python distribution for additional info on +how this parsing engine works. + +""" + +# Local imports +from . import token + +class ParseError(Exception): + """Exception to signal the parser is stuck.""" + + def __init__(self, msg, type, value, context): + Exception.__init__(self, "%s: type=%r, value=%r, context=%r" % + (msg, type, value, context)) + self.msg = msg + self.type = type + self.value = value + self.context = context + +class Parser(object): + """Parser engine. + + The proper usage sequence is: + + p = Parser(grammar, [converter]) # create instance + p.setup([start]) # prepare for parsing + : + if p.addtoken(...): # parse a token; may raise ParseError + break + root = p.rootnode # root of abstract syntax tree + + A Parser instance may be reused by calling setup() repeatedly. + + A Parser instance contains state pertaining to the current token + sequence, and should not be used concurrently by different threads + to parse separate token sequences. + + See driver.py for how to get input tokens by tokenizing a file or + string. + + Parsing is complete when addtoken() returns True; the root of the + abstract syntax tree can then be retrieved from the rootnode + instance variable. When a syntax error occurs, addtoken() raises + the ParseError exception. There is no error recovery; the parser + cannot be used after a syntax error was reported (but it can be + reinitialized by calling setup()). + + """ + + def __init__(self, grammar, convert=None): + """Constructor. + + The grammar argument is a grammar.Grammar instance; see the + grammar module for more information. + + The parser is not ready yet for parsing; you must call the + setup() method to get it started. + + The optional convert argument is a function mapping concrete + syntax tree nodes to abstract syntax tree nodes. If not + given, no conversion is done and the syntax tree produced is + the concrete syntax tree. If given, it must be a function of + two arguments, the first being the grammar (a grammar.Grammar + instance), and the second being the concrete syntax tree node + to be converted. The syntax tree is converted from the bottom + up. + + A concrete syntax tree node is a (type, value, context, nodes) + tuple, where type is the node type (a token or symbol number), + value is None for symbols and a string for tokens, context is + None or an opaque value used for error reporting (typically a + (lineno, offset) pair), and nodes is a list of children for + symbols, and None for tokens. + + An abstract syntax tree node may be anything; this is entirely + up to the converter function. + + """ + self.grammar = grammar + self.convert = convert or (lambda grammar, node: node) + + def setup(self, start=None): + """Prepare for parsing. + + This *must* be called before starting to parse. + + The optional argument is an alternative start symbol; it + defaults to the grammar's start symbol. + + You can use a Parser instance to parse any number of programs; + each time you call setup() the parser is reset to an initial + state determined by the (implicit or explicit) start symbol. + + """ + if start is None: + start = self.grammar.start + # Each stack entry is a tuple: (dfa, state, node). + # A node is a tuple: (type, value, context, children), + # where children is a list of nodes or None, and context may be None. 
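The Parser docstrings above spell out the intended call sequence: construct the parser with a grammar and an optional convert callback, call setup(), then feed classified tokens to addtoken() until it returns True and read rootnode. A minimal sketch of that loop; it assumes Python 2 and the standard lib2to3 package layout, and the one-line source string is illustrative:

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import tokenize, parse
from lib2to3.pgen2 import grammar as pgen_grammar

p = parse.Parser(pygram.python_grammar, convert=pytree.convert)
p.setup()                                     # start defaults to the grammar's start symbol

readline = iter(["x = 1 + 2\n"]).next         # StopIteration signals EOF (see the tokenize docstring)
for type, value, start, end, line in tokenize.generate_tokens(readline):
    if type in (tokenize.COMMENT, tokenize.NL):
        continue                              # the real driver filters these out as well
    if type == tokenize.OP:
        type = pgen_grammar.opmap[value]      # classify() needs the exact token code, not OP
    if p.addtoken(type, value, ("", start)):  # context here is (prefix, (lineno, column))
        break                                 # ENDMARKER was accepted; the tree is complete

print pytree.type_repr(p.rootnode.type)       # e.g. 'file_input'

The full Driver in pgen2/driver.py runs the same loop while also accumulating whitespace and comments into each token's prefix, which is how the resulting tree can reproduce the source exactly.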
+ newnode = (start, None, None, []) + stackentry = (self.grammar.dfas[start], 0, newnode) + self.stack = [stackentry] + self.rootnode = None + self.used_names = set() # Aliased to self.rootnode.used_names in pop() + + def addtoken(self, type, value, context): + """Add a token; return True iff this is the end of the program.""" + # Map from token to label + ilabel = self.classify(type, value, context) + # Loop until the token is shifted; may raise exceptions + while True: + dfa, state, node = self.stack[-1] + states, first = dfa + arcs = states[state] + # Look for a state with this label + for i, newstate in arcs: + t, v = self.grammar.labels[i] + if ilabel == i: + # Look it up in the list of labels + assert t < 256 + # Shift a token; we're done with it + self.shift(type, value, newstate, context) + # Pop while we are in an accept-only state + state = newstate + while states[state] == [(0, state)]: + self.pop() + if not self.stack: + # Done parsing! + return True + dfa, state, node = self.stack[-1] + states, first = dfa + # Done with this token + return False + elif t >= 256: + # See if it's a symbol and if we're in its first set + itsdfa = self.grammar.dfas[t] + itsstates, itsfirst = itsdfa + if ilabel in itsfirst: + # Push a symbol + self.push(t, self.grammar.dfas[t], newstate, context) + break # To continue the outer while loop + else: + if (0, state) in arcs: + # An accepting state, pop it and try something else + self.pop() + if not self.stack: + # Done parsing, but another token is input + raise ParseError("too much input", + type, value, context) + else: + # No success finding a transition + raise ParseError("bad input", type, value, context) + + def classify(self, type, value, context): + """Turn a token into a label. (Internal)""" + if type == token.NAME: + # Keep a listing of all used names + self.used_names.add(value) + # Check for reserved words + ilabel = self.grammar.keywords.get(value) + if ilabel is not None: + return ilabel + ilabel = self.grammar.tokens.get(type) + if ilabel is None: + raise ParseError("bad token", type, value, context) + return ilabel + + def shift(self, type, value, newstate, context): + """Shift a token. (Internal)""" + dfa, state, node = self.stack[-1] + newnode = (type, value, context, None) + newnode = self.convert(self.grammar, newnode) + if newnode is not None: + node[-1].append(newnode) + self.stack[-1] = (dfa, newstate, node) + + def push(self, type, newdfa, newstate, context): + """Push a nonterminal. (Internal)""" + dfa, state, node = self.stack[-1] + newnode = (type, None, context, []) + self.stack[-1] = (dfa, newstate, node) + self.stack.append((newdfa, 0, newnode)) + + def pop(self): + """Pop a nonterminal. (Internal)""" + popdfa, popstate, popnode = self.stack.pop() + newnode = self.convert(self.grammar, popnode) + if newnode is not None: + if self.stack: + dfa, state, node = self.stack[-1] + node[-1].append(newnode) + else: + self.rootnode = newnode + self.rootnode.used_names = self.used_names diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/pgen.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/pgen.py new file mode 100644 index 00000000..63084a4c --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/pgen.py @@ -0,0 +1,386 @@ +# Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +# Pgen imports +from . 
import grammar, token, tokenize + +class PgenGrammar(grammar.Grammar): + pass + +class ParserGenerator(object): + + def __init__(self, filename, stream=None): + close_stream = None + if stream is None: + stream = open(filename) + close_stream = stream.close + self.filename = filename + self.stream = stream + self.generator = tokenize.generate_tokens(stream.readline) + self.gettoken() # Initialize lookahead + self.dfas, self.startsymbol = self.parse() + if close_stream is not None: + close_stream() + self.first = {} # map from symbol name to set of tokens + self.addfirstsets() + + def make_grammar(self): + c = PgenGrammar() + names = self.dfas.keys() + names.sort() + names.remove(self.startsymbol) + names.insert(0, self.startsymbol) + for name in names: + i = 256 + len(c.symbol2number) + c.symbol2number[name] = i + c.number2symbol[i] = name + for name in names: + dfa = self.dfas[name] + states = [] + for state in dfa: + arcs = [] + for label, next in state.arcs.iteritems(): + arcs.append((self.make_label(c, label), dfa.index(next))) + if state.isfinal: + arcs.append((0, dfa.index(state))) + states.append(arcs) + c.states.append(states) + c.dfas[c.symbol2number[name]] = (states, self.make_first(c, name)) + c.start = c.symbol2number[self.startsymbol] + return c + + def make_first(self, c, name): + rawfirst = self.first[name] + first = {} + for label in rawfirst: + ilabel = self.make_label(c, label) + ##assert ilabel not in first # XXX failed on <> ... != + first[ilabel] = 1 + return first + + def make_label(self, c, label): + # XXX Maybe this should be a method on a subclass of converter? + ilabel = len(c.labels) + if label[0].isalpha(): + # Either a symbol name or a named token + if label in c.symbol2number: + # A symbol name (a non-terminal) + if label in c.symbol2label: + return c.symbol2label[label] + else: + c.labels.append((c.symbol2number[label], None)) + c.symbol2label[label] = ilabel + return ilabel + else: + # A named token (NAME, NUMBER, STRING) + itoken = getattr(token, label, None) + assert isinstance(itoken, int), label + assert itoken in token.tok_name, label + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + else: + # Either a keyword or an operator + assert label[0] in ('"', "'"), label + value = eval(label) + if value[0].isalpha(): + # A keyword + if value in c.keywords: + return c.keywords[value] + else: + c.labels.append((token.NAME, value)) + c.keywords[value] = ilabel + return ilabel + else: + # An operator (any non-numeric token) + itoken = grammar.opmap[value] # Fails if unknown token + if itoken in c.tokens: + return c.tokens[itoken] + else: + c.labels.append((itoken, None)) + c.tokens[itoken] = ilabel + return ilabel + + def addfirstsets(self): + names = self.dfas.keys() + names.sort() + for name in names: + if name not in self.first: + self.calcfirst(name) + #print name, self.first[name].keys() + + def calcfirst(self, name): + dfa = self.dfas[name] + self.first[name] = None # dummy to detect left recursion + state = dfa[0] + totalset = {} + overlapcheck = {} + for label, next in state.arcs.iteritems(): + if label in self.dfas: + if label in self.first: + fset = self.first[label] + if fset is None: + raise ValueError("recursion for rule %r" % name) + else: + self.calcfirst(label) + fset = self.first[label] + totalset.update(fset) + overlapcheck[label] = fset + else: + totalset[label] = 1 + overlapcheck[label] = {label: 1} + inverse = {} + for label, itsfirst in 
overlapcheck.iteritems(): + for symbol in itsfirst: + if symbol in inverse: + raise ValueError("rule %s is ambiguous; %s is in the" + " first sets of %s as well as %s" % + (name, symbol, label, inverse[symbol])) + inverse[symbol] = label + self.first[name] = totalset + + def parse(self): + dfas = {} + startsymbol = None + # MSTART: (NEWLINE | RULE)* ENDMARKER + while self.type != token.ENDMARKER: + while self.type == token.NEWLINE: + self.gettoken() + # RULE: NAME ':' RHS NEWLINE + name = self.expect(token.NAME) + self.expect(token.OP, ":") + a, z = self.parse_rhs() + self.expect(token.NEWLINE) + #self.dump_nfa(name, a, z) + dfa = self.make_dfa(a, z) + #self.dump_dfa(name, dfa) + oldlen = len(dfa) + self.simplify_dfa(dfa) + newlen = len(dfa) + dfas[name] = dfa + #print name, oldlen, newlen + if startsymbol is None: + startsymbol = name + return dfas, startsymbol + + def make_dfa(self, start, finish): + # To turn an NFA into a DFA, we define the states of the DFA + # to correspond to *sets* of states of the NFA. Then do some + # state reduction. Let's represent sets as dicts with 1 for + # values. + assert isinstance(start, NFAState) + assert isinstance(finish, NFAState) + def closure(state): + base = {} + addclosure(state, base) + return base + def addclosure(state, base): + assert isinstance(state, NFAState) + if state in base: + return + base[state] = 1 + for label, next in state.arcs: + if label is None: + addclosure(next, base) + states = [DFAState(closure(start), finish)] + for state in states: # NB states grows while we're iterating + arcs = {} + for nfastate in state.nfaset: + for label, next in nfastate.arcs: + if label is not None: + addclosure(next, arcs.setdefault(label, {})) + for label, nfaset in arcs.iteritems(): + for st in states: + if st.nfaset == nfaset: + break + else: + st = DFAState(nfaset, finish) + states.append(st) + state.addarc(st, label) + return states # List of DFAState instances; first one is start + + def dump_nfa(self, name, start, finish): + print "Dump of NFA for", name + todo = [start] + for i, state in enumerate(todo): + print " State", i, state is finish and "(final)" or "" + for label, next in state.arcs: + if next in todo: + j = todo.index(next) + else: + j = len(todo) + todo.append(next) + if label is None: + print " -> %d" % j + else: + print " %s -> %d" % (label, j) + + def dump_dfa(self, name, dfa): + print "Dump of DFA for", name + for i, state in enumerate(dfa): + print " State", i, state.isfinal and "(final)" or "" + for label, next in state.arcs.iteritems(): + print " %s -> %d" % (label, dfa.index(next)) + + def simplify_dfa(self, dfa): + # This is not theoretically optimal, but works well enough. + # Algorithm: repeatedly look for two states that have the same + # set of arcs (same labels pointing to the same nodes) and + # unify them, until things stop changing. 
+ + # dfa is a list of DFAState instances + changes = True + while changes: + changes = False + for i, state_i in enumerate(dfa): + for j in range(i+1, len(dfa)): + state_j = dfa[j] + if state_i == state_j: + #print " unify", i, j + del dfa[j] + for state in dfa: + state.unifystate(state_j, state_i) + changes = True + break + + def parse_rhs(self): + # RHS: ALT ('|' ALT)* + a, z = self.parse_alt() + if self.value != "|": + return a, z + else: + aa = NFAState() + zz = NFAState() + aa.addarc(a) + z.addarc(zz) + while self.value == "|": + self.gettoken() + a, z = self.parse_alt() + aa.addarc(a) + z.addarc(zz) + return aa, zz + + def parse_alt(self): + # ALT: ITEM+ + a, b = self.parse_item() + while (self.value in ("(", "[") or + self.type in (token.NAME, token.STRING)): + c, d = self.parse_item() + b.addarc(c) + b = d + return a, b + + def parse_item(self): + # ITEM: '[' RHS ']' | ATOM ['+' | '*'] + if self.value == "[": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, "]") + a.addarc(z) + return a, z + else: + a, z = self.parse_atom() + value = self.value + if value not in ("+", "*"): + return a, z + self.gettoken() + z.addarc(a) + if value == "+": + return a, z + else: + return a, a + + def parse_atom(self): + # ATOM: '(' RHS ')' | NAME | STRING + if self.value == "(": + self.gettoken() + a, z = self.parse_rhs() + self.expect(token.OP, ")") + return a, z + elif self.type in (token.NAME, token.STRING): + a = NFAState() + z = NFAState() + a.addarc(z, self.value) + self.gettoken() + return a, z + else: + self.raise_error("expected (...) or NAME or STRING, got %s/%s", + self.type, self.value) + + def expect(self, type, value=None): + if self.type != type or (value is not None and self.value != value): + self.raise_error("expected %s/%s, got %s/%s", + type, value, self.type, self.value) + value = self.value + self.gettoken() + return value + + def gettoken(self): + tup = self.generator.next() + while tup[0] in (tokenize.COMMENT, tokenize.NL): + tup = self.generator.next() + self.type, self.value, self.begin, self.end, self.line = tup + #print token.tok_name[self.type], repr(self.value) + + def raise_error(self, msg, *args): + if args: + try: + msg = msg % args + except: + msg = " ".join([msg] + map(str, args)) + raise SyntaxError(msg, (self.filename, self.end[0], + self.end[1], self.line)) + +class NFAState(object): + + def __init__(self): + self.arcs = [] # list of (label, NFAState) pairs + + def addarc(self, next, label=None): + assert label is None or isinstance(label, str) + assert isinstance(next, NFAState) + self.arcs.append((label, next)) + +class DFAState(object): + + def __init__(self, nfaset, final): + assert isinstance(nfaset, dict) + assert isinstance(iter(nfaset).next(), NFAState) + assert isinstance(final, NFAState) + self.nfaset = nfaset + self.isfinal = final in nfaset + self.arcs = {} # map from label to DFAState + + def addarc(self, next, label): + assert isinstance(label, str) + assert label not in self.arcs + assert isinstance(next, DFAState) + self.arcs[label] = next + + def unifystate(self, old, new): + for label, next in self.arcs.iteritems(): + if next is old: + self.arcs[label] = new + + def __eq__(self, other): + # Equality test -- ignore the nfaset instance variable + assert isinstance(other, DFAState) + if self.isfinal != other.isfinal: + return False + # Can't just return self.arcs == other.arcs, because that + # would invoke this method recursively, with cycles... 
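ParserGenerator above parses the grammar notation itself (parse_rhs, parse_alt, parse_item and parse_atom build an NFA per rule), converts each NFA to a DFA in make_dfa, prunes it in simplify_dfa, and finally numbers symbols and labels in make_grammar. A minimal sketch feeding it a one-rule grammar; it assumes Python 2 and the standard lib2to3 package layout, and the grammar text is illustrative:

import StringIO
from lib2to3.pgen2 import pgen

grammar_text = "start: NAME '=' NUMBER NEWLINE\n"
pg = pgen.ParserGenerator("<illustrative>", StringIO.StringIO(grammar_text))
g = pg.make_grammar()

print g.symbol2number          # {'start': 256}
print g.start                  # 256
print sorted(g.tokens)         # [1, 2, 4, 22]: NAME, NUMBER, NEWLINE, EQUAL got arc labels

pygram.python_grammar is produced the same way from Grammar.txt (and cached as a pickle by driver.load_grammar).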
+ if len(self.arcs) != len(other.arcs): + return False + for label, next in self.arcs.iteritems(): + if next is not other.arcs.get(label): + return False + return True + + __hash__ = None # For Py3 compatibility. + +def generate_grammar(filename="Grammar.txt"): + p = ParserGenerator(filename) + return p.make_grammar() diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/token.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/token.py new file mode 100644 index 00000000..61468b31 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/token.py @@ -0,0 +1,82 @@ +#! /usr/bin/env python + +"""Token constants (from "token.h").""" + +# Taken from Python (r53757) and modified to include some tokens +# originally monkeypatched in by pgen2.tokenize + +#--start constants-- +ENDMARKER = 0 +NAME = 1 +NUMBER = 2 +STRING = 3 +NEWLINE = 4 +INDENT = 5 +DEDENT = 6 +LPAR = 7 +RPAR = 8 +LSQB = 9 +RSQB = 10 +COLON = 11 +COMMA = 12 +SEMI = 13 +PLUS = 14 +MINUS = 15 +STAR = 16 +SLASH = 17 +VBAR = 18 +AMPER = 19 +LESS = 20 +GREATER = 21 +EQUAL = 22 +DOT = 23 +PERCENT = 24 +BACKQUOTE = 25 +LBRACE = 26 +RBRACE = 27 +EQEQUAL = 28 +NOTEQUAL = 29 +LESSEQUAL = 30 +GREATEREQUAL = 31 +TILDE = 32 +CIRCUMFLEX = 33 +LEFTSHIFT = 34 +RIGHTSHIFT = 35 +DOUBLESTAR = 36 +PLUSEQUAL = 37 +MINEQUAL = 38 +STAREQUAL = 39 +SLASHEQUAL = 40 +PERCENTEQUAL = 41 +AMPEREQUAL = 42 +VBAREQUAL = 43 +CIRCUMFLEXEQUAL = 44 +LEFTSHIFTEQUAL = 45 +RIGHTSHIFTEQUAL = 46 +DOUBLESTAREQUAL = 47 +DOUBLESLASH = 48 +DOUBLESLASHEQUAL = 49 +AT = 50 +OP = 51 +COMMENT = 52 +NL = 53 +RARROW = 54 +ERRORTOKEN = 55 +N_TOKENS = 56 +NT_OFFSET = 256 +#--end constants-- + +tok_name = {} +for _name, _value in globals().items(): + if type(_value) is type(0): + tok_name[_value] = _name + + +def ISTERMINAL(x): + return x < NT_OFFSET + +def ISNONTERMINAL(x): + return x >= NT_OFFSET + +def ISEOF(x): + return x == ENDMARKER diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/tokenize.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/tokenize.py new file mode 100644 index 00000000..f6e0284c --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pgen2/tokenize.py @@ -0,0 +1,499 @@ +# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation. +# All rights reserved. + +"""Tokenization help for Python programs. + +generate_tokens(readline) is a generator that breaks a stream of +text into Python tokens. It accepts a readline-like method which is called +repeatedly to get the next line of input (or "" for EOF). It generates +5-tuples with these members: + + the token type (see token.py) + the token (a string) + the starting (row, column) indices of the token (a 2-tuple of ints) + the ending (row, column) indices of the token (a 2-tuple of ints) + the original line (string) + +It is designed to match the working of the Python tokenizer exactly, except +that it produces COMMENT tokens for comments and gives type OP for all +operators + +Older entry points + tokenize_loop(readline, tokeneater) + tokenize(readline, tokeneater=printtoken) +are the same, except instead of generating tokens, tokeneater is a callback +function to which the 5 fields described above are passed as 5 arguments, +each time a new token is found.""" + +__author__ = 'Ka-Ping Yee ' +__credits__ = \ + 'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro' + +import string, re +from codecs import BOM_UTF8, lookup +from lib2to3.pgen2.token import * + +from . 
import token +__all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize", + "generate_tokens", "untokenize"] +del token + +try: + bytes +except NameError: + # Support bytes type in Python <= 2.5, so 2to3 turns itself into + # valid Python 3 code. + bytes = str + +def group(*choices): return '(' + '|'.join(choices) + ')' +def any(*choices): return group(*choices) + '*' +def maybe(*choices): return group(*choices) + '?' + +Whitespace = r'[ \f\t]*' +Comment = r'#[^\r\n]*' +Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment) +Name = r'[a-zA-Z_]\w*' + +Binnumber = r'0[bB][01]*' +Hexnumber = r'0[xX][\da-fA-F]*[lL]?' +Octnumber = r'0[oO]?[0-7]*[lL]?' +Decnumber = r'[1-9]\d*[lL]?' +Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber) +Exponent = r'[eE][-+]?\d+' +Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent) +Expfloat = r'\d+' + Exponent +Floatnumber = group(Pointfloat, Expfloat) +Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]') +Number = group(Imagnumber, Floatnumber, Intnumber) + +# Tail end of ' string. +Single = r"[^'\\]*(?:\\.[^'\\]*)*'" +# Tail end of " string. +Double = r'[^"\\]*(?:\\.[^"\\]*)*"' +# Tail end of ''' string. +Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''" +# Tail end of """ string. +Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""' +Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""') +# Single-line ' or " string. +String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", + r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"') + +# Because of leftmost-then-longest match semantics, be sure to put the +# longest operators first (e.g., if = came before ==, == would get +# recognized as two instances of =). +Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", + r"//=?", r"->", + r"[+\-*/%&|^=<>]=?", + r"~") + +Bracket = '[][(){}]' +Special = group(r'\r?\n', r'[:;.,`@]') +Funny = group(Operator, Bracket, Special) + +PlainToken = group(Number, Funny, String, Name) +Token = Ignore + PlainToken + +# First (or only) line of ' or " string. 
+ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" + + group("'", r'\\\r?\n'), + r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' + + group('"', r'\\\r?\n')) +PseudoExtras = group(r'\\\r?\n', Comment, Triple) +PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name) + +tokenprog, pseudoprog, single3prog, double3prog = map( + re.compile, (Token, PseudoToken, Single3, Double3)) +endprogs = {"'": re.compile(Single), '"': re.compile(Double), + "'''": single3prog, '"""': double3prog, + "r'''": single3prog, 'r"""': double3prog, + "u'''": single3prog, 'u"""': double3prog, + "b'''": single3prog, 'b"""': double3prog, + "ur'''": single3prog, 'ur"""': double3prog, + "br'''": single3prog, 'br"""': double3prog, + "R'''": single3prog, 'R"""': double3prog, + "U'''": single3prog, 'U"""': double3prog, + "B'''": single3prog, 'B"""': double3prog, + "uR'''": single3prog, 'uR"""': double3prog, + "Ur'''": single3prog, 'Ur"""': double3prog, + "UR'''": single3prog, 'UR"""': double3prog, + "bR'''": single3prog, 'bR"""': double3prog, + "Br'''": single3prog, 'Br"""': double3prog, + "BR'''": single3prog, 'BR"""': double3prog, + 'r': None, 'R': None, + 'u': None, 'U': None, + 'b': None, 'B': None} + +triple_quoted = {} +for t in ("'''", '"""', + "r'''", 'r"""', "R'''", 'R"""', + "u'''", 'u"""', "U'''", 'U"""', + "b'''", 'b"""', "B'''", 'B"""', + "ur'''", 'ur"""', "Ur'''", 'Ur"""', + "uR'''", 'uR"""', "UR'''", 'UR"""', + "br'''", 'br"""', "Br'''", 'Br"""', + "bR'''", 'bR"""', "BR'''", 'BR"""',): + triple_quoted[t] = t +single_quoted = {} +for t in ("'", '"', + "r'", 'r"', "R'", 'R"', + "u'", 'u"', "U'", 'U"', + "b'", 'b"', "B'", 'B"', + "ur'", 'ur"', "Ur'", 'Ur"', + "uR'", 'uR"', "UR'", 'UR"', + "br'", 'br"', "Br'", 'Br"', + "bR'", 'bR"', "BR'", 'BR"', ): + single_quoted[t] = t + +tabsize = 8 + +class TokenError(Exception): pass + +class StopTokenizing(Exception): pass + +def printtoken(type, token, start, end, line): # for testing + (srow, scol) = start + (erow, ecol) = end + print "%d,%d-%d,%d:\t%s\t%s" % \ + (srow, scol, erow, ecol, tok_name[type], repr(token)) + +def tokenize(readline, tokeneater=printtoken): + """ + The tokenize() function accepts two parameters: one representing the + input stream, and one providing an output mechanism for tokenize(). + + The first parameter, readline, must be a callable object which provides + the same interface as the readline() method of built-in file objects. + Each call to the function should return one line of input as a string. + + The second parameter, tokeneater, must also be a callable object. It is + called once for each token, with five arguments, corresponding to the + tuples generated by generate_tokens(). 
+ """ + try: + tokenize_loop(readline, tokeneater) + except StopTokenizing: + pass + +# backwards compatible interface +def tokenize_loop(readline, tokeneater): + for token_info in generate_tokens(readline): + tokeneater(*token_info) + +class Untokenizer: + + def __init__(self): + self.tokens = [] + self.prev_row = 1 + self.prev_col = 0 + + def add_whitespace(self, start): + row, col = start + assert row <= self.prev_row + col_offset = col - self.prev_col + if col_offset: + self.tokens.append(" " * col_offset) + + def untokenize(self, iterable): + for t in iterable: + if len(t) == 2: + self.compat(t, iterable) + break + tok_type, token, start, end, line = t + self.add_whitespace(start) + self.tokens.append(token) + self.prev_row, self.prev_col = end + if tok_type in (NEWLINE, NL): + self.prev_row += 1 + self.prev_col = 0 + return "".join(self.tokens) + + def compat(self, token, iterable): + startline = False + indents = [] + toks_append = self.tokens.append + toknum, tokval = token + if toknum in (NAME, NUMBER): + tokval += ' ' + if toknum in (NEWLINE, NL): + startline = True + for tok in iterable: + toknum, tokval = tok[:2] + + if toknum in (NAME, NUMBER): + tokval += ' ' + + if toknum == INDENT: + indents.append(tokval) + continue + elif toknum == DEDENT: + indents.pop() + continue + elif toknum in (NEWLINE, NL): + startline = True + elif startline and indents: + toks_append(indents[-1]) + startline = False + toks_append(tokval) + +cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)') + +def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + +def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argment, readline, + in the same way as the tokenize() generator. + + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read + in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, but + disagree, a SyntaxError will be raised. If the encoding cookie is an invalid + charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. 
+ """ + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return bytes() + + def find_cookie(line): + try: + line_string = line.decode('ascii') + except UnicodeDecodeError: + return None + match = cookie_re.match(line_string) + if not match: + return None + encoding = _get_normal_name(match.group(1)) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + raise SyntaxError("unknown encoding: " + encoding) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + raise SyntaxError('encoding problem: utf-8') + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +def untokenize(iterable): + """Transform tokens back into Python source code. + + Each element returned by the iterable must be a token sequence + with at least two elements, a token number and token value. If + only two tokens are passed, the resulting output is poor. + + Round-trip invariant for full input: + Untokenized source will match input source exactly + + Round-trip invariant for limited intput: + # Output text will tokenize the back to the input + t1 = [tok[:2] for tok in generate_tokens(f.readline)] + newcode = untokenize(t1) + readline = iter(newcode.splitlines(1)).next + t2 = [tok[:2] for tokin generate_tokens(readline)] + assert t1 == t2 + """ + ut = Untokenizer() + return ut.untokenize(iterable) + +def generate_tokens(readline): + """ + The generate_tokens() generator requires one argment, readline, which + must be a callable object which provides the same interface as the + readline() method of built-in file objects. Each call to the function + should return one line of input as a string. Alternately, readline + can be a callable function terminating with StopIteration: + readline = open(myfile).next # Example of alternate readline + + The generator produces 5-tuples with these members: the token type; the + token string; a 2-tuple (srow, scol) of ints specifying the row and + column where the token begins in the source; a 2-tuple (erow, ecol) of + ints specifying the row and column where the token ends in the source; + and the line on which the token was found. The line passed is the + logical line; continuation lines are included. 
+ """ + lnum = parenlev = continued = 0 + namechars, numchars = string.ascii_letters + '_', '0123456789' + contstr, needcont = '', 0 + contline = None + indents = [0] + + while 1: # loop over lines in stream + try: + line = readline() + except StopIteration: + line = '' + lnum = lnum + 1 + pos, max = 0, len(line) + + if contstr: # continued string + if not line: + raise TokenError, ("EOF in multi-line string", strstart) + endmatch = endprog.match(line) + if endmatch: + pos = end = endmatch.end(0) + yield (STRING, contstr + line[:end], + strstart, (lnum, end), contline + line) + contstr, needcont = '', 0 + contline = None + elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': + yield (ERRORTOKEN, contstr + line, + strstart, (lnum, len(line)), contline) + contstr = '' + contline = None + continue + else: + contstr = contstr + line + contline = contline + line + continue + + elif parenlev == 0 and not continued: # new statement + if not line: break + column = 0 + while pos < max: # measure leading whitespace + if line[pos] == ' ': column = column + 1 + elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize + elif line[pos] == '\f': column = 0 + else: break + pos = pos + 1 + if pos == max: break + + if line[pos] in '#\r\n': # skip comments or blank lines + if line[pos] == '#': + comment_token = line[pos:].rstrip('\r\n') + nl_pos = pos + len(comment_token) + yield (COMMENT, comment_token, + (lnum, pos), (lnum, pos + len(comment_token)), line) + yield (NL, line[nl_pos:], + (lnum, nl_pos), (lnum, len(line)), line) + else: + yield ((NL, COMMENT)[line[pos] == '#'], line[pos:], + (lnum, pos), (lnum, len(line)), line) + continue + + if column > indents[-1]: # count indents or dedents + indents.append(column) + yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) + while column < indents[-1]: + if column not in indents: + raise IndentationError( + "unindent does not match any outer indentation level", + ("", lnum, pos, line)) + indents = indents[:-1] + yield (DEDENT, '', (lnum, pos), (lnum, pos), line) + + else: # continued statement + if not line: + raise TokenError, ("EOF in multi-line statement", (lnum, 0)) + continued = 0 + + while pos < max: + pseudomatch = pseudoprog.match(line, pos) + if pseudomatch: # scan for tokens + start, end = pseudomatch.span(1) + spos, epos, pos = (lnum, start), (lnum, end), end + token, initial = line[start:end], line[start] + + if initial in numchars or \ + (initial == '.' 
and token != '.'): # ordinary number + yield (NUMBER, token, spos, epos, line) + elif initial in '\r\n': + newline = NEWLINE + if parenlev > 0: + newline = NL + yield (newline, token, spos, epos, line) + elif initial == '#': + assert not token.endswith("\n") + yield (COMMENT, token, spos, epos, line) + elif token in triple_quoted: + endprog = endprogs[token] + endmatch = endprog.match(line, pos) + if endmatch: # all on one line + pos = endmatch.end(0) + token = line[start:pos] + yield (STRING, token, spos, (lnum, pos), line) + else: + strstart = (lnum, start) # multiple lines + contstr = line[start:] + contline = line + break + elif initial in single_quoted or \ + token[:2] in single_quoted or \ + token[:3] in single_quoted: + if token[-1] == '\n': # continued string + strstart = (lnum, start) + endprog = (endprogs[initial] or endprogs[token[1]] or + endprogs[token[2]]) + contstr, needcont = line[start:], 1 + contline = line + break + else: # ordinary string + yield (STRING, token, spos, epos, line) + elif initial in namechars: # ordinary name + yield (NAME, token, spos, epos, line) + elif initial == '\\': # continued stmt + # This yield is new; needed for better idempotency: + yield (NL, token, spos, (lnum, pos), line) + continued = 1 + else: + if initial in '([{': parenlev = parenlev + 1 + elif initial in ')]}': parenlev = parenlev - 1 + yield (OP, token, spos, epos, line) + else: + yield (ERRORTOKEN, line[pos], + (lnum, pos), (lnum, pos+1), line) + pos = pos + 1 + + for indent in indents[1:]: # pop remaining indent levels + yield (DEDENT, '', (lnum, 0), (lnum, 0), '') + yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '') + +if __name__ == '__main__': # testing + import sys + if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline) + else: tokenize(sys.stdin.readline) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pygram.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pygram.py new file mode 100644 index 00000000..621ff24c --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pygram.py @@ -0,0 +1,40 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Export the Python grammar and symbols.""" + +# Python imports +import os + +# Local imports +from .pgen2 import token +from .pgen2 import driver +from . import pytree + +# The grammar file +_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") +_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), + "PatternGrammar.txt") + + +class Symbols(object): + + def __init__(self, grammar): + """Initializer. + + Creates an attribute for each grammar symbol (nonterminal), + whose value is the symbol's type (an int >= 256). + """ + for name, symbol in grammar.symbol2number.iteritems(): + setattr(self, name, symbol) + + +python_grammar = driver.load_grammar(_GRAMMAR_FILE) + +python_symbols = Symbols(python_grammar) + +python_grammar_no_print_statement = python_grammar.copy() +del python_grammar_no_print_statement.keywords["print"] + +pattern_grammar = driver.load_grammar(_PATTERN_GRAMMAR_FILE) +pattern_symbols = Symbols(pattern_grammar) diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pytree.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pytree.py new file mode 100644 index 00000000..179caca5 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/pytree.py @@ -0,0 +1,887 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +""" +Python parse tree definitions. 
+ +This is a very concrete parse tree; we need to keep every token and +even the comments and whitespace between tokens. + +There's also a pattern matching implementation here. +""" + +__author__ = "Guido van Rossum " + +import sys +import warnings +from StringIO import StringIO + +HUGE = 0x7FFFFFFF # maximum repeat count, default max + +_type_reprs = {} +def type_repr(type_num): + global _type_reprs + if not _type_reprs: + from .pygram import python_symbols + # printing tokens is possible but not as useful + # from .pgen2 import token // token.__dict__.items(): + for name, val in python_symbols.__dict__.items(): + if type(val) == int: _type_reprs[val] = name + return _type_reprs.setdefault(type_num, type_num) + +class Base(object): + + """ + Abstract base class for Node and Leaf. + + This provides some default functionality and boilerplate using the + template pattern. + + A node may be a subnode of at most one parent. + """ + + # Default values for instance variables + type = None # int: token number (< 256) or symbol number (>= 256) + parent = None # Parent node pointer, or None + children = () # Tuple of subnodes + was_changed = False + was_checked = False + + def __new__(cls, *args, **kwds): + """Constructor that prevents Base from being instantiated.""" + assert cls is not Base, "Cannot instantiate Base" + return object.__new__(cls) + + def __eq__(self, other): + """ + Compare two nodes for equality. + + This calls the method _eq(). + """ + if self.__class__ is not other.__class__: + return NotImplemented + return self._eq(other) + + __hash__ = None # For Py3 compatibility. + + def __ne__(self, other): + """ + Compare two nodes for inequality. + + This calls the method _eq(). + """ + if self.__class__ is not other.__class__: + return NotImplemented + return not self._eq(other) + + def _eq(self, other): + """ + Compare two nodes for equality. + + This is called by __eq__ and __ne__. It is only called if the two nodes + have the same type. This must be implemented by the concrete subclass. + Nodes should be considered equal if they have the same structure, + ignoring the prefix string and other context information. + """ + raise NotImplementedError + + def clone(self): + """ + Return a cloned (deep) copy of self. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def post_order(self): + """ + Return a post-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def pre_order(self): + """ + Return a pre-order iterator for the tree. + + This must be implemented by the concrete subclass. + """ + raise NotImplementedError + + def set_prefix(self, prefix): + """ + Set the prefix for the node (see Leaf class). + + DEPRECATED; use the prefix property directly. + """ + warnings.warn("set_prefix() is deprecated; use the prefix property", + DeprecationWarning, stacklevel=2) + self.prefix = prefix + + def get_prefix(self): + """ + Return the prefix for the node (see Leaf class). + + DEPRECATED; use the prefix property directly. 
+ """ + warnings.warn("get_prefix() is deprecated; use the prefix property", + DeprecationWarning, stacklevel=2) + return self.prefix + + def replace(self, new): + """Replace this node with a new one in the parent.""" + assert self.parent is not None, str(self) + assert new is not None + if not isinstance(new, list): + new = [new] + l_children = [] + found = False + for ch in self.parent.children: + if ch is self: + assert not found, (self.parent.children, self, new) + if new is not None: + l_children.extend(new) + found = True + else: + l_children.append(ch) + assert found, (self.children, self, new) + self.parent.changed() + self.parent.children = l_children + for x in new: + x.parent = self.parent + self.parent = None + + def get_lineno(self): + """Return the line number which generated the invocant node.""" + node = self + while not isinstance(node, Leaf): + if not node.children: + return + node = node.children[0] + return node.lineno + + def changed(self): + if self.parent: + self.parent.changed() + self.was_changed = True + + def remove(self): + """ + Remove the node from the tree. Returns the position of the node in its + parent's children before it was removed. + """ + if self.parent: + for i, node in enumerate(self.parent.children): + if node is self: + self.parent.changed() + del self.parent.children[i] + self.parent = None + return i + + @property + def next_sibling(self): + """ + The node immediately following the invocant in their parent's children + list. If the invocant does not have a next sibling, it is None + """ + if self.parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(self.parent.children): + if child is self: + try: + return self.parent.children[i+1] + except IndexError: + return None + + @property + def prev_sibling(self): + """ + The node immediately preceding the invocant in their parent's children + list. If the invocant does not have a previous sibling, it is None. + """ + if self.parent is None: + return None + + # Can't use index(); we need to test by identity + for i, child in enumerate(self.parent.children): + if child is self: + if i == 0: + return None + return self.parent.children[i-1] + + def leaves(self): + for child in self.children: + for x in child.leaves(): + yield x + + def depth(self): + if self.parent is None: + return 0 + return 1 + self.parent.depth() + + def get_suffix(self): + """ + Return the string immediately following the invocant node. This is + effectively equivalent to node.next_sibling.prefix + """ + next_sib = self.next_sibling + if next_sib is None: + return u"" + return next_sib.prefix + + if sys.version_info < (3, 0): + def __str__(self): + return unicode(self).encode("ascii") + +class Node(Base): + + """Concrete implementation for interior nodes.""" + + def __init__(self,type, children, + context=None, + prefix=None, + fixers_applied=None): + """ + Initializer. + + Takes a type constant (a symbol number >= 256), a sequence of + child nodes, and an optional context keyword argument. + + As a side effect, the parent pointers of the children are updated. 
+ """ + assert type >= 256, type + self.type = type + self.children = list(children) + for ch in self.children: + assert ch.parent is None, repr(ch) + ch.parent = self + if prefix is not None: + self.prefix = prefix + if fixers_applied: + self.fixers_applied = fixers_applied[:] + else: + self.fixers_applied = None + + def __repr__(self): + """Return a canonical string representation.""" + return "%s(%s, %r)" % (self.__class__.__name__, + type_repr(self.type), + self.children) + + def __unicode__(self): + """ + Return a pretty string representation. + + This reproduces the input source exactly. + """ + return u"".join(map(unicode, self.children)) + + if sys.version_info > (3, 0): + __str__ = __unicode__ + + def _eq(self, other): + """Compare two nodes for equality.""" + return (self.type, self.children) == (other.type, other.children) + + def clone(self): + """Return a cloned (deep) copy of self.""" + return Node(self.type, [ch.clone() for ch in self.children], + fixers_applied=self.fixers_applied) + + def post_order(self): + """Return a post-order iterator for the tree.""" + for child in self.children: + for node in child.post_order(): + yield node + yield self + + def pre_order(self): + """Return a pre-order iterator for the tree.""" + yield self + for child in self.children: + for node in child.pre_order(): + yield node + + def _prefix_getter(self): + """ + The whitespace and comments preceding this node in the input. + """ + if not self.children: + return "" + return self.children[0].prefix + + def _prefix_setter(self, prefix): + if self.children: + self.children[0].prefix = prefix + + prefix = property(_prefix_getter, _prefix_setter) + + def set_child(self, i, child): + """ + Equivalent to 'node.children[i] = child'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children[i].parent = None + self.children[i] = child + self.changed() + + def insert_child(self, i, child): + """ + Equivalent to 'node.children.insert(i, child)'. This method also sets + the child's parent attribute appropriately. + """ + child.parent = self + self.children.insert(i, child) + self.changed() + + def append_child(self, child): + """ + Equivalent to 'node.children.append(child)'. This method also sets the + child's parent attribute appropriately. + """ + child.parent = self + self.children.append(child) + self.changed() + + +class Leaf(Base): + + """Concrete implementation for leaf nodes.""" + + # Default values for instance variables + _prefix = "" # Whitespace and comments preceding this token in the input + lineno = 0 # Line where this token starts in the input + column = 0 # Column where this token tarts in the input + + def __init__(self, type, value, + context=None, + prefix=None, + fixers_applied=[]): + """ + Initializer. + + Takes a type constant (a token number < 256), a string value, and an + optional context keyword argument. + """ + assert 0 <= type < 256, type + if context is not None: + self._prefix, (self.lineno, self.column) = context + self.type = type + self.value = value + if prefix is not None: + self._prefix = prefix + self.fixers_applied = fixers_applied[:] + + def __repr__(self): + """Return a canonical string representation.""" + return "%s(%r, %r)" % (self.__class__.__name__, + self.type, + self.value) + + def __unicode__(self): + """ + Return a pretty string representation. + + This reproduces the input source exactly. 
+ """ + return self.prefix + unicode(self.value) + + if sys.version_info > (3, 0): + __str__ = __unicode__ + + def _eq(self, other): + """Compare two nodes for equality.""" + return (self.type, self.value) == (other.type, other.value) + + def clone(self): + """Return a cloned (deep) copy of self.""" + return Leaf(self.type, self.value, + (self.prefix, (self.lineno, self.column)), + fixers_applied=self.fixers_applied) + + def leaves(self): + yield self + + def post_order(self): + """Return a post-order iterator for the tree.""" + yield self + + def pre_order(self): + """Return a pre-order iterator for the tree.""" + yield self + + def _prefix_getter(self): + """ + The whitespace and comments preceding this token in the input. + """ + return self._prefix + + def _prefix_setter(self, prefix): + self.changed() + self._prefix = prefix + + prefix = property(_prefix_getter, _prefix_setter) + +def convert(gr, raw_node): + """ + Convert raw node information to a Node or Leaf instance. + + This is passed to the parser driver which calls it whenever a reduction of a + grammar rule produces a new complete node, so that the tree is build + strictly bottom-up. + """ + type, value, context, children = raw_node + if children or type in gr.number2symbol: + # If there's exactly one child, return that child instead of + # creating a new node. + if len(children) == 1: + return children[0] + return Node(type, children, context=context) + else: + return Leaf(type, value, context=context) + + +class BasePattern(object): + + """ + A pattern is a tree matching pattern. + + It looks for a specific node type (token or symbol), and + optionally for a specific content. + + This is an abstract base class. There are three concrete + subclasses: + + - LeafPattern matches a single leaf node; + - NodePattern matches a single node (usually non-leaf); + - WildcardPattern matches a sequence of nodes of variable length. + """ + + # Defaults for instance variables + type = None # Node type (token if < 256, symbol if >= 256) + content = None # Optional content matching pattern + name = None # Optional name used to store match in results dict + + def __new__(cls, *args, **kwds): + """Constructor that prevents BasePattern from being instantiated.""" + assert cls is not BasePattern, "Cannot instantiate BasePattern" + return object.__new__(cls) + + def __repr__(self): + args = [type_repr(self.type), self.content, self.name] + while args and args[-1] is None: + del args[-1] + return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args))) + + def optimize(self): + """ + A subclass can define this as a hook for optimizations. + + Returns either self or another node with the same effect. + """ + return self + + def match(self, node, results=None): + """ + Does this pattern exactly match a node? + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + Default implementation for non-wildcard patterns. + """ + if self.type is not None and node.type != self.type: + return False + if self.content is not None: + r = None + if results is not None: + r = {} + if not self._submatch(node, r): + return False + if r: + results.update(r) + if results is not None and self.name: + results[self.name] = node + return True + + def match_seq(self, nodes, results=None): + """ + Does this pattern exactly match a sequence of nodes? + + Default implementation for non-wildcard patterns. 
+ """ + if len(nodes) != 1: + return False + return self.match(nodes[0], results) + + def generate_matches(self, nodes): + """ + Generator yielding all matches for this pattern. + + Default implementation for non-wildcard patterns. + """ + r = {} + if nodes and self.match(nodes[0], r): + yield 1, r + + +class LeafPattern(BasePattern): + + def __init__(self, type=None, content=None, name=None): + """ + Initializer. Takes optional type, content, and name. + + The type, if given must be a token type (< 256). If not given, + this matches any *leaf* node; the content may still be required. + + The content, if given, must be a string. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert 0 <= type < 256, type + if content is not None: + assert isinstance(content, basestring), repr(content) + self.type = type + self.content = content + self.name = name + + def match(self, node, results=None): + """Override match() to insist on a leaf node.""" + if not isinstance(node, Leaf): + return False + return BasePattern.match(self, node, results) + + def _submatch(self, node, results=None): + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + return self.content == node.value + + +class NodePattern(BasePattern): + + wildcards = False + + def __init__(self, type=None, content=None, name=None): + """ + Initializer. Takes optional type, content, and name. + + The type, if given, must be a symbol type (>= 256). If the + type is None this matches *any* single node (leaf or not), + except if content is not None, in which it only matches + non-leaf nodes that also match the content pattern. + + The content, if not None, must be a sequence of Patterns that + must match the node's children exactly. If the content is + given, the type must not be None. + + If a name is given, the matching node is stored in the results + dict under that key. + """ + if type is not None: + assert type >= 256, type + if content is not None: + assert not isinstance(content, basestring), repr(content) + content = list(content) + for i, item in enumerate(content): + assert isinstance(item, BasePattern), (i, item) + if isinstance(item, WildcardPattern): + self.wildcards = True + self.type = type + self.content = content + self.name = name + + def _submatch(self, node, results=None): + """ + Match the pattern's content to the node's children. + + This assumes the node type matches and self.content is not None. + + Returns True if it matches, False if not. + + If results is not None, it must be a dict which will be + updated with the nodes matching named subpatterns. + + When returning False, the results dict may still be updated. + """ + if self.wildcards: + for c, r in generate_matches(self.content, node.children): + if c == len(node.children): + if results is not None: + results.update(r) + return True + return False + if len(self.content) != len(node.children): + return False + for subpattern, child in zip(self.content, node.children): + if not subpattern.match(child, results): + return False + return True + + +class WildcardPattern(BasePattern): + + """ + A wildcard pattern can match zero or more nodes. 
+ + This has all the flexibility needed to implement patterns like: + + .* .+ .? .{m,n} + (a b c | d e | f) + (...)* (...)+ (...)? (...){m,n} + + except it always uses non-greedy matching. + """ + + def __init__(self, content=None, min=0, max=HUGE, name=None): + """ + Initializer. + + Args: + content: optional sequence of subsequences of patterns; + if absent, matches one node; + if present, each subsequence is an alternative [*] + min: optional minimum number of times to match, default 0 + max: optional maximum number of times to match, default HUGE + name: optional name assigned to this match + + [*] Thus, if content is [[a, b, c], [d, e], [f, g, h]] this is + equivalent to (a b c | d e | f g h); if content is None, + this is equivalent to '.' in regular expression terms. + The min and max parameters work as follows: + min=0, max=maxint: .* + min=1, max=maxint: .+ + min=0, max=1: .? + min=1, max=1: . + If content is not None, replace the dot with the parenthesized + list of alternatives, e.g. (a b c | d e | f g h)* + """ + assert 0 <= min <= max <= HUGE, (min, max) + if content is not None: + content = tuple(map(tuple, content)) # Protect against alterations + # Check sanity of alternatives + assert len(content), repr(content) # Can't have zero alternatives + for alt in content: + assert len(alt), repr(alt) # Can have empty alternatives + self.content = content + self.min = min + self.max = max + self.name = name + + def optimize(self): + """Optimize certain stacked wildcard patterns.""" + subpattern = None + if (self.content is not None and + len(self.content) == 1 and len(self.content[0]) == 1): + subpattern = self.content[0][0] + if self.min == 1 and self.max == 1: + if self.content is None: + return NodePattern(name=self.name) + if subpattern is not None and self.name == subpattern.name: + return subpattern.optimize() + if (self.min <= 1 and isinstance(subpattern, WildcardPattern) and + subpattern.min <= 1 and self.name == subpattern.name): + return WildcardPattern(subpattern.content, + self.min*subpattern.min, + self.max*subpattern.max, + subpattern.name) + return self + + def match(self, node, results=None): + """Does this pattern exactly match a node?""" + return self.match_seq([node], results) + + def match_seq(self, nodes, results=None): + """Does this pattern exactly match a sequence of nodes?""" + for c, r in self.generate_matches(nodes): + if c == len(nodes): + if results is not None: + results.update(r) + if self.name: + results[self.name] = list(nodes) + return True + return False + + def generate_matches(self, nodes): + """ + Generator yielding matches for a sequence of nodes. + + Args: + nodes: sequence of nodes + + Yields: + (count, results) tuples where: + count: the match comprises nodes[:count]; + results: dict containing named submatches. + """ + if self.content is None: + # Shortcut for special case (see __init__.__doc__) + for count in xrange(self.min, 1 + min(len(nodes), self.max)): + r = {} + if self.name: + r[self.name] = nodes[:count] + yield count, r + elif self.name == "bare_name": + yield self._bare_name_matches(nodes) + else: + # The reason for this is that hitting the recursion limit usually + # results in some ugly messages about how RuntimeErrors are being + # ignored. We don't do this on non-CPython implementation because + # they don't have this problem. 
+ if hasattr(sys, "getrefcount"): + save_stderr = sys.stderr + sys.stderr = StringIO() + try: + for count, r in self._recursive_matches(nodes, 0): + if self.name: + r[self.name] = nodes[:count] + yield count, r + except RuntimeError: + # We fall back to the iterative pattern matching scheme if the recursive + # scheme hits the recursion limit. + for count, r in self._iterative_matches(nodes): + if self.name: + r[self.name] = nodes[:count] + yield count, r + finally: + if hasattr(sys, "getrefcount"): + sys.stderr = save_stderr + + def _iterative_matches(self, nodes): + """Helper to iteratively yield the matches.""" + nodelen = len(nodes) + if 0 >= self.min: + yield 0, {} + + results = [] + # generate matches that use just one alt from self.content + for alt in self.content: + for c, r in generate_matches(alt, nodes): + yield c, r + results.append((c, r)) + + # for each match, iterate down the nodes + while results: + new_results = [] + for c0, r0 in results: + # stop if the entire set of nodes has been matched + if c0 < nodelen and c0 <= self.max: + for alt in self.content: + for c1, r1 in generate_matches(alt, nodes[c0:]): + if c1 > 0: + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + new_results.append((c0 + c1, r)) + results = new_results + + def _bare_name_matches(self, nodes): + """Special optimized matcher for bare_name.""" + count = 0 + r = {} + done = False + max = len(nodes) + while not done and count < max: + done = True + for leaf in self.content: + if leaf[0].match(nodes[count], r): + count += 1 + done = False + break + r[self.name] = nodes[:count] + return count, r + + def _recursive_matches(self, nodes, count): + """Helper to recursively yield the matches.""" + assert self.content is not None + if count >= self.min: + yield 0, {} + if count < self.max: + for alt in self.content: + for c0, r0 in generate_matches(alt, nodes): + for c1, r1 in self._recursive_matches(nodes[c0:], count+1): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r + + +class NegatedPattern(BasePattern): + + def __init__(self, content=None): + """ + Initializer. + + The argument is either a pattern or None. If it is None, this + only matches an empty sequence (effectively '$' in regex + lingo). If it is not None, this matches whenever the argument + pattern doesn't have any matches. + """ + if content is not None: + assert isinstance(content, BasePattern), repr(content) + self.content = content + + def match(self, node): + # We never match a node in its entirety + return False + + def match_seq(self, nodes): + # We only match an empty sequence of nodes in its entirety + return len(nodes) == 0 + + def generate_matches(self, nodes): + if self.content is None: + # Return a match if there is an empty sequence + if len(nodes) == 0: + yield 0, {} + else: + # Return a match if the argument pattern has no matches + for c, r in self.content.generate_matches(nodes): + return + yield 0, {} + + +def generate_matches(patterns, nodes): + """ + Generator yielding matches for a sequence of patterns and nodes. + + Args: + patterns: a sequence of patterns + nodes: a sequence of nodes + + Yields: + (count, results) tuples where: + count: the entire sequence of patterns matches nodes[:count]; + results: dict containing named submatches. 
+ """ + if not patterns: + yield 0, {} + else: + p, rest = patterns[0], patterns[1:] + for c0, r0 in p.generate_matches(nodes): + if not rest: + yield c0, r0 + else: + for c1, r1 in generate_matches(rest, nodes[c0:]): + r = {} + r.update(r0) + r.update(r1) + yield c0 + c1, r diff --git a/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/refactor.py b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/refactor.py new file mode 100644 index 00000000..a4c168df --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/lib2to3/lib2to3/refactor.py @@ -0,0 +1,747 @@ +# Copyright 2006 Google, Inc. All Rights Reserved. +# Licensed to PSF under a Contributor Agreement. + +"""Refactoring framework. + +Used as a main program, this can refactor any number of files and/or +recursively descend down directories. Imported as a module, this +provides infrastructure to write your own refactoring tool. +""" + +from __future__ import with_statement + +__author__ = "Guido van Rossum " + + +# Python imports +import os +import sys +import logging +import operator +import collections +import StringIO +from itertools import chain + +# Local imports +from .pgen2 import driver, tokenize, token +from .fixer_util import find_root +from . import pytree, pygram +from . import btm_utils as bu +from . import btm_matcher as bm + + +def get_all_fix_names(fixer_pkg, remove_prefix=True): + """Return a sorted list of all available fix names in the given package.""" + pkg = __import__(fixer_pkg, [], [], ["*"]) + fixer_dir = os.path.dirname(pkg.__file__) + fix_names = [] + for name in sorted(os.listdir(fixer_dir)): + if name.startswith("fix_") and name.endswith(".py"): + if remove_prefix: + name = name[4:] + fix_names.append(name[:-3]) + return fix_names + + +class _EveryNode(Exception): + pass + + +def _get_head_types(pat): + """ Accepts a pytree Pattern Node and returns a set + of the pattern types which will match first. """ + + if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)): + # NodePatters must either have no type and no content + # or a type and content -- so they don't get any farther + # Always return leafs + if pat.type is None: + raise _EveryNode + return set([pat.type]) + + if isinstance(pat, pytree.NegatedPattern): + if pat.content: + return _get_head_types(pat.content) + raise _EveryNode # Negated Patterns don't have a type + + if isinstance(pat, pytree.WildcardPattern): + # Recurse on each node in content + r = set() + for p in pat.content: + for x in p: + r.update(_get_head_types(x)) + return r + + raise Exception("Oh no! I don't understand pattern %s" %(pat)) + + +def _get_headnode_dict(fixer_list): + """ Accepts a list of fixers and returns a dictionary + of head node type --> fixer list. """ + head_nodes = collections.defaultdict(list) + every = [] + for fixer in fixer_list: + if fixer.pattern: + try: + heads = _get_head_types(fixer.pattern) + except _EveryNode: + every.append(fixer) + else: + for node_type in heads: + head_nodes[node_type].append(fixer) + else: + if fixer._accept_type is not None: + head_nodes[fixer._accept_type].append(fixer) + else: + every.append(fixer) + for node_type in chain(pygram.python_grammar.symbol2number.itervalues(), + pygram.python_grammar.tokens): + head_nodes[node_type].extend(every) + return dict(head_nodes) + + +def get_fixers_from_package(pkg_name): + """ + Return the fully qualified names for fixers in the package pkg_name. + """ + return [pkg_name + "." 
+ fix_name + for fix_name in get_all_fix_names(pkg_name, False)] + +def _identity(obj): + return obj + +if sys.version_info < (3, 0): + import codecs + _open_with_encoding = codecs.open + # codecs.open doesn't translate newlines sadly. + def _from_system_newlines(input): + return input.replace(u"\r\n", u"\n") + def _to_system_newlines(input): + if os.linesep != "\n": + return input.replace(u"\n", os.linesep) + else: + return input +else: + _open_with_encoding = open + _from_system_newlines = _identity + _to_system_newlines = _identity + + +def _detect_future_features(source): + have_docstring = False + gen = tokenize.generate_tokens(StringIO.StringIO(source).readline) + def advance(): + tok = gen.next() + return tok[0], tok[1] + ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT)) + features = set() + try: + while True: + tp, value = advance() + if tp in ignore: + continue + elif tp == token.STRING: + if have_docstring: + break + have_docstring = True + elif tp == token.NAME and value == u"from": + tp, value = advance() + if tp != token.NAME or value != u"__future__": + break + tp, value = advance() + if tp != token.NAME or value != u"import": + break + tp, value = advance() + if tp == token.OP and value == u"(": + tp, value = advance() + while tp == token.NAME: + features.add(value) + tp, value = advance() + if tp != token.OP or value != u",": + break + tp, value = advance() + else: + break + except StopIteration: + pass + return frozenset(features) + + +class FixerError(Exception): + """A fixer could not be loaded.""" + + +class RefactoringTool(object): + + _default_options = {"print_function" : False, + "write_unchanged_files" : False} + + CLASS_PREFIX = "Fix" # The prefix for fixer classes + FILE_PREFIX = "fix_" # The prefix for modules with a fixer within + + def __init__(self, fixer_names, options=None, explicit=None): + """Initializer. + + Args: + fixer_names: a list of fixers to import + options: an dict with configuration. + explicit: a list of fixers to run even if they are explicit. + """ + self.fixers = fixer_names + self.explicit = explicit or [] + self.options = self._default_options.copy() + if options is not None: + self.options.update(options) + if self.options["print_function"]: + self.grammar = pygram.python_grammar_no_print_statement + else: + self.grammar = pygram.python_grammar + # When this is True, the refactor*() methods will call write_file() for + # files processed even if they were not changed during refactoring. If + # and only if the refactor method's write parameter was True. 
+ self.write_unchanged_files = self.options.get("write_unchanged_files") + self.errors = [] + self.logger = logging.getLogger("RefactoringTool") + self.fixer_log = [] + self.wrote = False + self.driver = driver.Driver(self.grammar, + convert=pytree.convert, + logger=self.logger) + self.pre_order, self.post_order = self.get_fixers() + + + self.files = [] # List of files that were or should be modified + + self.BM = bm.BottomMatcher() + self.bmi_pre_order = [] # Bottom Matcher incompatible fixers + self.bmi_post_order = [] + + for fixer in chain(self.post_order, self.pre_order): + if fixer.BM_compatible: + self.BM.add_fixer(fixer) + # remove fixers that will be handled by the bottom-up + # matcher + elif fixer in self.pre_order: + self.bmi_pre_order.append(fixer) + elif fixer in self.post_order: + self.bmi_post_order.append(fixer) + + self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order) + self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order) + + + + def get_fixers(self): + """Inspects the options to load the requested patterns and handlers. + + Returns: + (pre_order, post_order), where pre_order is the list of fixers that + want a pre-order AST traversal, and post_order is the list that want + post-order traversal. + """ + pre_order_fixers = [] + post_order_fixers = [] + for fix_mod_path in self.fixers: + mod = __import__(fix_mod_path, {}, {}, ["*"]) + fix_name = fix_mod_path.rsplit(".", 1)[-1] + if fix_name.startswith(self.FILE_PREFIX): + fix_name = fix_name[len(self.FILE_PREFIX):] + parts = fix_name.split("_") + class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts]) + try: + fix_class = getattr(mod, class_name) + except AttributeError: + raise FixerError("Can't find %s.%s" % (fix_name, class_name)) + fixer = fix_class(self.options, self.fixer_log) + if fixer.explicit and self.explicit is not True and \ + fix_mod_path not in self.explicit: + self.log_message("Skipping implicit fixer: %s", fix_name) + continue + + self.log_debug("Adding transformation: %s", fix_name) + if fixer.order == "pre": + pre_order_fixers.append(fixer) + elif fixer.order == "post": + post_order_fixers.append(fixer) + else: + raise FixerError("Illegal fixer order: %r" % fixer.order) + + key_func = operator.attrgetter("run_order") + pre_order_fixers.sort(key=key_func) + post_order_fixers.sort(key=key_func) + return (pre_order_fixers, post_order_fixers) + + def log_error(self, msg, *args, **kwds): + """Called when an error occurs.""" + raise + + def log_message(self, msg, *args): + """Hook to log a message.""" + if args: + msg = msg % args + self.logger.info(msg) + + def log_debug(self, msg, *args): + if args: + msg = msg % args + self.logger.debug(msg) + + def print_output(self, old_text, new_text, filename, equal): + """Called with the old version, new version, and filename of a + refactored file.""" + pass + + def refactor(self, items, write=False, doctests_only=False): + """Refactor a list of files and directories.""" + + for dir_or_file in items: + if os.path.isdir(dir_or_file): + self.refactor_dir(dir_or_file, write, doctests_only) + else: + self.refactor_file(dir_or_file, write, doctests_only) + + def refactor_dir(self, dir_name, write=False, doctests_only=False): + """Descends down a directory and refactor every Python file found. + + Python files are assumed to have a .py extension. + + Files and subdirectories starting with '.' are skipped. 
+ """ + py_ext = os.extsep + "py" + for dirpath, dirnames, filenames in os.walk(dir_name): + self.log_debug("Descending into %s", dirpath) + dirnames.sort() + filenames.sort() + for name in filenames: + if (not name.startswith(".") and + os.path.splitext(name)[1] == py_ext): + fullname = os.path.join(dirpath, name) + self.refactor_file(fullname, write, doctests_only) + # Modify dirnames in-place to remove subdirs with leading dots + dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")] + + def _read_python_source(self, filename): + """ + Do our best to decode a Python source file correctly. + """ + try: + f = open(filename, "rb") + except IOError as err: + self.log_error("Can't open %s: %s", filename, err) + return None, None + try: + encoding = tokenize.detect_encoding(f.readline)[0] + finally: + f.close() + with _open_with_encoding(filename, "r", encoding=encoding) as f: + return _from_system_newlines(f.read()), encoding + + def refactor_file(self, filename, write=False, doctests_only=False): + """Refactors a file.""" + input, encoding = self._read_python_source(filename) + if input is None: + # Reading the file failed. + return + input += u"\n" # Silence certain parse errors + if doctests_only: + self.log_debug("Refactoring doctests in %s", filename) + output = self.refactor_docstring(input, filename) + if self.write_unchanged_files or output != input: + self.processed_file(output, filename, input, write, encoding) + else: + self.log_debug("No doctest changes in %s", filename) + else: + tree = self.refactor_string(input, filename) + if self.write_unchanged_files or (tree and tree.was_changed): + # The [:-1] is to take off the \n we added earlier + self.processed_file(unicode(tree)[:-1], filename, + write=write, encoding=encoding) + else: + self.log_debug("No changes in %s", filename) + + def refactor_string(self, data, name): + """Refactor a given input string. + + Args: + data: a string holding the code to be refactored. + name: a human-readable name for use in error/log messages. + + Returns: + An AST corresponding to the refactored input stream; None if + there were errors during the parse. + """ + features = _detect_future_features(data) + if "print_function" in features: + self.driver.grammar = pygram.python_grammar_no_print_statement + try: + tree = self.driver.parse_string(data) + except Exception as err: + self.log_error("Can't parse %s: %s: %s", + name, err.__class__.__name__, err) + return + finally: + self.driver.grammar = self.grammar + tree.future_features = features + self.log_debug("Refactoring %s", name) + self.refactor_tree(tree, name) + return tree + + def refactor_stdin(self, doctests_only=False): + input = sys.stdin.read() + if doctests_only: + self.log_debug("Refactoring doctests in stdin") + output = self.refactor_docstring(input, "") + if self.write_unchanged_files or output != input: + self.processed_file(output, "", input) + else: + self.log_debug("No doctest changes in stdin") + else: + tree = self.refactor_string(input, "") + if self.write_unchanged_files or (tree and tree.was_changed): + self.processed_file(unicode(tree), "", input) + else: + self.log_debug("No changes in stdin") + + def refactor_tree(self, tree, name): + """Refactors a parse tree (modifying the tree in place). + + For compatible patterns the bottom matcher module is + used. Otherwise the tree is traversed node-to-node for + matches. + + Args: + tree: a pytree.Node instance representing the root of the tree + to be refactored. + name: a human-readable name for this tree. 
+ + Returns: + True if the tree was modified, False otherwise. + """ + + for fixer in chain(self.pre_order, self.post_order): + fixer.start_tree(tree, name) + + #use traditional matching for the incompatible fixers + self.traverse_by(self.bmi_pre_order_heads, tree.pre_order()) + self.traverse_by(self.bmi_post_order_heads, tree.post_order()) + + # obtain a set of candidate nodes + match_set = self.BM.run(tree.leaves()) + + while any(match_set.values()): + for fixer in self.BM.fixers: + if fixer in match_set and match_set[fixer]: + #sort by depth; apply fixers from bottom(of the AST) to top + match_set[fixer].sort(key=pytree.Base.depth, reverse=True) + + if fixer.keep_line_order: + #some fixers(eg fix_imports) must be applied + #with the original file's line order + match_set[fixer].sort(key=pytree.Base.get_lineno) + + for node in list(match_set[fixer]): + if node in match_set[fixer]: + match_set[fixer].remove(node) + + try: + find_root(node) + except ValueError: + # this node has been cut off from a + # previous transformation ; skip + continue + + if node.fixers_applied and fixer in node.fixers_applied: + # do not apply the same fixer again + continue + + results = fixer.match(node) + + if results: + new = fixer.transform(node, results) + if new is not None: + node.replace(new) + #new.fixers_applied.append(fixer) + for node in new.post_order(): + # do not apply the fixer again to + # this or any subnode + if not node.fixers_applied: + node.fixers_applied = [] + node.fixers_applied.append(fixer) + + # update the original match set for + # the added code + new_matches = self.BM.run(new.leaves()) + for fxr in new_matches: + if not fxr in match_set: + match_set[fxr]=[] + + match_set[fxr].extend(new_matches[fxr]) + + for fixer in chain(self.pre_order, self.post_order): + fixer.finish_tree(tree, name) + return tree.was_changed + + def traverse_by(self, fixers, traversal): + """Traverse an AST, applying a set of fixers to each node. + + This is a helper method for refactor_tree(). + + Args: + fixers: a list of fixer instances. + traversal: a generator that yields AST nodes. + + Returns: + None + """ + if not fixers: + return + for node in traversal: + for fixer in fixers[node.type]: + results = fixer.match(node) + if results: + new = fixer.transform(node, results) + if new is not None: + node.replace(new) + node = new + + def processed_file(self, new_text, filename, old_text=None, write=False, + encoding=None): + """ + Called when a file has been refactored and there may be changes. + """ + self.files.append(filename) + if old_text is None: + old_text = self._read_python_source(filename)[0] + if old_text is None: + return + equal = old_text == new_text + self.print_output(old_text, new_text, filename, equal) + if equal: + self.log_debug("No changes to %s", filename) + if not self.write_unchanged_files: + return + if write: + self.write_file(new_text, filename, old_text, encoding) + else: + self.log_debug("Not writing changes to %s", filename) + + def write_file(self, new_text, filename, old_text, encoding=None): + """Writes a string to a file. + + It first shows a unified diff between the old text and the new text, and + then rewrites the file; the latter is only done if the write option is + set. 
+ """ + try: + f = _open_with_encoding(filename, "w", encoding=encoding) + except os.error as err: + self.log_error("Can't create %s: %s", filename, err) + return + try: + f.write(_to_system_newlines(new_text)) + except os.error as err: + self.log_error("Can't write %s: %s", filename, err) + finally: + f.close() + self.log_debug("Wrote changes to %s", filename) + self.wrote = True + + PS1 = ">>> " + PS2 = "... " + + def refactor_docstring(self, input, filename): + """Refactors a docstring, looking for doctests. + + This returns a modified version of the input string. It looks + for doctests, which start with a ">>>" prompt, and may be + continued with "..." prompts, as long as the "..." is indented + the same as the ">>>". + + (Unfortunately we can't use the doctest module's parser, + since, like most parsers, it is not geared towards preserving + the original source.) + """ + result = [] + block = None + block_lineno = None + indent = None + lineno = 0 + for line in input.splitlines(True): + lineno += 1 + if line.lstrip().startswith(self.PS1): + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + block_lineno = lineno + block = [line] + i = line.find(self.PS1) + indent = line[:i] + elif (indent is not None and + (line.startswith(indent + self.PS2) or + line == indent + self.PS2.rstrip() + u"\n")): + block.append(line) + else: + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + block = None + indent = None + result.append(line) + if block is not None: + result.extend(self.refactor_doctest(block, block_lineno, + indent, filename)) + return u"".join(result) + + def refactor_doctest(self, block, lineno, indent, filename): + """Refactors one doctest. + + A doctest is given as a block of lines, the first of which starts + with ">>>" (possibly indented), while the remaining lines start + with "..." (identically indented). + + """ + try: + tree = self.parse_block(block, lineno, indent) + except Exception as err: + if self.logger.isEnabledFor(logging.DEBUG): + for line in block: + self.log_debug("Source: %s", line.rstrip(u"\n")) + self.log_error("Can't parse docstring in %s line %s: %s: %s", + filename, lineno, err.__class__.__name__, err) + return block + if self.refactor_tree(tree, filename): + new = unicode(tree).splitlines(True) + # Undo the adjustment of the line numbers in wrap_toks() below. + clipped, new = new[:lineno-1], new[lineno-1:] + assert clipped == [u"\n"] * (lineno-1), clipped + if not new[-1].endswith(u"\n"): + new[-1] += u"\n" + block = [indent + self.PS1 + new.pop(0)] + if new: + block += [indent + self.PS2 + line for line in new] + return block + + def summarize(self): + if self.wrote: + were = "were" + else: + were = "need to be" + if not self.files: + self.log_message("No files %s modified.", were) + else: + self.log_message("Files that %s modified:", were) + for file in self.files: + self.log_message(file) + if self.fixer_log: + self.log_message("Warnings/messages while refactoring:") + for message in self.fixer_log: + self.log_message(message) + if self.errors: + if len(self.errors) == 1: + self.log_message("There was 1 error:") + else: + self.log_message("There were %d errors:", len(self.errors)) + for msg, args, kwds in self.errors: + self.log_message(msg, *args, **kwds) + + def parse_block(self, block, lineno, indent): + """Parses a block into a tree. 
+ + This is necessary to get correct line number / offset information + in the parser diagnostics and embedded into the parse tree. + """ + tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent)) + tree.future_features = frozenset() + return tree + + def wrap_toks(self, block, lineno, indent): + """Wraps a tokenize stream to systematically modify start/end.""" + tokens = tokenize.generate_tokens(self.gen_lines(block, indent).next) + for type, value, (line0, col0), (line1, col1), line_text in tokens: + line0 += lineno - 1 + line1 += lineno - 1 + # Don't bother updating the columns; this is too complicated + # since line_text would also have to be updated and it would + # still break for tokens spanning lines. Let the user guess + # that the column numbers for doctests are relative to the + # end of the prompt string (PS1 or PS2). + yield type, value, (line0, col0), (line1, col1), line_text + + + def gen_lines(self, block, indent): + """Generates lines as expected by tokenize from a list of lines. + + This strips the first len(indent + self.PS1) characters off each line. + """ + prefix1 = indent + self.PS1 + prefix2 = indent + self.PS2 + prefix = prefix1 + for line in block: + if line.startswith(prefix): + yield line[len(prefix):] + elif line == prefix.rstrip() + u"\n": + yield u"\n" + else: + raise AssertionError("line=%r, prefix=%r" % (line, prefix)) + prefix = prefix2 + while True: + yield "" + + +class MultiprocessingUnsupported(Exception): + pass + + +class MultiprocessRefactoringTool(RefactoringTool): + + def __init__(self, *args, **kwargs): + super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs) + self.queue = None + self.output_lock = None + + def refactor(self, items, write=False, doctests_only=False, + num_processes=1): + if num_processes == 1: + return super(MultiprocessRefactoringTool, self).refactor( + items, write, doctests_only) + try: + import multiprocessing + except ImportError: + raise MultiprocessingUnsupported + if self.queue is not None: + raise RuntimeError("already doing multiple processes") + self.queue = multiprocessing.JoinableQueue() + self.output_lock = multiprocessing.Lock() + processes = [multiprocessing.Process(target=self._child) + for i in xrange(num_processes)] + try: + for p in processes: + p.start() + super(MultiprocessRefactoringTool, self).refactor(items, write, + doctests_only) + finally: + self.queue.join() + for i in xrange(num_processes): + self.queue.put(None) + for p in processes: + if p.is_alive(): + p.join() + self.queue = None + + def _child(self): + task = self.queue.get() + while task is not None: + args, kwargs = task + try: + super(MultiprocessRefactoringTool, self).refactor_file( + *args, **kwargs) + finally: + self.queue.task_done() + task = self.queue.get() + + def refactor_file(self, *args, **kwargs): + if self.queue is not None: + self.queue.put((args, kwargs)) + else: + return super(MultiprocessRefactoringTool, self).refactor_file( + *args, **kwargs) diff --git a/ptvsd/pydevd/third_party/pep8/pycodestyle.py b/ptvsd/pydevd/third_party/pep8/pycodestyle.py new file mode 100644 index 00000000..a4b11fe6 --- /dev/null +++ b/ptvsd/pydevd/third_party/pep8/pycodestyle.py @@ -0,0 +1,2325 @@ +#!/usr/bin/env python +# pycodestyle.py - Check Python source code formatting, according to PEP 8 +# +# Copyright (C) 2006-2009 Johann C. 
Rocholl +# Copyright (C) 2009-2014 Florent Xicluna +# Copyright (C) 2014-2016 Ian Lee +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +r""" +Check Python source code formatting, according to PEP 8. + +For usage and a list of options, try this: +$ python pycodestyle.py -h + +This program and its regression test suite live here: +https://github.com/pycqa/pycodestyle + +Groups of errors and warnings: +E errors +W warnings +100 indentation +200 whitespace +300 blank lines +400 imports +500 line length +600 deprecation +700 statements +900 syntax error +""" +from __future__ import with_statement + +import inspect +import keyword +import os +import re +import sys +import time +import tokenize +import warnings + +from fnmatch import fnmatch +from optparse import OptionParser + +try: + from configparser import RawConfigParser + from io import TextIOWrapper +except ImportError: + from ConfigParser import RawConfigParser + +__version__ = '2.3.1' + +DEFAULT_EXCLUDE = '.svn,CVS,.bzr,.hg,.git,__pycache__,.tox' +DEFAULT_IGNORE = 'E121,E123,E126,E226,E24,E704,W503' +try: + if sys.platform == 'win32': + USER_CONFIG = os.path.expanduser(r'~\.pycodestyle') + else: + USER_CONFIG = os.path.join( + os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'), + 'pycodestyle' + ) +except ImportError: + USER_CONFIG = None + +PROJECT_CONFIG = ('setup.cfg', 'tox.ini') +TESTSUITE_PATH = os.path.join(os.path.dirname(__file__), 'testsuite') +MAX_LINE_LENGTH = 79 +REPORT_FORMAT = { + 'default': '%(path)s:%(row)d:%(col)d: %(code)s %(text)s', + 'pylint': '%(path)s:%(row)d: [%(code)s] %(text)s', +} + +PyCF_ONLY_AST = 1024 +SINGLETONS = frozenset(['False', 'None', 'True']) +KEYWORDS = frozenset(keyword.kwlist + ['print']) - SINGLETONS +UNARY_OPERATORS = frozenset(['>>', '**', '*', '+', '-']) +ARITHMETIC_OP = frozenset(['**', '*', '/', '//', '+', '-']) +WS_OPTIONAL_OPERATORS = ARITHMETIC_OP.union(['^', '&', '|', '<<', '>>', '%']) +WS_NEEDED_OPERATORS = frozenset([ + '**=', '*=', '/=', '//=', '+=', '-=', '!=', '<>', '<', '>', + '%=', '^=', '&=', '|=', '==', '<=', '>=', '<<=', '>>=', '=']) +WHITESPACE = frozenset(' \t') +NEWLINE = frozenset([tokenize.NL, tokenize.NEWLINE]) +SKIP_TOKENS = NEWLINE.union([tokenize.INDENT, tokenize.DEDENT]) +# ERRORTOKEN is triggered by backticks in Python 3 +SKIP_COMMENTS = SKIP_TOKENS.union([tokenize.COMMENT, tokenize.ERRORTOKEN]) +BENCHMARK_KEYS = ['directories', 'files', 'logical lines', 'physical lines'] + +INDENT_REGEX = re.compile(r'([ \t]*)') +RAISE_COMMA_REGEX = 
re.compile(r'raise\s+\w+\s*,') +RERAISE_COMMA_REGEX = re.compile(r'raise\s+\w+\s*,.*,\s*\w+\s*$') +ERRORCODE_REGEX = re.compile(r'\b[A-Z]\d{3}\b') +DOCSTRING_REGEX = re.compile(r'u?r?["\']') +EXTRANEOUS_WHITESPACE_REGEX = re.compile(r'[[({] | []}),;:]') +WHITESPACE_AFTER_COMMA_REGEX = re.compile(r'[,;:]\s*(?: |\t)') +COMPARE_SINGLETON_REGEX = re.compile(r'(\bNone|\bFalse|\bTrue)?\s*([=!]=)' + r'\s*(?(1)|(None|False|True))\b') +COMPARE_NEGATIVE_REGEX = re.compile(r'\b(not)\s+[^][)(}{ ]+\s+(in|is)\s') +COMPARE_TYPE_REGEX = re.compile(r'(?:[=!]=|is(?:\s+not)?)\s*type(?:s.\w+Type' + r'|\s*\(\s*([^)]*[^ )])\s*\))') +KEYWORD_REGEX = re.compile(r'(\s*)\b(?:%s)\b(\s*)' % r'|'.join(KEYWORDS)) +OPERATOR_REGEX = re.compile(r'(?:[^,\s])(\s*)(?:[-+*/|!<=>%&^]+)(\s*)') +LAMBDA_REGEX = re.compile(r'\blambda\b') +HUNK_REGEX = re.compile(r'^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$') +STARTSWITH_DEF_REGEX = re.compile(r'^(async\s+def|def)') +STARTSWITH_TOP_LEVEL_REGEX = re.compile(r'^(async\s+def\s+|def\s+|class\s+|@)') +STARTSWITH_INDENT_STATEMENT_REGEX = re.compile( + r'^\s*({0})'.format('|'.join(s.replace(' ', '\s+') for s in ( + 'def', 'async def', + 'for', 'async for', + 'if', 'elif', 'else', + 'try', 'except', 'finally', + 'with', 'async with', + 'class', + 'while', + ))) +) +DUNDER_REGEX = re.compile(r'^__([^\s]+)__ = ') + +# Work around Python < 2.6 behaviour, which does not generate NL after +# a comment which is on a line by itself. +COMMENT_WITH_NL = tokenize.generate_tokens(['#\n'].pop).send(None)[1] == '#\n' + + +############################################################################## +# Plugins (check functions) for physical lines +############################################################################## + + +def tabs_or_spaces(physical_line, indent_char): + r"""Never mix tabs and spaces. + + The most popular way of indenting Python is with spaces only. The + second-most popular way is with tabs only. Code indented with a mixture + of tabs and spaces should be converted to using spaces exclusively. When + invoking the Python command line interpreter with the -t option, it issues + warnings about code that illegally mixes tabs and spaces. When using -tt + these warnings become errors. These options are highly recommended! + + Okay: if a == 0:\n a = 1\n b = 1 + E101: if a == 0:\n a = 1\n\tb = 1 + """ + indent = INDENT_REGEX.match(physical_line).group(1) + for offset, char in enumerate(indent): + if char != indent_char: + return offset, "E101 indentation contains mixed spaces and tabs" + + +def tabs_obsolete(physical_line): + r"""For new projects, spaces-only are strongly recommended over tabs. + + Okay: if True:\n return + W191: if True:\n\treturn + """ + indent = INDENT_REGEX.match(physical_line).group(1) + if '\t' in indent: + return indent.index('\t'), "W191 indentation contains tabs" + + +def trailing_whitespace(physical_line): + r"""Trailing whitespace is superfluous. + + The warning returned varies on whether the line itself is blank, for easier + filtering for those who want to indent their blank lines. 
+ + Okay: spam(1)\n# + W291: spam(1) \n# + W293: class Foo(object):\n \n bang = 12 + """ + physical_line = physical_line.rstrip('\n') # chr(10), newline + physical_line = physical_line.rstrip('\r') # chr(13), carriage return + physical_line = physical_line.rstrip('\x0c') # chr(12), form feed, ^L + stripped = physical_line.rstrip(' \t\v') + if physical_line != stripped: + if stripped: + return len(stripped), "W291 trailing whitespace" + else: + return 0, "W293 blank line contains whitespace" + + +def trailing_blank_lines(physical_line, lines, line_number, total_lines): + r"""Trailing blank lines are superfluous. + + Okay: spam(1) + W391: spam(1)\n + + However the last line should end with a new line (warning W292). + """ + if line_number == total_lines: + stripped_last_line = physical_line.rstrip() + if not stripped_last_line: + return 0, "W391 blank line at end of file" + if stripped_last_line == physical_line: + return len(physical_line), "W292 no newline at end of file" + + +def maximum_line_length(physical_line, max_line_length, multiline, noqa): + r"""Limit all lines to a maximum of 79 characters. + + There are still many devices around that are limited to 80 character + lines; plus, limiting windows to 80 characters makes it possible to have + several windows side-by-side. The default wrapping on such devices looks + ugly. Therefore, please limit all lines to a maximum of 79 characters. + For flowing long blocks of text (docstrings or comments), limiting the + length to 72 characters is recommended. + + Reports error E501. + """ + line = physical_line.rstrip() + length = len(line) + if length > max_line_length and not noqa: + # Special case for long URLs in multi-line docstrings or comments, + # but still report the error when the 72 first chars are whitespaces. + chunks = line.split() + if ((len(chunks) == 1 and multiline) or + (len(chunks) == 2 and chunks[0] == '#')) and \ + len(line) - len(chunks[-1]) < max_line_length - 7: + return + if hasattr(line, 'decode'): # Python 2 + # The line could contain multi-byte characters + try: + length = len(line.decode('utf-8')) + except UnicodeError: + pass + if length > max_line_length: + return (max_line_length, "E501 line too long " + "(%d > %d characters)" % (length, max_line_length)) + + +############################################################################## +# Plugins (check functions) for logical lines +############################################################################## + + +def blank_lines(logical_line, blank_lines, indent_level, line_number, + blank_before, previous_logical, + previous_unindented_logical_line, previous_indent_level, + lines): + r"""Separate top-level function and class definitions with two blank lines. + + Method definitions inside a class are separated by a single blank line. + + Extra blank lines may be used (sparingly) to separate groups of related + functions. Blank lines may be omitted between a bunch of related + one-liners (e.g. a set of dummy implementations). + + Use blank lines in functions, sparingly, to indicate logical sections. 
+ + Okay: def a():\n pass\n\n\ndef b():\n pass + Okay: def a():\n pass\n\n\nasync def b():\n pass + Okay: def a():\n pass\n\n\n# Foo\n# Bar\n\ndef b():\n pass + Okay: default = 1\nfoo = 1 + Okay: classify = 1\nfoo = 1 + + E301: class Foo:\n b = 0\n def bar():\n pass + E302: def a():\n pass\n\ndef b(n):\n pass + E302: def a():\n pass\n\nasync def b(n):\n pass + E303: def a():\n pass\n\n\n\ndef b(n):\n pass + E303: def a():\n\n\n\n pass + E304: @decorator\n\ndef a():\n pass + E305: def a():\n pass\na() + E306: def a():\n def b():\n pass\n def c():\n pass + """ + if line_number < 3 and not previous_logical: + return # Don't expect blank lines before the first line + if previous_logical.startswith('@'): + if blank_lines: + yield 0, "E304 blank lines found after function decorator" + elif blank_lines > 2 or (indent_level and blank_lines == 2): + yield 0, "E303 too many blank lines (%d)" % blank_lines + elif STARTSWITH_TOP_LEVEL_REGEX.match(logical_line): + if indent_level: + if not (blank_before or previous_indent_level < indent_level or + DOCSTRING_REGEX.match(previous_logical)): + ancestor_level = indent_level + nested = False + # Search backwards for a def ancestor or tree root (top level). + for line in lines[line_number - 2::-1]: + if line.strip() and expand_indent(line) < ancestor_level: + ancestor_level = expand_indent(line) + nested = line.lstrip().startswith('def ') + if nested or ancestor_level == 0: + break + if nested: + yield 0, "E306 expected 1 blank line before a " \ + "nested definition, found 0" + else: + yield 0, "E301 expected 1 blank line, found 0" + elif blank_before != 2: + yield 0, "E302 expected 2 blank lines, found %d" % blank_before + elif (logical_line and not indent_level and blank_before != 2 and + previous_unindented_logical_line.startswith(('def ', 'class '))): + yield 0, "E305 expected 2 blank lines after " \ + "class or function definition, found %d" % blank_before + + +def extraneous_whitespace(logical_line): + r"""Avoid extraneous whitespace. + + Avoid extraneous whitespace in these situations: + - Immediately inside parentheses, brackets or braces. + - Immediately before a comma, semicolon, or colon. + + Okay: spam(ham[1], {eggs: 2}) + E201: spam( ham[1], {eggs: 2}) + E201: spam(ham[ 1], {eggs: 2}) + E201: spam(ham[1], { eggs: 2}) + E202: spam(ham[1], {eggs: 2} ) + E202: spam(ham[1 ], {eggs: 2}) + E202: spam(ham[1], {eggs: 2 }) + + E203: if x == 4: print x, y; x, y = y , x + E203: if x == 4: print x, y ; x, y = y, x + E203: if x == 4 : print x, y; x, y = y, x + """ + line = logical_line + for match in EXTRANEOUS_WHITESPACE_REGEX.finditer(line): + text = match.group() + char = text.strip() + found = match.start() + if text == char + ' ': + # assert char in '([{' + yield found + 1, "E201 whitespace after '%s'" % char + elif line[found - 1] != ',': + code = ('E202' if char in '}])' else 'E203') # if char in ',;:' + yield found, "%s whitespace before '%s'" % (code, char) + + +def whitespace_around_keywords(logical_line): + r"""Avoid extraneous whitespace around keywords. 
+ + Okay: True and False + E271: True and False + E272: True and False + E273: True and\tFalse + E274: True\tand False + """ + for match in KEYWORD_REGEX.finditer(logical_line): + before, after = match.groups() + + if '\t' in before: + yield match.start(1), "E274 tab before keyword" + elif len(before) > 1: + yield match.start(1), "E272 multiple spaces before keyword" + + if '\t' in after: + yield match.start(2), "E273 tab after keyword" + elif len(after) > 1: + yield match.start(2), "E271 multiple spaces after keyword" + + +def missing_whitespace_after_import_keyword(logical_line): + r"""Multiple imports in form from x import (a, b, c) should have space + between import statement and parenthesised name list. + + Okay: from foo import (bar, baz) + E275: from foo import(bar, baz) + E275: from importable.module import(bar, baz) + """ + line = logical_line + indicator = ' import(' + if line.startswith('from '): + found = line.find(indicator) + if -1 < found: + pos = found + len(indicator) - 1 + yield pos, "E275 missing whitespace after keyword" + + +def missing_whitespace(logical_line): + r"""Each comma, semicolon or colon should be followed by whitespace. + + Okay: [a, b] + Okay: (3,) + Okay: a[1:4] + Okay: a[:4] + Okay: a[1:] + Okay: a[1:4:2] + E231: ['a','b'] + E231: foo(bar,baz) + E231: [{'a':'b'}] + """ + line = logical_line + for index in range(len(line) - 1): + char = line[index] + if char in ',;:' and line[index + 1] not in WHITESPACE: + before = line[:index] + if char == ':' and before.count('[') > before.count(']') and \ + before.rfind('{') < before.rfind('['): + continue # Slice syntax, no space required + if char == ',' and line[index + 1] == ')': + continue # Allow tuple with only one element: (3,) + yield index, "E231 missing whitespace after '%s'" % char + + +def indentation(logical_line, previous_logical, indent_char, + indent_level, previous_indent_level): + r"""Use 4 spaces per indentation level. + + For really old code that you don't want to mess up, you can continue to + use 8-space tabs. + + Okay: a = 1 + Okay: if a == 0:\n a = 1 + E111: a = 1 + E114: # a = 1 + + Okay: for item in items:\n pass + E112: for item in items:\npass + E115: for item in items:\n# Hi\n pass + + Okay: a = 1\nb = 2 + E113: a = 1\n b = 2 + E116: a = 1\n # b = 2 + """ + c = 0 if logical_line else 3 + tmpl = "E11%d %s" if logical_line else "E11%d %s (comment)" + if indent_level % 4: + yield 0, tmpl % (1 + c, "indentation is not a multiple of four") + indent_expect = previous_logical.endswith(':') + if indent_expect and indent_level <= previous_indent_level: + yield 0, tmpl % (2 + c, "expected an indented block") + elif not indent_expect and indent_level > previous_indent_level: + yield 0, tmpl % (3 + c, "unexpected indentation") + + +def continued_indentation(logical_line, tokens, indent_level, hang_closing, + indent_char, noqa, verbose): + r"""Continuation lines indentation. + + Continuation lines should align wrapped elements either vertically + using Python's implicit line joining inside parentheses, brackets + and braces, or using a hanging indent. + + When using a hanging indent these considerations should be applied: + - there should be no arguments on the first line, and + - further indentation should be used to clearly distinguish itself as a + continuation line. 
+ + Okay: a = (\n) + E123: a = (\n ) + + Okay: a = (\n 42) + E121: a = (\n 42) + E122: a = (\n42) + E123: a = (\n 42\n ) + E124: a = (24,\n 42\n) + E125: if (\n b):\n pass + E126: a = (\n 42) + E127: a = (24,\n 42) + E128: a = (24,\n 42) + E129: if (a or\n b):\n pass + E131: a = (\n 42\n 24) + """ + first_row = tokens[0][2][0] + nrows = 1 + tokens[-1][2][0] - first_row + if noqa or nrows == 1: + return + + # indent_next tells us whether the next block is indented; assuming + # that it is indented by 4 spaces, then we should not allow 4-space + # indents on the final continuation line; in turn, some other + # indents are allowed to have an extra 4 spaces. + indent_next = logical_line.endswith(':') + + row = depth = 0 + valid_hangs = (4,) if indent_char != '\t' else (4, 8) + # remember how many brackets were opened on each line + parens = [0] * nrows + # relative indents of physical lines + rel_indent = [0] * nrows + # for each depth, collect a list of opening rows + open_rows = [[0]] + # for each depth, memorize the hanging indentation + hangs = [None] + # visual indents + indent_chances = {} + last_indent = tokens[0][2] + visual_indent = None + last_token_multiline = False + # for each depth, memorize the visual indent column + indent = [last_indent[1]] + if verbose >= 3: + print(">>> " + tokens[0][4].rstrip()) + + for token_type, text, start, end, line in tokens: + + newline = row < start[0] - first_row + if newline: + row = start[0] - first_row + newline = not last_token_multiline and token_type not in NEWLINE + + if newline: + # this is the beginning of a continuation line. + last_indent = start + if verbose >= 3: + print("... " + line.rstrip()) + + # record the initial indent. + rel_indent[row] = expand_indent(line) - indent_level + + # identify closing bracket + close_bracket = (token_type == tokenize.OP and text in ']})') + + # is the indent relative to an opening bracket line? + for open_row in reversed(open_rows[depth]): + hang = rel_indent[row] - rel_indent[open_row] + hanging_indent = hang in valid_hangs + if hanging_indent: + break + if hangs[depth]: + hanging_indent = (hang == hangs[depth]) + # is there any chance of visual indent? 
+ visual_indent = (not close_bracket and hang > 0 and + indent_chances.get(start[1])) + + if close_bracket and indent[depth]: + # closing bracket for visual indent + if start[1] != indent[depth]: + yield (start, "E124 closing bracket does not match " + "visual indentation") + elif close_bracket and not hang: + # closing bracket matches indentation of opening bracket's line + if hang_closing: + yield start, "E133 closing bracket is missing indentation" + elif indent[depth] and start[1] < indent[depth]: + if visual_indent is not True: + # visual indent is broken + yield (start, "E128 continuation line " + "under-indented for visual indent") + elif hanging_indent or (indent_next and rel_indent[row] == 8): + # hanging indent is verified + if close_bracket and not hang_closing: + yield (start, "E123 closing bracket does not match " + "indentation of opening bracket's line") + hangs[depth] = hang + elif visual_indent is True: + # visual indent is verified + indent[depth] = start[1] + elif visual_indent in (text, str): + # ignore token lined up with matching one from a previous line + pass + else: + # indent is broken + if hang <= 0: + error = "E122", "missing indentation or outdented" + elif indent[depth]: + error = "E127", "over-indented for visual indent" + elif not close_bracket and hangs[depth]: + error = "E131", "unaligned for hanging indent" + else: + hangs[depth] = hang + if hang > 4: + error = "E126", "over-indented for hanging indent" + else: + error = "E121", "under-indented for hanging indent" + yield start, "%s continuation line %s" % error + + # look for visual indenting + if (parens[row] and + token_type not in (tokenize.NL, tokenize.COMMENT) and + not indent[depth]): + indent[depth] = start[1] + indent_chances[start[1]] = True + if verbose >= 4: + print("bracket depth %s indent to %s" % (depth, start[1])) + # deal with implicit string concatenation + elif (token_type in (tokenize.STRING, tokenize.COMMENT) or + text in ('u', 'ur', 'b', 'br')): + indent_chances[start[1]] = str + # special case for the "if" statement because len("if (") == 4 + elif not indent_chances and not row and not depth and text == 'if': + indent_chances[end[1] + 1] = True + elif text == ':' and line[end[1]:].isspace(): + open_rows[depth].append(row) + + # keep track of bracket depth + if token_type == tokenize.OP: + if text in '([{': + depth += 1 + indent.append(0) + hangs.append(None) + if len(open_rows) == depth: + open_rows.append([]) + open_rows[depth].append(row) + parens[row] += 1 + if verbose >= 4: + print("bracket depth %s seen, col %s, visual min = %s" % + (depth, start[1], indent[depth])) + elif text in ')]}' and depth > 0: + # parent indents should not be more than this one + prev_indent = indent.pop() or last_indent[1] + hangs.pop() + for d in range(depth): + if indent[d] > prev_indent: + indent[d] = 0 + for ind in list(indent_chances): + if ind >= prev_indent: + del indent_chances[ind] + del open_rows[depth + 1:] + depth -= 1 + if depth: + indent_chances[indent[depth]] = True + for idx in range(row, -1, -1): + if parens[idx]: + parens[idx] -= 1 + break + assert len(indent) == depth + 1 + if start[1] not in indent_chances: + # allow lining up tokens + indent_chances[start[1]] = text + + last_token_multiline = (start[0] != end[0]) + if last_token_multiline: + rel_indent[end[0] - first_row] = rel_indent[row] + + if indent_next and expand_indent(line) == indent_level + 4: + pos = (start[0], indent[0] + 4) + if visual_indent: + code = "E129 visually indented line" + else: + code = "E125 continuation 
line" + yield pos, "%s with same indent as next logical line" % code + + +def whitespace_before_parameters(logical_line, tokens): + r"""Avoid extraneous whitespace. + + Avoid extraneous whitespace in the following situations: + - before the open parenthesis that starts the argument list of a + function call. + - before the open parenthesis that starts an indexing or slicing. + + Okay: spam(1) + E211: spam (1) + + Okay: dict['key'] = list[index] + E211: dict ['key'] = list[index] + E211: dict['key'] = list [index] + """ + prev_type, prev_text, __, prev_end, __ = tokens[0] + for index in range(1, len(tokens)): + token_type, text, start, end, __ = tokens[index] + if (token_type == tokenize.OP and + text in '([' and + start != prev_end and + (prev_type == tokenize.NAME or prev_text in '}])') and + # Syntax "class A (B):" is allowed, but avoid it + (index < 2 or tokens[index - 2][1] != 'class') and + # Allow "return (a.foo for a in range(5))" + not keyword.iskeyword(prev_text)): + yield prev_end, "E211 whitespace before '%s'" % text + prev_type = token_type + prev_text = text + prev_end = end + + +def whitespace_around_operator(logical_line): + r"""Avoid extraneous whitespace around an operator. + + Okay: a = 12 + 3 + E221: a = 4 + 5 + E222: a = 4 + 5 + E223: a = 4\t+ 5 + E224: a = 4 +\t5 + """ + for match in OPERATOR_REGEX.finditer(logical_line): + before, after = match.groups() + + if '\t' in before: + yield match.start(1), "E223 tab before operator" + elif len(before) > 1: + yield match.start(1), "E221 multiple spaces before operator" + + if '\t' in after: + yield match.start(2), "E224 tab after operator" + elif len(after) > 1: + yield match.start(2), "E222 multiple spaces after operator" + + +def missing_whitespace_around_operator(logical_line, tokens): + r"""Surround operators with a single space on either side. + + - Always surround these binary operators with a single space on + either side: assignment (=), augmented assignment (+=, -= etc.), + comparisons (==, <, >, !=, <=, >=, in, not in, is, is not), + Booleans (and, or, not). + + - If operators with different priorities are used, consider adding + whitespace around the operators with the lowest priorities. 
+ + Okay: i = i + 1 + Okay: submitted += 1 + Okay: x = x * 2 - 1 + Okay: hypot2 = x * x + y * y + Okay: c = (a + b) * (a - b) + Okay: foo(bar, key='word', *args, **kwargs) + Okay: alpha[:-i] + + E225: i=i+1 + E225: submitted +=1 + E225: x = x /2 - 1 + E225: z = x **y + E226: c = (a+b) * (a-b) + E226: hypot2 = x*x + y*y + E227: c = a|b + E228: msg = fmt%(errno, errmsg) + """ + parens = 0 + need_space = False + prev_type = tokenize.OP + prev_text = prev_end = None + for token_type, text, start, end, line in tokens: + if token_type in SKIP_COMMENTS: + continue + if text in ('(', 'lambda'): + parens += 1 + elif text == ')': + parens -= 1 + if need_space: + if start != prev_end: + # Found a (probably) needed space + if need_space is not True and not need_space[1]: + yield (need_space[0], + "E225 missing whitespace around operator") + need_space = False + elif text == '>' and prev_text in ('<', '-'): + # Tolerate the "<>" operator, even if running Python 3 + # Deal with Python 3's annotated return value "->" + pass + else: + if need_space is True or need_space[1]: + # A needed trailing space was not found + yield prev_end, "E225 missing whitespace around operator" + elif prev_text != '**': + code, optype = 'E226', 'arithmetic' + if prev_text == '%': + code, optype = 'E228', 'modulo' + elif prev_text not in ARITHMETIC_OP: + code, optype = 'E227', 'bitwise or shift' + yield (need_space[0], "%s missing whitespace " + "around %s operator" % (code, optype)) + need_space = False + elif token_type == tokenize.OP and prev_end is not None: + if text == '=' and parens: + # Allow keyword args or defaults: foo(bar=None). + pass + elif text in WS_NEEDED_OPERATORS: + need_space = True + elif text in UNARY_OPERATORS: + # Check if the operator is being used as a binary operator + # Allow unary operators: -123, -x, +1. + # Allow argument unpacking: foo(*args, **kwargs). + if (prev_text in '}])' if prev_type == tokenize.OP + else prev_text not in KEYWORDS): + need_space = None + elif text in WS_OPTIONAL_OPERATORS: + need_space = None + + if need_space is None: + # Surrounding space is optional, but ensure that + # trailing space matches opening space + need_space = (prev_end, start != prev_end) + elif need_space and start == prev_end: + # A needed opening space was not found + yield prev_end, "E225 missing whitespace around operator" + need_space = False + prev_type = token_type + prev_text = text + prev_end = end + + +def whitespace_around_comma(logical_line): + r"""Avoid extraneous whitespace after a comma or a colon. + + Note: these checks are disabled by default + + Okay: a = (1, 2) + E241: a = (1, 2) + E242: a = (1,\t2) + """ + line = logical_line + for m in WHITESPACE_AFTER_COMMA_REGEX.finditer(line): + found = m.start() + 1 + if '\t' in m.group(): + yield found, "E242 tab after '%s'" % m.group()[0] + else: + yield found, "E241 multiple spaces after '%s'" % m.group()[0] + + +def whitespace_around_named_parameter_equals(logical_line, tokens): + r"""Don't use spaces around the '=' sign in function arguments. + + Don't use spaces around the '=' sign when used to indicate a + keyword argument or a default parameter value. 
+ + Okay: def complex(real, imag=0.0): + Okay: return magic(r=real, i=imag) + Okay: boolean(a == b) + Okay: boolean(a != b) + Okay: boolean(a <= b) + Okay: boolean(a >= b) + Okay: def foo(arg: int = 42): + Okay: async def foo(arg: int = 42): + + E251: def complex(real, imag = 0.0): + E251: return magic(r = real, i = imag) + """ + parens = 0 + no_space = False + prev_end = None + annotated_func_arg = False + in_def = bool(STARTSWITH_DEF_REGEX.match(logical_line)) + message = "E251 unexpected spaces around keyword / parameter equals" + for token_type, text, start, end, line in tokens: + if token_type == tokenize.NL: + continue + if no_space: + no_space = False + if start != prev_end: + yield (prev_end, message) + if token_type == tokenize.OP: + if text in '([': + parens += 1 + elif text in ')]': + parens -= 1 + elif in_def and text == ':' and parens == 1: + annotated_func_arg = True + elif parens and text == ',' and parens == 1: + annotated_func_arg = False + elif parens and text == '=' and not annotated_func_arg: + no_space = True + if start != prev_end: + yield (prev_end, message) + if not parens: + annotated_func_arg = False + + prev_end = end + + +def whitespace_before_comment(logical_line, tokens): + r"""Separate inline comments by at least two spaces. + + An inline comment is a comment on the same line as a statement. Inline + comments should be separated by at least two spaces from the statement. + They should start with a # and a single space. + + Each line of a block comment starts with a # and a single space + (unless it is indented text inside the comment). + + Okay: x = x + 1 # Increment x + Okay: x = x + 1 # Increment x + Okay: # Block comment + E261: x = x + 1 # Increment x + E262: x = x + 1 #Increment x + E262: x = x + 1 # Increment x + E265: #Block comment + E266: ### Block comment + """ + prev_end = (0, 0) + for token_type, text, start, end, line in tokens: + if token_type == tokenize.COMMENT: + inline_comment = line[:start[1]].strip() + if inline_comment: + if prev_end[0] == start[0] and start[1] < prev_end[1] + 2: + yield (prev_end, + "E261 at least two spaces before inline comment") + symbol, sp, comment = text.partition(' ') + bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#') + if inline_comment: + if bad_prefix or comment[:1] in WHITESPACE: + yield start, "E262 inline comment should start with '# '" + elif bad_prefix and (bad_prefix != '!' or start[0] > 1): + if bad_prefix != '#': + yield start, "E265 block comment should start with '# '" + elif comment: + yield start, "E266 too many leading '#' for block comment" + elif token_type != tokenize.NL: + prev_end = end + + +def imports_on_separate_lines(logical_line): + r"""Place imports on separate lines. + + Okay: import os\nimport sys + E401: import sys, os + + Okay: from subprocess import Popen, PIPE + Okay: from myclas import MyClass + Okay: from foo.bar.yourclass import YourClass + Okay: import myclass + Okay: import foo.bar.yourclass + """ + line = logical_line + if line.startswith('import '): + found = line.find(',') + if -1 < found and ';' not in line[:found]: + yield found, "E401 multiple imports on one line" + + +def module_imports_on_top_of_file( + logical_line, indent_level, checker_state, noqa): + r"""Place imports at the top of the file. + + Always put imports at the top of the file, just after any module comments + and docstrings, and before module globals and constants. 
+ + Okay: import os + Okay: # this is a comment\nimport os + Okay: '''this is a module docstring'''\nimport os + Okay: r'''this is a module docstring'''\nimport os + Okay: + try:\n\timport x\nexcept ImportError:\n\tpass\nelse:\n\tpass\nimport y + Okay: + try:\n\timport x\nexcept ImportError:\n\tpass\nfinally:\n\tpass\nimport y + E402: a=1\nimport os + E402: 'One string'\n"Two string"\nimport os + E402: a=1\nfrom sys import x + + Okay: if x:\n import os + """ + def is_string_literal(line): + if line[0] in 'uUbB': + line = line[1:] + if line and line[0] in 'rR': + line = line[1:] + return line and (line[0] == '"' or line[0] == "'") + + allowed_try_keywords = ('try', 'except', 'else', 'finally') + + if indent_level: # Allow imports in conditional statements or functions + return + if not logical_line: # Allow empty lines or comments + return + if noqa: + return + line = logical_line + if line.startswith('import ') or line.startswith('from '): + if checker_state.get('seen_non_imports', False): + yield 0, "E402 module level import not at top of file" + elif re.match(DUNDER_REGEX, line): + return + elif any(line.startswith(kw) for kw in allowed_try_keywords): + # Allow try, except, else, finally keywords intermixed with imports in + # order to support conditional importing + return + elif is_string_literal(line): + # The first literal is a docstring, allow it. Otherwise, report error. + if checker_state.get('seen_docstring', False): + checker_state['seen_non_imports'] = True + else: + checker_state['seen_docstring'] = True + else: + checker_state['seen_non_imports'] = True + + +def compound_statements(logical_line): + r"""Compound statements (on the same line) are generally discouraged. + + While sometimes it's okay to put an if/for/while with a small body + on the same line, never do this for multi-clause statements. + Also avoid folding such long lines! + + Always use a def statement instead of an assignment statement that + binds a lambda expression directly to a name. 
+ + Okay: if foo == 'blah':\n do_blah_thing() + Okay: do_one() + Okay: do_two() + Okay: do_three() + + E701: if foo == 'blah': do_blah_thing() + E701: for x in lst: total += x + E701: while t < 10: t = delay() + E701: if foo == 'blah': do_blah_thing() + E701: else: do_non_blah_thing() + E701: try: something() + E701: finally: cleanup() + E701: if foo == 'blah': one(); two(); three() + E702: do_one(); do_two(); do_three() + E703: do_four(); # useless semicolon + E704: def f(x): return 2*x + E731: f = lambda x: 2*x + """ + line = logical_line + last_char = len(line) - 1 + found = line.find(':') + prev_found = 0 + counts = dict((char, 0) for char in '{}[]()') + while -1 < found < last_char: + update_counts(line[prev_found:found], counts) + if ((counts['{'] <= counts['}'] and # {'a': 1} (dict) + counts['['] <= counts[']'] and # [1:2] (slice) + counts['('] <= counts[')'])): # (annotation) + lambda_kw = LAMBDA_REGEX.search(line, 0, found) + if lambda_kw: + before = line[:lambda_kw.start()].rstrip() + if before[-1:] == '=' and isidentifier(before[:-1].strip()): + yield 0, ("E731 do not assign a lambda expression, use a " + "def") + break + if STARTSWITH_DEF_REGEX.match(line): + yield 0, "E704 multiple statements on one line (def)" + elif STARTSWITH_INDENT_STATEMENT_REGEX.match(line): + yield found, "E701 multiple statements on one line (colon)" + prev_found = found + found = line.find(':', found + 1) + found = line.find(';') + while -1 < found: + if found < last_char: + yield found, "E702 multiple statements on one line (semicolon)" + else: + yield found, "E703 statement ends with a semicolon" + found = line.find(';', found + 1) + + +def explicit_line_join(logical_line, tokens): + r"""Avoid explicit line join between brackets. + + The preferred way of wrapping long lines is by using Python's implied line + continuation inside parentheses, brackets and braces. Long lines can be + broken over multiple lines by wrapping expressions in parentheses. These + should be used in preference to using a backslash for line continuation. + + E502: aaa = [123, \\n 123] + E502: aaa = ("bbb " \\n "ccc") + + Okay: aaa = [123,\n 123] + Okay: aaa = ("bbb "\n "ccc") + Okay: aaa = "bbb " \\n "ccc" + Okay: aaa = 123 # \\ + """ + prev_start = prev_end = parens = 0 + comment = False + backslash = None + for token_type, text, start, end, line in tokens: + if token_type == tokenize.COMMENT: + comment = True + if start[0] != prev_start and parens and backslash and not comment: + yield backslash, "E502 the backslash is redundant between brackets" + if end[0] != prev_end: + if line.rstrip('\r\n').endswith('\\'): + backslash = (end[0], len(line.splitlines()[-1]) - 1) + else: + backslash = None + prev_start = prev_end = end[0] + else: + prev_start = start[0] + if token_type == tokenize.OP: + if text in '([{': + parens += 1 + elif text in ')]}': + parens -= 1 + + +def break_around_binary_operator(logical_line, tokens): + r""" + Avoid breaks before binary operators. + + The preferred place to break around a binary operator is after the + operator, not before it. 
+ + W503: (width == 0\n + height == 0) + W503: (width == 0\n and height == 0) + + Okay: (width == 0 +\n height == 0) + Okay: foo(\n -x) + Okay: foo(x\n []) + Okay: x = '''\n''' + '' + Okay: foo(x,\n -y) + Okay: foo(x, # comment\n -y) + Okay: var = (1 &\n ~2) + Okay: var = (1 /\n -2) + Okay: var = (1 +\n -1 +\n -2) + """ + def is_binary_operator(token_type, text): + # The % character is strictly speaking a binary operator, but the + # common usage seems to be to put it next to the format parameters, + # after a line break. + return ((token_type == tokenize.OP or text in ['and', 'or']) and + text not in "()[]{},:.;@=%~") + + line_break = False + unary_context = True + # Previous non-newline token types and text + previous_token_type = None + previous_text = None + for token_type, text, start, end, line in tokens: + if token_type == tokenize.COMMENT: + continue + if ('\n' in text or '\r' in text) and token_type != tokenize.STRING: + line_break = True + else: + if (is_binary_operator(token_type, text) and line_break and + not unary_context and + not is_binary_operator(previous_token_type, + previous_text)): + yield start, "W503 line break before binary operator" + unary_context = text in '([{,;' + line_break = False + previous_token_type = token_type + previous_text = text + + +def comparison_to_singleton(logical_line, noqa): + r"""Comparison to singletons should use "is" or "is not". + + Comparisons to singletons like None should always be done + with "is" or "is not", never the equality operators. + + Okay: if arg is not None: + E711: if arg != None: + E711: if None == arg: + E712: if arg == True: + E712: if False == arg: + + Also, beware of writing if x when you really mean if x is not None -- + e.g. when testing whether a variable or argument that defaults to None was + set to some other value. The other value might have a type (such as a + container) that could be false in a boolean context! + """ + match = not noqa and COMPARE_SINGLETON_REGEX.search(logical_line) + if match: + singleton = match.group(1) or match.group(3) + same = (match.group(2) == '==') + + msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton) + if singleton in ('None',): + code = 'E711' + else: + code = 'E712' + nonzero = ((singleton == 'True' and same) or + (singleton == 'False' and not same)) + msg += " or 'if %scond:'" % ('' if nonzero else 'not ') + yield match.start(2), ("%s comparison to %s should be %s" % + (code, singleton, msg)) + + +def comparison_negative(logical_line): + r"""Negative comparison should be done using "not in" and "is not". + + Okay: if x not in y:\n pass + Okay: assert (X in Y or X is Z) + Okay: if not (X in Y):\n pass + Okay: zz = x is not y + E713: Z = not X in Y + E713: if not X.B in Y:\n pass + E714: if not X is Y:\n pass + E714: Z = not X.B is Y + """ + match = COMPARE_NEGATIVE_REGEX.search(logical_line) + if match: + pos = match.start(1) + if match.group(2) == 'in': + yield pos, "E713 test for membership should be 'not in'" + else: + yield pos, "E714 test for object identity should be 'is not'" + + +def comparison_type(logical_line, noqa): + r"""Object type comparisons should always use isinstance(). + + Do not compare types directly. + + Okay: if isinstance(obj, int): + E721: if type(obj) is type(1): + + When checking if an object is a string, keep in mind that it might be a + unicode string too! 
In Python 2.3, str and unicode have a common base + class, basestring, so you can do: + + Okay: if isinstance(obj, basestring): + Okay: if type(a1) is type(b1): + """ + match = COMPARE_TYPE_REGEX.search(logical_line) + if match and not noqa: + inst = match.group(1) + if inst and isidentifier(inst) and inst not in SINGLETONS: + return # Allow comparison for types which are not obvious + yield match.start(), "E721 do not compare types, use 'isinstance()'" + + +def bare_except(logical_line, noqa): + r"""When catching exceptions, mention specific exceptions whenever possible. + + Okay: except Exception: + Okay: except BaseException: + E722: except: + """ + if noqa: + return + + regex = re.compile(r"except\s*:") + match = regex.match(logical_line) + if match: + yield match.start(), "E722 do not use bare except'" + + +def ambiguous_identifier(logical_line, tokens): + r"""Never use the characters 'l', 'O', or 'I' as variable names. + + In some fonts, these characters are indistinguishable from the numerals + one and zero. When tempted to use 'l', use 'L' instead. + + Okay: L = 0 + Okay: o = 123 + Okay: i = 42 + E741: l = 0 + E741: O = 123 + E741: I = 42 + + Variables can be bound in several other contexts, including class and + function definitions, 'global' and 'nonlocal' statements, exception + handlers, and 'with' statements. + + Okay: except AttributeError as o: + Okay: with lock as L: + E741: except AttributeError as O: + E741: with lock as l: + E741: global I + E741: nonlocal l + E742: class I(object): + E743: def l(x): + """ + idents_to_avoid = ('l', 'O', 'I') + prev_type, prev_text, prev_start, prev_end, __ = tokens[0] + for token_type, text, start, end, line in tokens[1:]: + ident = pos = None + # identifiers on the lhs of an assignment operator + if token_type == tokenize.OP and '=' in text: + if prev_text in idents_to_avoid: + ident = prev_text + pos = prev_start + # identifiers bound to a value with 'as', 'global', or 'nonlocal' + if prev_text in ('as', 'global', 'nonlocal'): + if text in idents_to_avoid: + ident = text + pos = start + if prev_text == 'class': + if text in idents_to_avoid: + yield start, "E742 ambiguous class definition '%s'" % text + if prev_text == 'def': + if text in idents_to_avoid: + yield start, "E743 ambiguous function definition '%s'" % text + if ident: + yield pos, "E741 ambiguous variable name '%s'" % ident + prev_text = text + prev_start = start + + +def python_3000_has_key(logical_line, noqa): + r"""The {}.has_key() method is removed in Python 3: use the 'in' operator. + + Okay: if "alph" in d:\n print d["alph"] + W601: assert d.has_key('alph') + """ + pos = logical_line.find('.has_key(') + if pos > -1 and not noqa: + yield pos, "W601 .has_key() is deprecated, use 'in'" + + +def python_3000_raise_comma(logical_line): + r"""When raising an exception, use "raise ValueError('message')". + + The older form is removed in Python 3. + + Okay: raise DummyError("Message") + W602: raise DummyError, "Message" + """ + match = RAISE_COMMA_REGEX.match(logical_line) + if match and not RERAISE_COMMA_REGEX.match(logical_line): + yield match.end() - 1, "W602 deprecated form of raising exception" + + +def python_3000_not_equal(logical_line): + r"""New code should always use != instead of <>. + + The older syntax is removed in Python 3. 
+ + Okay: if a != 'no': + W603: if a <> 'no': + """ + pos = logical_line.find('<>') + if pos > -1: + yield pos, "W603 '<>' is deprecated, use '!='" + + +def python_3000_backticks(logical_line): + r"""Use repr() instead of backticks in Python 3. + + Okay: val = repr(1 + 2) + W604: val = `1 + 2` + """ + pos = logical_line.find('`') + if pos > -1: + yield pos, "W604 backticks are deprecated, use 'repr()'" + + +############################################################################## +# Helper functions +############################################################################## + + +if sys.version_info < (3,): + # Python 2: implicit encoding. + def readlines(filename): + """Read the source code.""" + with open(filename, 'rU') as f: + return f.readlines() + isidentifier = re.compile(r'[a-zA-Z_]\w*$').match + stdin_get_value = sys.stdin.read +else: + # Python 3 + def readlines(filename): + """Read the source code.""" + try: + with open(filename, 'rb') as f: + (coding, lines) = tokenize.detect_encoding(f.readline) + f = TextIOWrapper(f, coding, line_buffering=True) + return [line.decode(coding) for line in lines] + f.readlines() + except (LookupError, SyntaxError, UnicodeError): + # Fall back if file encoding is improperly declared + with open(filename, encoding='latin-1') as f: + return f.readlines() + isidentifier = str.isidentifier + + stdin_get_value = sys.stdin.read + +noqa = re.compile(r'# no(?:qa|pep8)\b', re.I).search + + +def expand_indent(line): + r"""Return the amount of indentation. + + Tabs are expanded to the next multiple of 8. + + >>> expand_indent(' ') + 4 + >>> expand_indent('\t') + 8 + >>> expand_indent(' \t') + 8 + >>> expand_indent(' \t') + 16 + """ + if '\t' not in line: + return len(line) - len(line.lstrip()) + result = 0 + for char in line: + if char == '\t': + result = result // 8 * 8 + 8 + elif char == ' ': + result += 1 + else: + break + return result + + +def mute_string(text): + """Replace contents with 'xxx' to prevent syntax matching. + + >>> mute_string('"abc"') + '"xxx"' + >>> mute_string("'''abc'''") + "'''xxx'''" + >>> mute_string("r'abc'") + "r'xxx'" + """ + # String modifiers (e.g. u or r) + start = text.index(text[-1]) + 1 + end = len(text) - 1 + # Triple quotes + if text[-3:] in ('"""', "'''"): + start += 2 + end -= 2 + return text[:start] + 'x' * (end - start) + text[end:] + + +def parse_udiff(diff, patterns=None, parent='.'): + """Return a dictionary of matching lines.""" + # For each file of the diff, the entry key is the filename, + # and the value is a set of row numbers to consider. + rv = {} + path = nrows = None + for line in diff.splitlines(): + if nrows: + if line[:1] != '-': + nrows -= 1 + continue + if line[:3] == '@@ ': + hunk_match = HUNK_REGEX.match(line) + (row, nrows) = [int(g or '1') for g in hunk_match.groups()] + rv[path].update(range(row, row + nrows)) + elif line[:3] == '+++': + path = line[4:].split('\t', 1)[0] + if path[:2] == 'b/': + path = path[2:] + rv[path] = set() + return dict([(os.path.join(parent, path), rows) + for (path, rows) in rv.items() + if rows and filename_match(path, patterns)]) + + +def normalize_paths(value, parent=os.curdir): + """Parse a comma-separated list of paths. + + Return a list of absolute paths. 
+ """ + if not value: + return [] + if isinstance(value, list): + return value + paths = [] + for path in value.split(','): + path = path.strip() + if '/' in path: + path = os.path.abspath(os.path.join(parent, path)) + paths.append(path.rstrip('/')) + return paths + + +def filename_match(filename, patterns, default=True): + """Check if patterns contains a pattern that matches filename. + + If patterns is unspecified, this always returns True. + """ + if not patterns: + return default + return any(fnmatch(filename, pattern) for pattern in patterns) + + +def update_counts(s, counts): + r"""Adds one to the counts of each appearance of characters in s, + for characters in counts""" + for char in s: + if char in counts: + counts[char] += 1 + + +def _is_eol_token(token): + return token[0] in NEWLINE or token[4][token[3][1]:].lstrip() == '\\\n' + + +if COMMENT_WITH_NL: + def _is_eol_token(token, _eol_token=_is_eol_token): + return _eol_token(token) or (token[0] == tokenize.COMMENT and + token[1] == token[4]) + +############################################################################## +# Framework to run all checks +############################################################################## + + +_checks = {'physical_line': {}, 'logical_line': {}, 'tree': {}} + + +def _get_parameters(function): + if sys.version_info >= (3, 3): + return [parameter.name + for parameter + in inspect.signature(function).parameters.values() + if parameter.kind == parameter.POSITIONAL_OR_KEYWORD] + else: + return inspect.getargspec(function)[0] + + +def register_check(check, codes=None): + """Register a new check object.""" + def _add_check(check, kind, codes, args): + if check in _checks[kind]: + _checks[kind][check][0].extend(codes or []) + else: + _checks[kind][check] = (codes or [''], args) + if inspect.isfunction(check): + args = _get_parameters(check) + if args and args[0] in ('physical_line', 'logical_line'): + if codes is None: + codes = ERRORCODE_REGEX.findall(check.__doc__ or '') + _add_check(check, args[0], codes, args) + elif inspect.isclass(check): + if _get_parameters(check.__init__)[:2] == ['self', 'tree']: + _add_check(check, 'tree', codes, None) + + +def init_checks_registry(): + """Register all globally visible functions. + + The first argument name is either 'physical_line' or 'logical_line'. + """ + mod = inspect.getmodule(register_check) + for (name, function) in inspect.getmembers(mod, inspect.isfunction): + register_check(function) + + +init_checks_registry() + + +class Checker(object): + """Load a Python source file, tokenize it, check coding style.""" + + def __init__(self, filename=None, lines=None, + options=None, report=None, **kwargs): + if options is None: + options = StyleGuide(kwargs).options + else: + assert not kwargs + self._io_error = None + self._physical_checks = options.physical_checks + self._logical_checks = options.logical_checks + self._ast_checks = options.ast_checks + self.max_line_length = options.max_line_length + self.multiline = False # in a multiline string? + self.hang_closing = options.hang_closing + self.verbose = options.verbose + self.filename = filename + # Dictionary where a checker can store its custom state. 
+ self._checker_states = {} + if filename is None: + self.filename = 'stdin' + self.lines = lines or [] + elif filename == '-': + self.filename = 'stdin' + self.lines = stdin_get_value().splitlines(True) + elif lines is None: + try: + self.lines = readlines(filename) + except IOError: + (exc_type, exc) = sys.exc_info()[:2] + self._io_error = '%s: %s' % (exc_type.__name__, exc) + self.lines = [] + else: + self.lines = lines + if self.lines: + ord0 = ord(self.lines[0][0]) + if ord0 in (0xef, 0xfeff): # Strip the UTF-8 BOM + if ord0 == 0xfeff: + self.lines[0] = self.lines[0][1:] + elif self.lines[0][:3] == '\xef\xbb\xbf': + self.lines[0] = self.lines[0][3:] + self.report = report or options.report + self.report_error = self.report.error + self.noqa = False + + def report_invalid_syntax(self): + """Check if the syntax is valid.""" + (exc_type, exc) = sys.exc_info()[:2] + if len(exc.args) > 1: + offset = exc.args[1] + if len(offset) > 2: + offset = offset[1:3] + else: + offset = (1, 0) + self.report_error(offset[0], offset[1] or 0, + 'E901 %s: %s' % (exc_type.__name__, exc.args[0]), + self.report_invalid_syntax) + + def readline(self): + """Get the next line from the input buffer.""" + if self.line_number >= self.total_lines: + return '' + line = self.lines[self.line_number] + self.line_number += 1 + if self.indent_char is None and line[:1] in WHITESPACE: + self.indent_char = line[0] + return line + + def run_check(self, check, argument_names): + """Run a check plugin.""" + arguments = [] + for name in argument_names: + arguments.append(getattr(self, name)) + return check(*arguments) + + def init_checker_state(self, name, argument_names): + """Prepare custom state for the specific checker plugin.""" + if 'checker_state' in argument_names: + self.checker_state = self._checker_states.setdefault(name, {}) + + def check_physical(self, line): + """Run all physical checks on a raw input line.""" + self.physical_line = line + for name, check, argument_names in self._physical_checks: + self.init_checker_state(name, argument_names) + result = self.run_check(check, argument_names) + if result is not None: + (offset, text) = result + self.report_error(self.line_number, offset, text, check) + if text[:4] == 'E101': + self.indent_char = line[0] + + def build_tokens_line(self): + """Build a logical line from tokens.""" + logical = [] + comments = [] + length = 0 + prev_row = prev_col = mapping = None + for token_type, text, start, end, line in self.tokens: + if token_type in SKIP_TOKENS: + continue + if not mapping: + mapping = [(0, start)] + if token_type == tokenize.COMMENT: + comments.append(text) + continue + if token_type == tokenize.STRING: + text = mute_string(text) + if prev_row: + (start_row, start_col) = start + if prev_row != start_row: # different row + prev_text = self.lines[prev_row - 1][prev_col - 1] + if prev_text == ',' or (prev_text not in '{[(' and + text not in '}])'): + text = ' ' + text + elif prev_col != start_col: # different column + text = line[prev_col:start_col] + text + logical.append(text) + length += len(text) + mapping.append((length, end)) + (prev_row, prev_col) = end + self.logical_line = ''.join(logical) + self.noqa = comments and noqa(''.join(comments)) + return mapping + + def check_logical(self): + """Build a line from tokens and run all logical checks on it.""" + self.report.increment_logical_line() + mapping = self.build_tokens_line() + + if not mapping: + return + + (start_row, start_col) = mapping[0][1] + start_line = self.lines[start_row - 1] + self.indent_level = 
expand_indent(start_line[:start_col]) + if self.blank_before < self.blank_lines: + self.blank_before = self.blank_lines + if self.verbose >= 2: + print(self.logical_line[:80].rstrip()) + for name, check, argument_names in self._logical_checks: + if self.verbose >= 4: + print(' ' + name) + self.init_checker_state(name, argument_names) + for offset, text in self.run_check(check, argument_names) or (): + if not isinstance(offset, tuple): + for token_offset, pos in mapping: + if offset <= token_offset: + break + offset = (pos[0], pos[1] + offset - token_offset) + self.report_error(offset[0], offset[1], text, check) + if self.logical_line: + self.previous_indent_level = self.indent_level + self.previous_logical = self.logical_line + if not self.indent_level: + self.previous_unindented_logical_line = self.logical_line + self.blank_lines = 0 + self.tokens = [] + + def check_ast(self): + """Build the file's AST and run all AST checks.""" + try: + tree = compile(''.join(self.lines), '', 'exec', PyCF_ONLY_AST) + except (ValueError, SyntaxError, TypeError): + return self.report_invalid_syntax() + for name, cls, __ in self._ast_checks: + checker = cls(tree, self.filename) + for lineno, offset, text, check in checker.run(): + if not self.lines or not noqa(self.lines[lineno - 1]): + self.report_error(lineno, offset, text, check) + + def generate_tokens(self): + """Tokenize the file, run physical line checks and yield tokens.""" + if self._io_error: + self.report_error(1, 0, 'E902 %s' % self._io_error, readlines) + tokengen = tokenize.generate_tokens(self.readline) + try: + for token in tokengen: + if token[2][0] > self.total_lines: + return + self.noqa = token[4] and noqa(token[4]) + self.maybe_check_physical(token) + yield token + except (SyntaxError, tokenize.TokenError): + self.report_invalid_syntax() + + def maybe_check_physical(self, token): + """If appropriate (based on token), check current physical line(s).""" + # Called after every token, but act only on end of line. + if _is_eol_token(token): + # Obviously, a newline token ends a single physical line. + self.check_physical(token[4]) + elif token[0] == tokenize.STRING and '\n' in token[1]: + # Less obviously, a string that contains newlines is a + # multiline string, either triple-quoted or with internal + # newlines backslash-escaped. Check every physical line in the + # string *except* for the last one: its newline is outside of + # the multiline string, so we consider it a regular physical + # line, and will check it like any other physical line. 
+ # + # Subtleties: + # - we don't *completely* ignore the last line; if it contains + # the magical "# noqa" comment, we disable all physical + # checks for the entire multiline string + # - have to wind self.line_number back because initially it + # points to the last line of the string, and we want + # check_physical() to give accurate feedback + if noqa(token[4]): + return + self.multiline = True + self.line_number = token[2][0] + for line in token[1].split('\n')[:-1]: + self.check_physical(line + '\n') + self.line_number += 1 + self.multiline = False + + def check_all(self, expected=None, line_offset=0): + """Run all checks on the input file.""" + self.report.init_file(self.filename, self.lines, expected, line_offset) + self.total_lines = len(self.lines) + if self._ast_checks: + self.check_ast() + self.line_number = 0 + self.indent_char = None + self.indent_level = self.previous_indent_level = 0 + self.previous_logical = '' + self.previous_unindented_logical_line = '' + self.tokens = [] + self.blank_lines = self.blank_before = 0 + parens = 0 + for token in self.generate_tokens(): + self.tokens.append(token) + token_type, text = token[0:2] + if self.verbose >= 3: + if token[2][0] == token[3][0]: + pos = '[%s:%s]' % (token[2][1] or '', token[3][1]) + else: + pos = 'l.%s' % token[3][0] + print('l.%s\t%s\t%s\t%r' % + (token[2][0], pos, tokenize.tok_name[token[0]], text)) + if token_type == tokenize.OP: + if text in '([{': + parens += 1 + elif text in '}])': + parens -= 1 + elif not parens: + if token_type in NEWLINE: + if token_type == tokenize.NEWLINE: + self.check_logical() + self.blank_before = 0 + elif len(self.tokens) == 1: + # The physical line contains only this token. + self.blank_lines += 1 + del self.tokens[0] + else: + self.check_logical() + elif COMMENT_WITH_NL and token_type == tokenize.COMMENT: + if len(self.tokens) == 1: + # The comment also ends a physical line + token = list(token) + token[1] = text.rstrip('\r\n') + token[3] = (token[2][0], token[2][1] + len(token[1])) + self.tokens = [tuple(token)] + self.check_logical() + if self.tokens: + self.check_physical(self.lines[-1]) + self.check_logical() + return self.report.get_file_results() + + +class BaseReport(object): + """Collect the results of the checks.""" + + print_filename = False + + def __init__(self, options): + self._benchmark_keys = options.benchmark_keys + self._ignore_code = options.ignore_code + # Results + self.elapsed = 0 + self.total_errors = 0 + self.counters = dict.fromkeys(self._benchmark_keys, 0) + self.messages = {} + + def start(self): + """Start the timer.""" + self._start_time = time.time() + + def stop(self): + """Stop the timer.""" + self.elapsed = time.time() - self._start_time + + def init_file(self, filename, lines, expected, line_offset): + """Signal a new file.""" + self.filename = filename + self.lines = lines + self.expected = expected or () + self.line_offset = line_offset + self.file_errors = 0 + self.counters['files'] += 1 + self.counters['physical lines'] += len(lines) + + def increment_logical_line(self): + """Signal a new logical line.""" + self.counters['logical lines'] += 1 + + def error(self, line_number, offset, text, check): + """Report an error, according to options.""" + code = text[:4] + if self._ignore_code(code): + return + if code in self.counters: + self.counters[code] += 1 + else: + self.counters[code] = 1 + self.messages[code] = text[5:] + # Don't care about expected errors or warnings + if code in self.expected: + return + if self.print_filename and not 
self.file_errors: + print(self.filename) + self.file_errors += 1 + self.total_errors += 1 + return code + + def get_file_results(self): + """Return the count of errors and warnings for this file.""" + return self.file_errors + + def get_count(self, prefix=''): + """Return the total count of errors and warnings.""" + return sum([self.counters[key] + for key in self.messages if key.startswith(prefix)]) + + def get_statistics(self, prefix=''): + """Get statistics for message codes that start with the prefix. + + prefix='' matches all errors and warnings + prefix='E' matches all errors + prefix='W' matches all warnings + prefix='E4' matches all errors that have to do with imports + """ + return ['%-7s %s %s' % (self.counters[key], key, self.messages[key]) + for key in sorted(self.messages) if key.startswith(prefix)] + + def print_statistics(self, prefix=''): + """Print overall statistics (number of errors and warnings).""" + for line in self.get_statistics(prefix): + print(line) + + def print_benchmark(self): + """Print benchmark numbers.""" + print('%-7.2f %s' % (self.elapsed, 'seconds elapsed')) + if self.elapsed: + for key in self._benchmark_keys: + print('%-7d %s per second (%d total)' % + (self.counters[key] / self.elapsed, key, + self.counters[key])) + + +class FileReport(BaseReport): + """Collect the results of the checks and print only the filenames.""" + + print_filename = True + + +class StandardReport(BaseReport): + """Collect and print the results of the checks.""" + + def __init__(self, options): + super(StandardReport, self).__init__(options) + self._fmt = REPORT_FORMAT.get(options.format.lower(), + options.format) + self._repeat = options.repeat + self._show_source = options.show_source + self._show_pep8 = options.show_pep8 + + def init_file(self, filename, lines, expected, line_offset): + """Signal a new file.""" + self._deferred_print = [] + return super(StandardReport, self).init_file( + filename, lines, expected, line_offset) + + def error(self, line_number, offset, text, check): + """Report an error, according to options.""" + code = super(StandardReport, self).error(line_number, offset, + text, check) + if code and (self.counters[code] == 1 or self._repeat): + self._deferred_print.append( + (line_number, offset, code, text[5:], check.__doc__)) + return code + + def get_file_results(self): + """Print the result and return the overall count for this file.""" + self._deferred_print.sort() + for line_number, offset, code, text, doc in self._deferred_print: + print(self._fmt % { + 'path': self.filename, + 'row': self.line_offset + line_number, 'col': offset + 1, + 'code': code, 'text': text, + }) + if self._show_source: + if line_number > len(self.lines): + line = '' + else: + line = self.lines[line_number - 1] + print(line.rstrip()) + print(re.sub(r'\S', ' ', line[:offset]) + '^') + if self._show_pep8 and doc: + print(' ' + doc.strip()) + + # stdout is block buffered when not stdout.isatty(). + # line can be broken where buffer boundary since other processes + # write to same file. + # flush() after print() to avoid buffer boundary. + # Typical buffer size is 8192. line written safely when + # len(line) < 8192. 
+ sys.stdout.flush() + return self.file_errors + + +class DiffReport(StandardReport): + """Collect and print the results for the changed lines only.""" + + def __init__(self, options): + super(DiffReport, self).__init__(options) + self._selected = options.selected_lines + + def error(self, line_number, offset, text, check): + if line_number not in self._selected[self.filename]: + return + return super(DiffReport, self).error(line_number, offset, text, check) + + +class StyleGuide(object): + """Initialize a PEP-8 instance with few options.""" + + def __init__(self, *args, **kwargs): + # build options from the command line + self.checker_class = kwargs.pop('checker_class', Checker) + parse_argv = kwargs.pop('parse_argv', False) + config_file = kwargs.pop('config_file', False) + parser = kwargs.pop('parser', None) + # build options from dict + options_dict = dict(*args, **kwargs) + arglist = None if parse_argv else options_dict.get('paths', None) + options, self.paths = process_options( + arglist, parse_argv, config_file, parser) + if options_dict: + options.__dict__.update(options_dict) + if 'paths' in options_dict: + self.paths = options_dict['paths'] + + self.runner = self.input_file + self.options = options + + if not options.reporter: + options.reporter = BaseReport if options.quiet else StandardReport + + options.select = tuple(options.select or ()) + if not (options.select or options.ignore or + options.testsuite or options.doctest) and DEFAULT_IGNORE: + # The default choice: ignore controversial checks + options.ignore = tuple(DEFAULT_IGNORE.split(',')) + else: + # Ignore all checks which are not explicitly selected + options.ignore = ('',) if options.select else tuple(options.ignore) + options.benchmark_keys = BENCHMARK_KEYS[:] + options.ignore_code = self.ignore_code + options.physical_checks = self.get_checks('physical_line') + options.logical_checks = self.get_checks('logical_line') + options.ast_checks = self.get_checks('tree') + self.init_report() + + def init_report(self, reporter=None): + """Initialize the report instance.""" + self.options.report = (reporter or self.options.reporter)(self.options) + return self.options.report + + def check_files(self, paths=None): + """Run all checks on the paths.""" + if paths is None: + paths = self.paths + report = self.options.report + runner = self.runner + report.start() + try: + for path in paths: + if os.path.isdir(path): + self.input_dir(path) + elif not self.excluded(path): + runner(path) + except KeyboardInterrupt: + print('... stopped') + report.stop() + return report + + def input_file(self, filename, lines=None, expected=None, line_offset=0): + """Run all checks on a Python source file.""" + if self.options.verbose: + print('checking %s' % filename) + fchecker = self.checker_class( + filename, lines=lines, options=self.options) + return fchecker.check_all(expected=expected, line_offset=line_offset) + + def input_dir(self, dirname): + """Check all files in this directory and all subdirectories.""" + dirname = dirname.rstrip('/') + if self.excluded(dirname): + return 0 + counters = self.options.report.counters + verbose = self.options.verbose + filepatterns = self.options.filename + runner = self.runner + for root, dirs, files in os.walk(dirname): + if verbose: + print('directory ' + root) + counters['directories'] += 1 + for subdir in sorted(dirs): + if self.excluded(subdir, root): + dirs.remove(subdir) + for filename in sorted(files): + # contain a pattern that matches? 
+ if ((filename_match(filename, filepatterns) and + not self.excluded(filename, root))): + runner(os.path.join(root, filename)) + + def excluded(self, filename, parent=None): + """Check if the file should be excluded. + + Check if 'options.exclude' contains a pattern that matches filename. + """ + if not self.options.exclude: + return False + basename = os.path.basename(filename) + if filename_match(basename, self.options.exclude): + return True + if parent: + filename = os.path.join(parent, filename) + filename = os.path.abspath(filename) + return filename_match(filename, self.options.exclude) + + def ignore_code(self, code): + """Check if the error code should be ignored. + + If 'options.select' contains a prefix of the error code, + return False. Else, if 'options.ignore' contains a prefix of + the error code, return True. + """ + if len(code) < 4 and any(s.startswith(code) + for s in self.options.select): + return False + return (code.startswith(self.options.ignore) and + not code.startswith(self.options.select)) + + def get_checks(self, argument_name): + """Get all the checks for this category. + + Find all globally visible functions where the first argument name + starts with argument_name and which contain selected tests. + """ + checks = [] + for check, attrs in _checks[argument_name].items(): + (codes, args) = attrs + if any(not (code and self.ignore_code(code)) for code in codes): + checks.append((check.__name__, check, args)) + return sorted(checks) + + +def get_parser(prog='pycodestyle', version=__version__): + """Create the parser for the program.""" + parser = OptionParser(prog=prog, version=version, + usage="%prog [options] input ...") + parser.config_options = [ + 'exclude', 'filename', 'select', 'ignore', 'max-line-length', + 'hang-closing', 'count', 'format', 'quiet', 'show-pep8', + 'show-source', 'statistics', 'verbose'] + parser.add_option('-v', '--verbose', default=0, action='count', + help="print status messages, or debug with -vv") + parser.add_option('-q', '--quiet', default=0, action='count', + help="report only file names, or nothing with -qq") + parser.add_option('-r', '--repeat', default=True, action='store_true', + help="(obsolete) show all occurrences of the same error") + parser.add_option('--first', action='store_false', dest='repeat', + help="show first occurrence of each error") + parser.add_option('--exclude', metavar='patterns', default=DEFAULT_EXCLUDE, + help="exclude files or directories which match these " + "comma separated patterns (default: %default)") + parser.add_option('--filename', metavar='patterns', default='*.py', + help="when parsing directories, only check filenames " + "matching these comma separated patterns " + "(default: %default)") + parser.add_option('--select', metavar='errors', default='', + help="select errors and warnings (e.g. E,W6)") + parser.add_option('--ignore', metavar='errors', default='', + help="skip errors and warnings (e.g. 
E4,W) " + "(default: %s)" % DEFAULT_IGNORE) + parser.add_option('--show-source', action='store_true', + help="show source code for each error") + parser.add_option('--show-pep8', action='store_true', + help="show text of PEP 8 for each error " + "(implies --first)") + parser.add_option('--statistics', action='store_true', + help="count errors and warnings") + parser.add_option('--count', action='store_true', + help="print total number of errors and warnings " + "to standard error and set exit code to 1 if " + "total is not null") + parser.add_option('--max-line-length', type='int', metavar='n', + default=MAX_LINE_LENGTH, + help="set maximum allowed line length " + "(default: %default)") + parser.add_option('--hang-closing', action='store_true', + help="hang closing bracket instead of matching " + "indentation of opening bracket's line") + parser.add_option('--format', metavar='format', default='default', + help="set the error format [default|pylint|]") + parser.add_option('--diff', action='store_true', + help="report changes only within line number ranges in " + "the unified diff received on STDIN") + group = parser.add_option_group("Testing Options") + if os.path.exists(TESTSUITE_PATH): + group.add_option('--testsuite', metavar='dir', + help="run regression tests from dir") + group.add_option('--doctest', action='store_true', + help="run doctest on myself") + group.add_option('--benchmark', action='store_true', + help="measure processing speed") + return parser + + +def read_config(options, args, arglist, parser): + """Read and parse configurations. + + If a config file is specified on the command line with the "--config" + option, then only it is used for configuration. + + Otherwise, the user configuration (~/.config/pycodestyle) and any local + configurations in the current directory or above will be merged together + (in that order) using the read method of ConfigParser. + """ + config = RawConfigParser() + + cli_conf = options.config + + local_dir = os.curdir + + if USER_CONFIG and os.path.isfile(USER_CONFIG): + if options.verbose: + print('user configuration: %s' % USER_CONFIG) + config.read(USER_CONFIG) + + parent = tail = args and os.path.abspath(os.path.commonprefix(args)) + while tail: + if config.read(os.path.join(parent, fn) for fn in PROJECT_CONFIG): + local_dir = parent + if options.verbose: + print('local configuration: in %s' % parent) + break + (parent, tail) = os.path.split(parent) + + if cli_conf and os.path.isfile(cli_conf): + if options.verbose: + print('cli configuration: %s' % cli_conf) + config.read(cli_conf) + + pycodestyle_section = None + if config.has_section(parser.prog): + pycodestyle_section = parser.prog + elif config.has_section('pep8'): + pycodestyle_section = 'pep8' # Deprecated + warnings.warn('[pep8] section is deprecated. 
Use [pycodestyle].') + + if pycodestyle_section: + option_list = dict([(o.dest, o.type or o.action) + for o in parser.option_list]) + + # First, read the default values + (new_options, __) = parser.parse_args([]) + + # Second, parse the configuration + for opt in config.options(pycodestyle_section): + if opt.replace('_', '-') not in parser.config_options: + print(" unknown option '%s' ignored" % opt) + continue + if options.verbose > 1: + print(" %s = %s" % (opt, + config.get(pycodestyle_section, opt))) + normalized_opt = opt.replace('-', '_') + opt_type = option_list[normalized_opt] + if opt_type in ('int', 'count'): + value = config.getint(pycodestyle_section, opt) + elif opt_type in ('store_true', 'store_false'): + value = config.getboolean(pycodestyle_section, opt) + else: + value = config.get(pycodestyle_section, opt) + if normalized_opt == 'exclude': + value = normalize_paths(value, local_dir) + setattr(new_options, normalized_opt, value) + + # Third, overwrite with the command-line options + (options, __) = parser.parse_args(arglist, values=new_options) + options.doctest = options.testsuite = False + return options + + +def process_options(arglist=None, parse_argv=False, config_file=None, + parser=None): + """Process options passed either via arglist or via command line args. + + Passing in the ``config_file`` parameter allows other tools, such as flake8 + to specify their own options to be processed in pycodestyle. + """ + if not parser: + parser = get_parser() + if not parser.has_option('--config'): + group = parser.add_option_group("Configuration", description=( + "The project options are read from the [%s] section of the " + "tox.ini file or the setup.cfg file located in any parent folder " + "of the path(s) being processed. Allowed options are: %s." % + (parser.prog, ', '.join(parser.config_options)))) + group.add_option('--config', metavar='path', default=config_file, + help="user config file location") + # Don't read the command line if the module is used as a library. + if not arglist and not parse_argv: + arglist = [] + # If parse_argv is True and arglist is None, arguments are + # parsed from the command line (sys.argv) + (options, args) = parser.parse_args(arglist) + options.reporter = None + + if options.ensure_value('testsuite', False): + args.append(options.testsuite) + elif not options.ensure_value('doctest', False): + if parse_argv and not args: + if options.diff or any(os.path.exists(name) + for name in PROJECT_CONFIG): + args = ['.'] + else: + parser.error('input not specified') + options = read_config(options, args, arglist, parser) + options.reporter = parse_argv and options.quiet == 1 and FileReport + + options.filename = _parse_multi_options(options.filename) + options.exclude = normalize_paths(options.exclude) + options.select = _parse_multi_options(options.select) + options.ignore = _parse_multi_options(options.ignore) + + if options.diff: + options.reporter = DiffReport + stdin = stdin_get_value() + options.selected_lines = parse_udiff(stdin, options.filename, args[0]) + args = sorted(options.selected_lines) + + return options, args + + +def _parse_multi_options(options, split_token=','): + r"""Split and strip and discard empties. 
+ + Turns the following: + + A, + B, + + into ["A", "B"] + """ + if options: + return [o.strip() for o in options.split(split_token) if o.strip()] + else: + return options + + +def _main(): + """Parse options and run checks on Python source.""" + import signal + + # Handle "Broken pipe" gracefully + try: + signal.signal(signal.SIGPIPE, lambda signum, frame: sys.exit(1)) + except AttributeError: + pass # not supported on Windows + + style_guide = StyleGuide(parse_argv=True) + options = style_guide.options + + if options.doctest or options.testsuite: + from testsuite.support import run_tests + report = run_tests(style_guide) + else: + report = style_guide.check_files() + + if options.statistics: + report.print_statistics() + + if options.benchmark: + report.print_benchmark() + + if options.testsuite and not options.quiet: + report.print_results() + + if report.total_errors: + if options.count: + sys.stderr.write(str(report.total_errors) + '\n') + sys.exit(1) + + +if __name__ == '__main__': + _main() diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/__init__.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/__init__.py new file mode 100644 index 00000000..3b927b4d --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/__init__.py @@ -0,0 +1,518 @@ +#@PydevCodeAnalysisIgnore +"""create and manipulate C data types in Python""" + +import os as _os, sys as _sys +from itertools import chain as _chain + +# special developer support to use ctypes from the CVS sandbox, +# without installing it +# XXX Remove this for the python core version +_magicfile = _os.path.join(_os.path.dirname(__file__), ".CTYPES_DEVEL") +if _os.path.isfile(_magicfile): + execfile(_magicfile) +del _magicfile + +__version__ = "0.9.9.6" + +from _ctypes import Union, Structure, Array +from _ctypes import _Pointer +from _ctypes import CFuncPtr as _CFuncPtr +from _ctypes import __version__ as _ctypes_version +from _ctypes import RTLD_LOCAL, RTLD_GLOBAL +from _ctypes import ArgumentError + +from struct import calcsize as _calcsize + +if __version__ != _ctypes_version: + raise Exception, ("Version number mismatch", __version__, _ctypes_version) + +if _os.name in ("nt", "ce"): + from _ctypes import FormatError + +from _ctypes import FUNCFLAG_CDECL as _FUNCFLAG_CDECL, \ + FUNCFLAG_PYTHONAPI as _FUNCFLAG_PYTHONAPI + +""" +WINOLEAPI -> HRESULT +WINOLEAPI_(type) + +STDMETHODCALLTYPE + +STDMETHOD(name) +STDMETHOD_(type, name) + +STDAPICALLTYPE +""" + +def create_string_buffer(init, size=None): + """create_string_buffer(aString) -> character array + create_string_buffer(anInteger) -> character array + create_string_buffer(aString, anInteger) -> character array + """ + if isinstance(init, (str, unicode)): + if size is None: + size = len(init) + 1 + buftype = c_char * size + buf = buftype() + buf.value = init + return buf + elif isinstance(init, (int, long)): + buftype = c_char * init + buf = buftype() + return buf + raise TypeError, init + +def c_buffer(init, size=None): +## "deprecated, use create_string_buffer instead" +## import warnings +## warnings.warn("c_buffer is deprecated, use create_string_buffer instead", +## DeprecationWarning, stacklevel=2) + return create_string_buffer(init, size) + +_c_functype_cache = {} +def CFUNCTYPE(restype, *argtypes): + """CFUNCTYPE(restype, *argtypes) -> function prototype. 
+ + restype: the result type + argtypes: a sequence specifying the argument types + + The function prototype can be called in three ways to create a + callable object: + + prototype(integer address) -> foreign function + prototype(callable) -> create and return a C callable function from callable + prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method + prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal + prototype((function name, dll object)[, paramflags]) -> foreign function exported by name + """ + try: + return _c_functype_cache[(restype, argtypes)] + except KeyError: + class CFunctionType(_CFuncPtr): + _argtypes_ = argtypes + _restype_ = restype + _flags_ = _FUNCFLAG_CDECL + _c_functype_cache[(restype, argtypes)] = CFunctionType + return CFunctionType + +if _os.name in ("nt", "ce"): + from _ctypes import LoadLibrary as _dlopen + from _ctypes import FUNCFLAG_STDCALL as _FUNCFLAG_STDCALL + if _os.name == "ce": + # 'ce' doesn't have the stdcall calling convention + _FUNCFLAG_STDCALL = _FUNCFLAG_CDECL + + _win_functype_cache = {} + def WINFUNCTYPE(restype, *argtypes): + # docstring set later (very similar to CFUNCTYPE.__doc__) + try: + return _win_functype_cache[(restype, argtypes)] + except KeyError: + class WinFunctionType(_CFuncPtr): + _argtypes_ = argtypes + _restype_ = restype + _flags_ = _FUNCFLAG_STDCALL + _win_functype_cache[(restype, argtypes)] = WinFunctionType + return WinFunctionType + if WINFUNCTYPE.__doc__: + WINFUNCTYPE.__doc__ = CFUNCTYPE.__doc__.replace("CFUNCTYPE", "WINFUNCTYPE") + +elif _os.name == "posix": + from _ctypes import dlopen as _dlopen #@UnresolvedImport + +from _ctypes import sizeof, byref, addressof, alignment +from _ctypes import _SimpleCData + +class py_object(_SimpleCData): + _type_ = "O" + +class c_short(_SimpleCData): + _type_ = "h" + +class c_ushort(_SimpleCData): + _type_ = "H" + +class c_long(_SimpleCData): + _type_ = "l" + +class c_ulong(_SimpleCData): + _type_ = "L" + +if _calcsize("i") == _calcsize("l"): + # if int and long have the same size, make c_int an alias for c_long + c_int = c_long + c_uint = c_ulong +else: + class c_int(_SimpleCData): + _type_ = "i" + + class c_uint(_SimpleCData): + _type_ = "I" + +class c_float(_SimpleCData): + _type_ = "f" + +class c_double(_SimpleCData): + _type_ = "d" + +if _calcsize("l") == _calcsize("q"): + # if long and long long have the same size, make c_longlong an alias for c_long + c_longlong = c_long + c_ulonglong = c_ulong +else: + class c_longlong(_SimpleCData): + _type_ = "q" + + class c_ulonglong(_SimpleCData): + _type_ = "Q" + ## def from_param(cls, val): + ## return ('d', float(val), val) + ## from_param = classmethod(from_param) + +class c_ubyte(_SimpleCData): + _type_ = "B" +c_ubyte.__ctype_le__ = c_ubyte.__ctype_be__ = c_ubyte +# backward compatibility: +##c_uchar = c_ubyte + +class c_byte(_SimpleCData): + _type_ = "b" +c_byte.__ctype_le__ = c_byte.__ctype_be__ = c_byte + +class c_char(_SimpleCData): + _type_ = "c" +c_char.__ctype_le__ = c_char.__ctype_be__ = c_char + +class c_char_p(_SimpleCData): + _type_ = "z" + +class c_void_p(_SimpleCData): + _type_ = "P" +c_voidp = c_void_p # backwards compatibility (to a bug) + +# This cache maps types to pointers to them. 
+_pointer_type_cache = {} + +def POINTER(cls): + try: + return _pointer_type_cache[cls] + except KeyError: + pass + if type(cls) is str: + klass = type(_Pointer)("LP_%s" % cls, + (_Pointer,), + {}) + _pointer_type_cache[id(klass)] = klass + return klass + else: + name = "LP_%s" % cls.__name__ + klass = type(_Pointer)(name, + (_Pointer,), + {'_type_': cls}) + _pointer_type_cache[cls] = klass + return klass + +try: + from _ctypes import set_conversion_mode +except ImportError: + pass +else: + if _os.name in ("nt", "ce"): + set_conversion_mode("mbcs", "ignore") + else: + set_conversion_mode("ascii", "strict") + + class c_wchar_p(_SimpleCData): + _type_ = "Z" + + class c_wchar(_SimpleCData): + _type_ = "u" + + POINTER(c_wchar).from_param = c_wchar_p.from_param #_SimpleCData.c_wchar_p_from_param + + def create_unicode_buffer(init, size=None): + """create_unicode_buffer(aString) -> character array + create_unicode_buffer(anInteger) -> character array + create_unicode_buffer(aString, anInteger) -> character array + """ + if isinstance(init, (str, unicode)): + if size is None: + size = len(init) + 1 + buftype = c_wchar * size + buf = buftype() + buf.value = init + return buf + elif isinstance(init, (int, long)): + buftype = c_wchar * init + buf = buftype() + return buf + raise TypeError, init + +POINTER(c_char).from_param = c_char_p.from_param #_SimpleCData.c_char_p_from_param + +# XXX Deprecated +def SetPointerType(pointer, cls): + if _pointer_type_cache.get(cls, None) is not None: + raise RuntimeError, \ + "This type already exists in the cache" + if not _pointer_type_cache.has_key(id(pointer)): + raise RuntimeError, \ + "What's this???" + pointer.set_type(cls) + _pointer_type_cache[cls] = pointer + del _pointer_type_cache[id(pointer)] + + +def pointer(inst): + return POINTER(type(inst))(inst) + +# XXX Deprecated +def ARRAY(typ, len): + return typ * len + +################################################################ + + +class CDLL(object): + """An instance of this class represents a loaded dll/shared + library, exporting functions using the standard C calling + convention (named 'cdecl' on Windows). + + The exported functions can be accessed as attributes, or by + indexing with the function name. Examples: + + .qsort -> callable object + ['qsort'] -> callable object + + Calling the functions releases the Python GIL during the call and + reaquires it afterwards. + """ + class _FuncPtr(_CFuncPtr): + _flags_ = _FUNCFLAG_CDECL + _restype_ = c_int # default, can be overridden in instances + + def __init__(self, name, mode=RTLD_LOCAL, handle=None): + self._name = name + if handle is None: + self._handle = _dlopen(self._name, mode) + else: + self._handle = handle + + def __repr__(self): + return "<%s '%s', handle %x at %x>" % \ + (self.__class__.__name__, self._name, + (self._handle & (_sys.maxint * 2 + 1)), + id(self)) + + def __getattr__(self, name): + if name.startswith('__') and name.endswith('__'): + raise AttributeError, name + return self.__getitem__(name) + + def __getitem__(self, name_or_ordinal): + func = self._FuncPtr((name_or_ordinal, self)) + if not isinstance(name_or_ordinal, (int, long)): + func.__name__ = name_or_ordinal + setattr(self, name_or_ordinal, func) + return func + +class PyDLL(CDLL): + """This class represents the Python library itself. It allows to + access Python API functions. The GIL is not released, and + Python exceptions are handled correctly. 
+ """ + class _FuncPtr(_CFuncPtr): + _flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI + _restype_ = c_int # default, can be overridden in instances + +if _os.name in ("nt", "ce"): + + class WinDLL(CDLL): + """This class represents a dll exporting functions using the + Windows stdcall calling convention. + """ + class _FuncPtr(_CFuncPtr): + _flags_ = _FUNCFLAG_STDCALL + _restype_ = c_int # default, can be overridden in instances + + # XXX Hm, what about HRESULT as normal parameter? + # Mustn't it derive from c_long then? + from _ctypes import _check_HRESULT, _SimpleCData + class HRESULT(_SimpleCData): + _type_ = "l" + # _check_retval_ is called with the function's result when it + # is used as restype. It checks for the FAILED bit, and + # raises a WindowsError if it is set. + # + # The _check_retval_ method is implemented in C, so that the + # method definition itself is not included in the traceback + # when it raises an error - that is what we want (and Python + # doesn't have a way to raise an exception in the caller's + # frame). + _check_retval_ = _check_HRESULT + + class OleDLL(CDLL): + """This class represents a dll exporting functions using the + Windows stdcall calling convention, and returning HRESULT. + HRESULT error values are automatically raised as WindowsError + exceptions. + """ + class _FuncPtr(_CFuncPtr): + _flags_ = _FUNCFLAG_STDCALL + _restype_ = HRESULT + +class LibraryLoader(object): + def __init__(self, dlltype): + self._dlltype = dlltype + + def __getattr__(self, name): + if name[0] == '_': + raise AttributeError(name) + dll = self._dlltype(name) + setattr(self, name, dll) + return dll + + def __getitem__(self, name): + return getattr(self, name) + + def LoadLibrary(self, name): + return self._dlltype(name) + +cdll = LibraryLoader(CDLL) +pydll = LibraryLoader(PyDLL) + +if _os.name in ("nt", "ce"): + pythonapi = PyDLL("python dll", None, _sys.dllhandle) +elif _sys.platform == "cygwin": + pythonapi = PyDLL("libpython%d.%d.dll" % _sys.version_info[:2]) +else: + pythonapi = PyDLL(None) + + +if _os.name in ("nt", "ce"): + windll = LibraryLoader(WinDLL) + oledll = LibraryLoader(OleDLL) + + if _os.name == "nt": + GetLastError = windll.kernel32.GetLastError + else: + GetLastError = windll.coredll.GetLastError + + def WinError(code=None, descr=None): + if code is None: + code = GetLastError() + if descr is None: + descr = FormatError(code).strip() + return WindowsError(code, descr) + +_pointer_type_cache[None] = c_void_p + +if sizeof(c_uint) == sizeof(c_void_p): + c_size_t = c_uint +elif sizeof(c_ulong) == sizeof(c_void_p): + c_size_t = c_ulong + +# functions + +from _ctypes import _memmove_addr, _memset_addr, _string_at_addr, _cast_addr + +## void *memmove(void *, const void *, size_t); +memmove = CFUNCTYPE(c_void_p, c_void_p, c_void_p, c_size_t)(_memmove_addr) + +## void *memset(void *, int, size_t) +memset = CFUNCTYPE(c_void_p, c_void_p, c_int, c_size_t)(_memset_addr) + +def PYFUNCTYPE(restype, *argtypes): + class CFunctionType(_CFuncPtr): + _argtypes_ = argtypes + _restype_ = restype + _flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI + return CFunctionType +_cast = PYFUNCTYPE(py_object, c_void_p, py_object)(_cast_addr) + +def cast(obj, typ): + result = _cast(obj, typ) + result.__keepref = obj + return result + +_string_at = CFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr) +def string_at(ptr, size=0): + """string_at(addr[, size]) -> string + + Return the string at addr.""" + return _string_at(ptr, size) + +try: + from _ctypes import _wstring_at_addr +except 
ImportError: + pass +else: + _wstring_at = CFUNCTYPE(py_object, c_void_p, c_int)(_wstring_at_addr) + def wstring_at(ptr, size=0): + """wstring_at(addr[, size]) -> string + + Return the string at addr.""" + return _wstring_at(ptr, size) + + +if _os.name == "nt": # COM stuff + def DllGetClassObject(rclsid, riid, ppv): + # First ask ctypes.com.server than comtypes.server for the + # class object. + + # trick py2exe by doing dynamic imports + result = -2147221231 # CLASS_E_CLASSNOTAVAILABLE + try: + ctcom = __import__("ctypes.com.server", globals(), locals(), ['*']) + except ImportError: + pass + else: + result = ctcom.DllGetClassObject(rclsid, riid, ppv) + + if result == -2147221231: # CLASS_E_CLASSNOTAVAILABLE + try: + ccom = __import__("comtypes.server", globals(), locals(), ['*']) + except ImportError: + pass + else: + result = ccom.DllGetClassObject(rclsid, riid, ppv) + + return result + + def DllCanUnloadNow(): + # First ask ctypes.com.server than comtypes.server if we can unload or not. + # trick py2exe by doing dynamic imports + result = 0 # S_OK + try: + ctcom = __import__("ctypes.com.server", globals(), locals(), ['*']) + except ImportError: + pass + else: + result = ctcom.DllCanUnloadNow() + if result != 0: # != S_OK + return result + + try: + ccom = __import__("comtypes.server", globals(), locals(), ['*']) + except ImportError: + return result + try: + return ccom.DllCanUnloadNow() + except AttributeError: + pass + return result + +from ctypes._endian import BigEndianStructure, LittleEndianStructure + +# Fill in specifically-sized types +c_int8 = c_byte +c_uint8 = c_ubyte +for kind in [c_short, c_int, c_long, c_longlong]: + if sizeof(kind) == 2: c_int16 = kind + elif sizeof(kind) == 4: c_int32 = kind + elif sizeof(kind) == 8: c_int64 = kind +for kind in [c_ushort, c_uint, c_ulong, c_ulonglong]: + if sizeof(kind) == 2: c_uint16 = kind + elif sizeof(kind) == 4: c_uint32 = kind + elif sizeof(kind) == 8: c_uint64 = kind +del(kind) diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/_ctypes.dll b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/_ctypes.dll new file mode 100644 index 00000000..238e869a Binary files /dev/null and b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/_ctypes.dll differ diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/_endian.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/_endian.py new file mode 100644 index 00000000..7de03760 --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/_endian.py @@ -0,0 +1,58 @@ +#@PydevCodeAnalysisIgnore +import sys +from ctypes import * + +_array_type = type(c_int * 3) + +def _other_endian(typ): + """Return the type with the 'other' byte order. Simple types like + c_int and so on already have __ctype_be__ and __ctype_le__ + attributes which contain the types, for more complicated types + only arrays are supported. 
+ """ + try: + return getattr(typ, _OTHER_ENDIAN) + except AttributeError: + if type(typ) == _array_type: + return _other_endian(typ._type_) * typ._length_ + raise TypeError("This type does not support other endian: %s" % typ) + +class _swapped_meta(type(Structure)): + def __setattr__(self, attrname, value): + if attrname == "_fields_": + fields = [] + for desc in value: + name = desc[0] + typ = desc[1] + rest = desc[2:] + fields.append((name, _other_endian(typ)) + rest) + value = fields + super(_swapped_meta, self).__setattr__(attrname, value) + +################################################################ + +# Note: The Structure metaclass checks for the *presence* (not the +# value!) of a _swapped_bytes_ attribute to determine the bit order in +# structures containing bit fields. + +if sys.byteorder == "little": + _OTHER_ENDIAN = "__ctype_be__" + + LittleEndianStructure = Structure + + class BigEndianStructure(Structure): + """Structure with big endian byte order""" + __metaclass__ = _swapped_meta + _swappedbytes_ = None + +elif sys.byteorder == "big": + _OTHER_ENDIAN = "__ctype_le__" + + BigEndianStructure = Structure + class LittleEndianStructure(Structure): + """Structure with little endian byte order""" + __metaclass__ = _swapped_meta + _swappedbytes_ = None + +else: + raise RuntimeError("Invalid byteorder") diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/ctypes-README.txt b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/ctypes-README.txt new file mode 100644 index 00000000..bf8de1e8 --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/ctypes-README.txt @@ -0,0 +1,134 @@ +(Note: this is a compiled distribution of ctypes, compiled for cygwin + to allow using the cygwin conversions directly from interpreterInfo. The tests + have been removed to reduce the added size. It is only used by PyDev on cygwin). + +Overview + + ctypes is a ffi (Foreign Function Interface) package for Python. + + It allows to call functions exposed from dlls/shared libraries and + has extensive facilities to create, access and manipulate simpole + and complicated C data types transparently from Python - in other + words: wrap libraries in pure Python. + + ctypes runs on Windows, MacOS X, Linux, Solaris, FreeBSD. It may + also run on other systems, provided that libffi supports this + platform. + + On Windows, ctypes contains (the beginning of) a COM framework + mainly targetted to use and implement custom COM interfaces. + + +News + + ctypes now uses the same code base and libffi on all platforms. + For easier installation, the libffi sources are now included in + the source distribution - no need to find, build, and install a + compatible libffi version. + + +Requirements + + ctypes 0.9 requires Python 2.3 or higher, since it makes intensive + use of the new type system. + + ctypes uses libffi, which is copyright Red Hat, Inc. Complete + license see below. + + +Installation + + Windows + + On Windows, it is the easiest to download the executable + installer for your Python version and execute this. + + Installation from source + + Separate source distributions are available for windows and + non-windows systems. Please use the .zip file for Windows (it + contains the ctypes.com framework), and use the .tar.gz file + for non-Windows systems (it contains the complete + cross-platform libffi sources). 
+ + To install ctypes from source, unpack the distribution, enter + the ctypes-0.9.x source directory, and enter + + python setup.py build + + This will build the Python extension modules. A C compiler is + required. On OS X, the segment attribute live_support must be + defined. If your compiler doesn't know about it, upgrade or + set the environment variable CCASFLAGS="-Dno_live_support". + + To run the supplied tests, enter + + python setup.py test + + To install ctypes, enter + + python setup.py install --help + + to see the avaibable options, and finally + + python setup.py install [options] + + + For Windows CE, a project file is provided in + wince\_ctypes.vcw. MS embedded Visual C 4.0 is required to + build the extension modules. + + +Additional notes + + Current version: 0.9.9.3 + + Homepage: http://starship.python.net/crew/theller/ctypes.html + + +ctypes license + + Copyright (c) 2000, 2001, 2002, 2003, 2004, 2005, 2006 Thomas Heller + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation files + (the "Software"), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS + BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN + ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN + CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + +libffi license + + libffi - Copyright (c) 1996-2003 Red Hat, Inc. + + Permission is hereby granted, free of charge, to any person + obtaining a copy of this software and associated documentation files + (the ``Software''), to deal in the Software without restriction, + including without limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of the Software, + and to permit persons to whom the Software is furnished to do so, + subject to the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL CYGNUS SOLUTIONS BE LIABLE FOR + ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF + CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
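The README above covers the cygwin build of the vendored ctypes package that the preceding __init__.py and the util.py below implement. As a rough illustration of how that API is exercised — a minimal sketch written against the standard ctypes interface, which this bundled copy mirrors; it is not part of this change, and the libm/sqrt names are only examples:

    from ctypes.util import find_library
    from ctypes import CDLL, c_double

    # find_library searches the platform-specific locations described in util.py;
    # it may return None if the library cannot be located.
    libm_path = find_library("m")
    if libm_path is not None:
        libm = CDLL(libm_path)            # load with the cdecl calling convention
        libm.sqrt.restype = c_double      # declare the result type before calling
        libm.sqrt.argtypes = [c_double]   # declare argument types for conversion
        print(libm.sqrt(2.0))             # ~1.4142135623730951

On Windows the same pattern goes through windll/WinDLL for stdcall exports, as defined in the __init__.py above.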
diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/.cvsignore b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/.cvsignore new file mode 100644 index 00000000..0d20b648 --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/.cvsignore @@ -0,0 +1 @@ +*.pyc diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/__init__.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/__init__.py new file mode 100644 index 00000000..5621defc --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/__init__.py @@ -0,0 +1,9 @@ +""" +Enough Mach-O to make your head spin. + +See the relevant header files in /usr/include/mach-o + +And also Apple's documentation. +""" + +__version__ = '1.0' diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/dyld.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/dyld.py new file mode 100644 index 00000000..85073aac --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/dyld.py @@ -0,0 +1,167 @@ +#@PydevCodeAnalysisIgnore +""" +dyld emulation +""" + +import os +from framework import framework_info +from dylib import dylib_info +from itertools import * + +__all__ = [ + 'dyld_find', 'framework_find', + 'framework_info', 'dylib_info', +] + +# These are the defaults as per man dyld(1) +# +DEFAULT_FRAMEWORK_FALLBACK = [ + os.path.expanduser("~/Library/Frameworks"), + "/Library/Frameworks", + "/Network/Library/Frameworks", + "/System/Library/Frameworks", +] + +DEFAULT_LIBRARY_FALLBACK = [ + os.path.expanduser("~/lib"), + "/usr/local/lib", + "/lib", + "/usr/lib", +] + +def ensure_utf8(s): + """Not all of PyObjC and Python understand unicode paths very well yet""" + if isinstance(s, unicode): + return s.encode('utf8') + return s + +def dyld_env(env, var): + if env is None: + env = os.environ + rval = env.get(var) + if rval is None: + return [] + return rval.split(':') + +def dyld_image_suffix(env=None): + if env is None: + env = os.environ + return env.get('DYLD_IMAGE_SUFFIX') + +def dyld_framework_path(env=None): + return dyld_env(env, 'DYLD_FRAMEWORK_PATH') + +def dyld_library_path(env=None): + return dyld_env(env, 'DYLD_LIBRARY_PATH') + +def dyld_fallback_framework_path(env=None): + return dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH') + +def dyld_fallback_library_path(env=None): + return dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH') + +def dyld_image_suffix_search(iterator, env=None): + """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics""" + suffix = dyld_image_suffix(env) + if suffix is None: + return iterator + def _inject(iterator=iterator, suffix=suffix): + for path in iterator: + if path.endswith('.dylib'): + yield path[:-len('.dylib')] + suffix + '.dylib' + else: + yield path + suffix + yield path + return _inject() + +def dyld_override_search(name, env=None): + # If DYLD_FRAMEWORK_PATH is set and this dylib_name is a + # framework name, use the first file that exists in the framework + # path if any. If there is none go on to search the DYLD_LIBRARY_PATH + # if any. + + framework = framework_info(name) + + if framework is not None: + for path in dyld_framework_path(env): + yield os.path.join(path, framework['name']) + + # If DYLD_LIBRARY_PATH is set then use the first file that exists + # in the path. If none use the original name. 
+ for path in dyld_library_path(env): + yield os.path.join(path, os.path.basename(name)) + +def dyld_executable_path_search(name, executable_path=None): + # If we haven't done any searching and found a library and the + # dylib_name starts with "@executable_path/" then construct the + # library name. + if name.startswith('@executable_path/') and executable_path is not None: + yield os.path.join(executable_path, name[len('@executable_path/'):]) + +def dyld_default_search(name, env=None): + yield name + + framework = framework_info(name) + + if framework is not None: + fallback_framework_path = dyld_fallback_framework_path(env) + for path in fallback_framework_path: + yield os.path.join(path, framework['name']) + + fallback_library_path = dyld_fallback_library_path(env) + for path in fallback_library_path: + yield os.path.join(path, os.path.basename(name)) + + if framework is not None and not fallback_framework_path: + for path in DEFAULT_FRAMEWORK_FALLBACK: + yield os.path.join(path, framework['name']) + + if not fallback_library_path: + for path in DEFAULT_LIBRARY_FALLBACK: + yield os.path.join(path, os.path.basename(name)) + +def dyld_find(name, executable_path=None, env=None): + """ + Find a library or framework using dyld semantics + """ + name = ensure_utf8(name) + executable_path = ensure_utf8(executable_path) + for path in dyld_image_suffix_search(chain( + dyld_override_search(name, env), + dyld_executable_path_search(name, executable_path), + dyld_default_search(name, env), + ), env): + if os.path.isfile(path): + return path + raise ValueError, "dylib %s could not be found" % (name,) + +def framework_find(fn, executable_path=None, env=None): + """ + Find a framework using dyld semantics in a very loose manner. + + Will take input such as: + Python + Python.framework + Python.framework/Versions/Current + """ + try: + return dyld_find(fn, executable_path=executable_path, env=env) + except ValueError: + pass + fmwk_index = fn.rfind('.framework') + if fmwk_index == -1: + fmwk_index = len(fn) + fn += '.framework' + fn = os.path.join(fn, os.path.basename(fn[:fmwk_index])) + try: + return dyld_find(fn, executable_path=executable_path, env=env) + except ValueError: + raise e + +def test_dyld_find(): + env = {} + assert dyld_find('libSystem.dylib') == '/usr/lib/libSystem.dylib' + assert dyld_find('System.framework/System') == '/System/Library/Frameworks/System.framework/System' + +if __name__ == '__main__': + test_dyld_find() diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/dylib.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/dylib.py new file mode 100644 index 00000000..aa107507 --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/dylib.py @@ -0,0 +1,63 @@ +""" +Generic dylib path manipulation +""" + +import re + +__all__ = ['dylib_info'] + +DYLIB_RE = re.compile(r"""(?x) +(?P^.*)(?:^|/) +(?P + (?P\w+?) + (?:\.(?P[^._]+))? + (?:_(?P[^._]+))? + \.dylib$ +) +""") + +def dylib_info(filename): + """ + A dylib name can take one of the following four forms: + Location/Name.SomeVersion_Suffix.dylib + Location/Name.SomeVersion.dylib + Location/Name_Suffix.dylib + Location/Name.dylib + + returns None if not found or a mapping equivalent to: + dict( + location='Location', + name='Name.SomeVersion_Suffix.dylib', + shortname='Name', + version='SomeVersion', + suffix='Suffix', + ) + + Note that SomeVersion and Suffix are optional and may be None + if not present. 
+ """ + is_dylib = DYLIB_RE.match(filename) + if not is_dylib: + return None + return is_dylib.groupdict() + + +def test_dylib_info(): + def d(location=None, name=None, shortname=None, version=None, suffix=None): + return dict( + location=location, + name=name, + shortname=shortname, + version=version, + suffix=suffix + ) + assert dylib_info('completely/invalid') is None + assert dylib_info('completely/invalide_debug') is None + assert dylib_info('P/Foo.dylib') == d('P', 'Foo.dylib', 'Foo') + assert dylib_info('P/Foo_debug.dylib') == d('P', 'Foo_debug.dylib', 'Foo', suffix='debug') + assert dylib_info('P/Foo.A.dylib') == d('P', 'Foo.A.dylib', 'Foo', 'A') + assert dylib_info('P/Foo_debug.A.dylib') == d('P', 'Foo_debug.A.dylib', 'Foo_debug', 'A') + assert dylib_info('P/Foo.A_debug.dylib') == d('P', 'Foo.A_debug.dylib', 'Foo', 'A', 'debug') + +if __name__ == '__main__': + test_dylib_info() diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/framework.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/framework.py new file mode 100644 index 00000000..ad6ed554 --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/macholib/framework.py @@ -0,0 +1,65 @@ +""" +Generic framework path manipulation +""" + +import re + +__all__ = ['framework_info'] + +STRICT_FRAMEWORK_RE = re.compile(r"""(?x) +(?P^.*)(?:^|/) +(?P + (?P\w+).framework/ + (?:Versions/(?P[^/]+)/)? + (?P=shortname) + (?:_(?P[^_]+))? +)$ +""") + +def framework_info(filename): + """ + A framework name can take one of the following four forms: + Location/Name.framework/Versions/SomeVersion/Name_Suffix + Location/Name.framework/Versions/SomeVersion/Name + Location/Name.framework/Name_Suffix + Location/Name.framework/Name + + returns None if not found, or a mapping equivalent to: + dict( + location='Location', + name='Name.framework/Versions/SomeVersion/Name_Suffix', + shortname='Name', + version='SomeVersion', + suffix='Suffix', + ) + + Note that SomeVersion and Suffix are optional and may be None + if not present + """ + is_framework = STRICT_FRAMEWORK_RE.match(filename) + if not is_framework: + return None + return is_framework.groupdict() + +def test_framework_info(): + def d(location=None, name=None, shortname=None, version=None, suffix=None): + return dict( + location=location, + name=name, + shortname=shortname, + version=version, + suffix=suffix + ) + assert framework_info('completely/invalid') is None + assert framework_info('completely/invalid/_debug') is None + assert framework_info('P/F.framework') is None + assert framework_info('P/F.framework/_debug') is None + assert framework_info('P/F.framework/F') == d('P', 'F.framework/F', 'F') + assert framework_info('P/F.framework/F_debug') == d('P', 'F.framework/F_debug', 'F', suffix='debug') + assert framework_info('P/F.framework/Versions') is None + assert framework_info('P/F.framework/Versions/A') is None + assert framework_info('P/F.framework/Versions/A/F') == d('P', 'F.framework/Versions/A/F', 'F', 'A') + assert framework_info('P/F.framework/Versions/A/F_debug') == d('P', 'F.framework/Versions/A/F_debug', 'F', 'A', 'debug') + +if __name__ == '__main__': + test_framework_info() diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/util.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/util.py new file mode 100644 index 00000000..6db0cfbb --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/util.py @@ -0,0 +1,124 @@ +#@PydevCodeAnalysisIgnore +import sys, os +import ctypes + +# find_library(name) returns the 
pathname of a library, or None. +if os.name == "nt": + def find_library(name): + # See MSDN for the REAL search order. + for directory in os.environ['PATH'].split(os.pathsep): + fname = os.path.join(directory, name) + if os.path.exists(fname): + return fname + if fname.lower().endswith(".dll"): + continue + fname = fname + ".dll" + if os.path.exists(fname): + return fname + return None + +if os.name == "ce": + # search path according to MSDN: + # - absolute path specified by filename + # - The .exe launch directory + # - the Windows directory + # - ROM dll files (where are they?) + # - OEM specified search path: HKLM\Loader\SystemPath + def find_library(name): + return name + +if os.name == "posix" and sys.platform == "darwin": + from ctypes.macholib.dyld import dyld_find as _dyld_find + def find_library(name): + possible = ['lib%s.dylib' % name, + '%s.dylib' % name, + '%s.framework/%s' % (name, name)] + for name in possible: + try: + return _dyld_find(name) + except ValueError: + continue + return None + +elif os.name == "posix": + # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump + import re, tempfile + + def _findLib_gcc(name): + expr = '[^\(\)\s]*lib%s\.[^\(\)\s]*' % name + cmd = 'if type gcc &>/dev/null; then CC=gcc; else CC=cc; fi;' \ + '$CC -Wl,-t -o /dev/null 2>&1 -l' + name + try: + fdout, outfile = tempfile.mkstemp() + fd = os.popen(cmd) + trace = fd.read() + err = fd.close() + finally: + try: + os.unlink(outfile) + except OSError, e: + import errno + if e.errno != errno.ENOENT: + raise + res = re.search(expr, trace) + if not res: + return None + return res.group(0) + + def _findLib_ld(name): + expr = '/[^\(\)\s]*lib%s\.[^\(\)\s]*' % name + res = re.search(expr, os.popen('/sbin/ldconfig -p 2>/dev/null').read()) + if not res: + # Hm, this works only for libs needed by the python executable. 
+ cmd = 'ldd %s 2>/dev/null' % sys.executable + res = re.search(expr, os.popen(cmd).read()) + if not res: + return None + return res.group(0) + + def _get_soname(f): + cmd = "objdump -p -j .dynamic 2>/dev/null " + f + res = re.search(r'\sSONAME\s+([^\s]+)', os.popen(cmd).read()) + if not res: + return None + return res.group(1) + + def find_library(name): + lib = _findLib_ld(name) or _findLib_gcc(name) + if not lib: + return None + return _get_soname(lib) + +################################################################ +# test code + +def test(): + from ctypes import cdll + if os.name == "nt": + sys.stdout.write('%s\n' % (cdll.msvcrt,)) + sys.stdout.write('%s\n' % (cdll.load("msvcrt"),)) + sys.stdout.write('%s\n' % (find_library("msvcrt"),)) + + if os.name == "posix": + # find and load_version + sys.stdout.write('%s\n' % (find_library("m"),)) + sys.stdout.write('%s\n' % (find_library("c"),)) + sys.stdout.write('%s\n' % (find_library("bz2"),)) + + # getattr +## print_ cdll.m +## print_ cdll.bz2 + + # load + if sys.platform == "darwin": + sys.stdout.write('%s\n' % (cdll.LoadLibrary("libm.dylib"),)) + sys.stdout.write('%s\n' % (cdll.LoadLibrary("libcrypto.dylib"),)) + sys.stdout.write('%s\n' % (cdll.LoadLibrary("libSystem.dylib"),)) + sys.stdout.write('%s\n' % (cdll.LoadLibrary("System.framework/System"),)) + else: + sys.stdout.write('%s\n' % (cdll.LoadLibrary("libm.so"),)) + sys.stdout.write('%s\n' % (cdll.LoadLibrary("libcrypt.so"),)) + sys.stdout.write('%s\n' % (find_library("crypt"),)) + +if __name__ == "__main__": + test() diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/wintypes.py b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/wintypes.py new file mode 100644 index 00000000..d31f11e2 --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/ctypes/wintypes.py @@ -0,0 +1,98 @@ +#@PydevCodeAnalysisIgnore +# XXX This module needs cleanup. 
+ +from ctypes import * + +DWORD = c_ulong +WORD = c_ushort +BYTE = c_byte + +ULONG = c_ulong +LONG = c_long + +LARGE_INTEGER = c_longlong +ULARGE_INTEGER = c_ulonglong + + +HANDLE = c_ulong # in the header files: void * + +HWND = HANDLE +HDC = HANDLE +HMODULE = HANDLE +HINSTANCE = HANDLE +HRGN = HANDLE +HTASK = HANDLE +HKEY = HANDLE +HPEN = HANDLE +HGDIOBJ = HANDLE +HMENU = HANDLE + +LCID = DWORD + +WPARAM = c_uint +LPARAM = c_long + +BOOL = c_long +VARIANT_BOOL = c_short + +LPCOLESTR = LPOLESTR = OLESTR = c_wchar_p +LPCWSTR = LPWSTR = c_wchar_p + +LPCSTR = LPSTR = c_char_p + +class RECT(Structure): + _fields_ = [("left", c_long), + ("top", c_long), + ("right", c_long), + ("bottom", c_long)] +RECTL = RECT + +class POINT(Structure): + _fields_ = [("x", c_long), + ("y", c_long)] +POINTL = POINT + +class SIZE(Structure): + _fields_ = [("cx", c_long), + ("cy", c_long)] +SIZEL = SIZE + +def RGB(red, green, blue): + return red + (green << 8) + (blue << 16) + +class FILETIME(Structure): + _fields_ = [("dwLowDateTime", DWORD), + ("dwHighDateTime", DWORD)] + +class MSG(Structure): + _fields_ = [("hWnd", HWND), + ("message", c_uint), + ("wParam", WPARAM), + ("lParam", LPARAM), + ("time", DWORD), + ("pt", POINT)] +MAX_PATH = 260 + +class WIN32_FIND_DATAA(Structure): + _fields_ = [("dwFileAttributes", DWORD), + ("ftCreationTime", FILETIME), + ("ftLastAccessTime", FILETIME), + ("ftLastWriteTime", FILETIME), + ("nFileSizeHigh", DWORD), + ("nFileSizeLow", DWORD), + ("dwReserved0", DWORD), + ("dwReserved1", DWORD), + ("cFileName", c_char * MAX_PATH), + ("cAlternameFileName", c_char * 14)] + +class WIN32_FIND_DATAW(Structure): + _fields_ = [("dwFileAttributes", DWORD), + ("ftCreationTime", FILETIME), + ("ftLastAccessTime", FILETIME), + ("ftLastWriteTime", FILETIME), + ("nFileSizeHigh", DWORD), + ("nFileSizeLow", DWORD), + ("dwReserved0", DWORD), + ("dwReserved1", DWORD), + ("cFileName", c_wchar * MAX_PATH), + ("cAlternameFileName", c_wchar * 14)] diff --git a/ptvsd/pydevd/third_party/wrapped_for_pydev/not_in_default_pythonpath.txt b/ptvsd/pydevd/third_party/wrapped_for_pydev/not_in_default_pythonpath.txt new file mode 100644 index 00000000..24084e9d --- /dev/null +++ b/ptvsd/pydevd/third_party/wrapped_for_pydev/not_in_default_pythonpath.txt @@ -0,0 +1 @@ +The wrapped_for_pydev folder is not in the default pythonpath... (no __init__.py file) \ No newline at end of file diff --git a/ptvsd/untangle.py b/ptvsd/untangle.py new file mode 100644 index 00000000..90d091ef --- /dev/null +++ b/ptvsd/untangle.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python + +""" + untangle + + Converts xml to python objects. + + The only method you need to call is parse() + + Partially inspired by xml2obj + (http://code.activestate.com/recipes/149368-xml2obj/) + + Author: Christian Stefanescu (http://0chris.com) + License: MIT License - http://www.opensource.org/licenses/mit-license.php +""" +import os +import keyword +from xml.sax import make_parser, handler +try: + from StringIO import StringIO +except ImportError: + from io import StringIO +try: + from types import StringTypes + + def is_string(x): + return isinstance(x, StringTypes) +except ImportError: + def is_string(x): + return isinstance(x, str) + +__version__ = '1.1.1' + + +class Element(object): + """ + Representation of an XML element. + """ + def __init__(self, name, attributes): + self._name = name + self._attributes = attributes + self.children = [] + self.is_root = False + self.cdata = '' + + def add_child(self, element): + """ + Store child elements. 
+ """ + self.children.append(element) + + def add_cdata(self, cdata): + """ + Store cdata + """ + self.cdata = self.cdata + cdata + + def get_attribute(self, key): + """ + Get attributes by key + """ + return self._attributes.get(key) + + def get_elements(self, name=None): + """ + Find a child element by name + """ + if name: + return [e for e in self.children if e._name == name] + else: + return self.children + + def __getitem__(self, key): + return self.get_attribute(key) + + def __getattr__(self, key): + matching_children = [x for x in self.children if x._name == key] + if matching_children: + if len(matching_children) == 1: + self.__dict__[key] = matching_children[0] + return matching_children[0] + else: + self.__dict__[key] = matching_children + return matching_children + else: + raise AttributeError( + "'%s' has no attribute '%s'" % (self._name, key) + ) + + def __hasattribute__(self, name): + if name in self.__dict__: + return True + return any(self.children, lambda x: x._name == name) + + def __iter__(self): + yield self + + def __str__(self): + return ( + "Element <%s> with attributes %s, children %s and cdata %s" % + (self._name, self._attributes, self.children, self.cdata) + ) + + def __repr__(self): + return ( + "Element(name = %s, attributes = %s, cdata = %s)" % + (self._name, self._attributes, self.cdata) + ) + + def __nonzero__(self): + return self.is_root or self._name is not None + + def __eq__(self, val): + return self.cdata == val + + def __dir__(self): + children_names = [x._name for x in self.children] + return children_names + + def __len__(self): + return len(self.children) + + def __contains__(self, key): + return key in dir(self) + + +class Handler(handler.ContentHandler): + """ + SAX handler which creates the Python object structure out of ``Element``s + """ + def __init__(self): + self.root = Element(None, None) + self.root.is_root = True + self.elements = [] + + def startElement(self, name, attributes): + name = name.replace('-', '_') + name = name.replace('.', '_') + name = name.replace(':', '_') + + # adding trailing _ for keywords + if keyword.iskeyword(name): + name += '_' + + attrs = dict() + for k, v in attributes.items(): + attrs[k] = v + element = Element(name, attrs) + if len(self.elements) > 0: + self.elements[-1].add_child(element) + else: + self.root.add_child(element) + self.elements.append(element) + + def endElement(self, name): + self.elements.pop() + + def characters(self, cdata): + self.elements[-1].add_cdata(cdata) + + +def parse(filename, **parser_features): + """ + Interprets the given string as a filename, URL or XML data string, + parses it and returns a Python object which represents the given + document. + + Extra arguments to this function are treated as feature values to pass + to ``parser.setFeature()``. For example, ``feature_external_ges=False`` + will set ``xml.sax.handler.feature_external_ges`` to False, disabling + the parser's inclusion of external general (text) entities such as DTDs. + + Raises ``ValueError`` if the first argument is None / empty string. + + Raises ``AttributeError`` if a requested xml.sax feature is not found in + ``xml.sax.handler``. + + Raises ``xml.sax.SAXParseException`` if something goes wrong + during parsing. 
+ """ + if (filename is None or (is_string(filename) and filename.strip()) == ''): + raise ValueError('parse() takes a filename, URL or XML string') + parser = make_parser() + for feature, value in parser_features.items(): + parser.setFeature(getattr(handler, feature), value) + sax_handler = Handler() + parser.setContentHandler(sax_handler) + if is_string(filename) and (os.path.exists(filename) or is_url(filename)): + parser.parse(filename) + else: + if hasattr(filename, 'read'): + parser.parse(filename) + else: + parser.parse(StringIO(filename)) + + return sax_handler.root + + +def is_url(string): + """ + Checks if the given string starts with 'http(s)'. + """ + try: + return string.startswith('http://') or string.startswith('https://') + except AttributeError: + return False + +# vim: set expandtab ts=4 sw=4: diff --git a/ptvsd/wrapper.py b/ptvsd/wrapper.py index af35ce63..c43ab861 100644 --- a/ptvsd/wrapper.py +++ b/ptvsd/wrapper.py @@ -2,8 +2,7 @@ # Licensed under the MIT License. See LICENSE in the project root # for license information. -# TODO: with_statement is not needed -from __future__ import print_function, with_statement, absolute_import +from __future__ import print_function, absolute_import import atexit import os @@ -11,8 +10,7 @@ import socket import sys import threading import traceback -import untangle -import platform +import platform try: import urllib @@ -27,6 +25,7 @@ import _pydevd_bundle.pydevd_extension_utils as pydevd_extutil import ptvsd.ipcjson as ipcjson import ptvsd.futures as futures +import ptvsd.untangle as untangle __author__ = "Microsoft Corporation " diff --git a/setup.py b/setup.py index eecac430..b882e351 100644 --- a/setup.py +++ b/setup.py @@ -4,8 +4,24 @@ # Licensed under the MIT License. See LICENSE in the project root # for license information. +import os +import os.path from setuptools import setup +# Add pydevd files as data files for this package. They are not treated as a package of their own, +# because we don't actually want to provide pydevd - just use our own copy internally. +def get_pydevd_package_data(): + ptvsd_prefix = 'ptvsd/' + pydevd_prefix = ptvsd_prefix + 'pydevd' + for root, dirs, files in os.walk(pydevd_prefix): + # From the root of pydevd repo, we want only scripts and subdirectories that + # constitute the package itself (not helper scripts, tests etc). But when + # walking down into those subdirectories, we want everything below. + if root == pydevd_prefix: + dirs[:] = [d for d in dirs if d.startswith('pydev') or d.startswith('_pydev')] + files[:] = [f for f in files if f.endswith('.py') and 'pydev' in f] + for f in files: + yield os.path.join(root[len(ptvsd_prefix):], f) setup(name='ptvsd', version='4.0.0a1', @@ -21,5 +37,5 @@ setup(name='ptvsd', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: MIT License'], packages=['ptvsd'], - install_requires=['untangle', 'pydevd>=1.1.1'] + package_data={'ptvsd': list(get_pydevd_package_data())}, )