mirror of
https://github.com/microsoft/debugpy.git
synced 2025-12-23 08:48:12 +00:00
Merge commit '915e24d2a0' as 'ptvsd/pydevd'
This commit is contained in:
commit
688f7e970d
439 changed files with 151357 additions and 0 deletions
36
ptvsd/pydevd/.gitignore
vendored
Normal file
36
ptvsd/pydevd/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*.class
|
||||
_pydevd_bundle/*.so
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
env/
|
||||
bin/
|
||||
build/temp.*
|
||||
develop-eggs/
|
||||
dist/
|
||||
eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.coverage
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
|
||||
snippet.py
|
||||
build/*
|
||||
17
ptvsd/pydevd/.project
Normal file
17
ptvsd/pydevd/.project
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>PyDev.Debugger</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>org.python.pydev.PyDevBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>org.python.pydev.pythonNature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
||||
12
ptvsd/pydevd/.pydevproject
Normal file
12
ptvsd/pydevd/.pydevproject
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<?eclipse-pydev version="1.0"?><pydev_project>
|
||||
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
|
||||
<path>/${PROJECT_DIR_NAME}</path>
|
||||
<path>/${PROJECT_DIR_NAME}/build_tools</path>
|
||||
<path>/${PROJECT_DIR_NAME}/jython_test_deps/ant.jar</path>
|
||||
<path>/${PROJECT_DIR_NAME}/jython_test_deps/junit.jar</path>
|
||||
</pydev_pathproperty>
|
||||
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 3.0</pydev_property>
|
||||
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
|
||||
<pydev_property name="org.python.pydev.PYTHON_ADDITIONAL_GRAMMAR_VALIDATION">2.6, 2.7, 3.6</pydev_property>
|
||||
</pydev_project>
|
||||
9
ptvsd/pydevd/.settings/org.eclipse.core.resources.prefs
Normal file
9
ptvsd/pydevd/.settings/org.eclipse.core.resources.prefs
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
eclipse.preferences.version=1
|
||||
encoding//.settings/org.python.pydev.yaml=UTF-8
|
||||
encoding//pydev_ipython/inputhook.py=utf-8
|
||||
encoding//pydev_ipython/inputhookglut.py=utf-8
|
||||
encoding//pydev_ipython/inputhookpyglet.py=utf-8
|
||||
encoding//pydev_ipython/inputhookqt4.py=utf-8
|
||||
encoding//pydev_ipython/inputhookqt5.py=utf-8
|
||||
encoding//pydev_ipython/inputhookwx.py=utf-8
|
||||
encoding//pydevd_attach_to_process/winappdbg/__init__.py=utf-8
|
||||
28
ptvsd/pydevd/.settings/org.python.pydev.yaml
Normal file
28
ptvsd/pydevd/.settings/org.python.pydev.yaml
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
ADD_NEW_LINE_AT_END_OF_FILE: true
|
||||
AUTOPEP8_PARAMETERS: ''
|
||||
BREAK_IMPORTS_MODE: ESCAPE
|
||||
DATE_FIELD_FORMAT: yyyy-MM-dd
|
||||
DATE_FIELD_NAME: __updated__
|
||||
DELETE_UNUSED_IMPORTS: false
|
||||
ENABLE_DATE_FIELD_ACTION: false
|
||||
FORMAT_BEFORE_SAVING: false
|
||||
FORMAT_ONLY_CHANGED_LINES: false
|
||||
FORMAT_WITH_AUTOPEP8: false
|
||||
FROM_IMPORTS_FIRST: false
|
||||
GROUP_IMPORTS: true
|
||||
MULTILINE_IMPORTS: true
|
||||
PEP8_IMPORTS: true
|
||||
PYDEV_TEST_RUNNER: '2'
|
||||
PYDEV_TEST_RUNNER_DEFAULT_PARAMETERS: "--capture=no\r\n-vv"
|
||||
PYDEV_USE_PYUNIT_VIEW: true
|
||||
SAVE_ACTIONS_ONLY_ON_WORKSPACE_FILES: true
|
||||
SORT_IMPORTS_ON_SAVE: false
|
||||
SORT_NAMES_GROUPED: false
|
||||
SPACES_BEFORE_COMMENT: '2'
|
||||
SPACES_IN_START_COMMENT: '1'
|
||||
TRIM_EMPTY_LINES: false
|
||||
TRIM_MULTILINE_LITERALS: false
|
||||
USE_ASSIGN_WITH_PACES_INSIDER_PARENTESIS: false
|
||||
USE_OPERATORS_WITH_SPACE: true
|
||||
USE_SPACE_AFTER_COMMA: true
|
||||
USE_SPACE_FOR_PARENTESIS: false
|
||||
75
ptvsd/pydevd/.travis.yml
Normal file
75
ptvsd/pydevd/.travis.yml
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
language: python
|
||||
|
||||
matrix:
|
||||
include:
|
||||
# Python 2.6 (with and without cython)
|
||||
- python: 2.6
|
||||
env: PYDEVD_USE_CYTHON=YES
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
- python: 2.6
|
||||
env: PYDEVD_USE_CYTHON=NO
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
# Python 2.7 (with and without cython)
|
||||
- python: 2.7
|
||||
env: PYDEVD_USE_CYTHON=YES
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
- python: 2.7
|
||||
env: PYDEVD_USE_CYTHON=NO
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
# Python 3.5 (with and without cython)
|
||||
- python: 3.5
|
||||
env: PYDEVD_USE_CYTHON=YES
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
- python: 3.5
|
||||
env: PYDEVD_USE_CYTHON=NO
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
# Python 3.6 (with and without cython)
|
||||
- python: 3.6
|
||||
env: PYDEVD_USE_CYTHON=YES
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
- python: 3.6
|
||||
env: PYDEVD_USE_CYTHON=NO
|
||||
env: PYDEVD_TEST_JYTHON=NO
|
||||
# Jython
|
||||
- python: 2.7
|
||||
env: PYDEVD_USE_CYTHON=NO
|
||||
env: PYDEVD_TEST_JYTHON=YES
|
||||
env: JYTHON_URL=http://search.maven.org/remotecontent?filepath=org/python/jython-installer/2.7.0/jython-installer-2.7.0.jar
|
||||
|
||||
before_install:
|
||||
# CPython setup
|
||||
- if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then chmod +x miniconda.sh; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then ./miniconda.sh -b; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then export PATH=/home/travis/miniconda2/bin:$PATH; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" == "NO" ]; then conda update --yes conda; fi
|
||||
# Jython setup
|
||||
- if [ "$PYDEVD_TEST_JYTHON" == "YES" ]; then wget $JYTHON_URL -O jython_installer.jar; java -jar jython_installer.jar -s -d $HOME/jython; export PATH=$HOME/jython:$HOME/jython/bin:$PATH; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" == "YES" ]; then jython -c "print('')"; fi
|
||||
# The next couple lines fix a crash with multiprocessing on Travis and are not specific to using Miniconda
|
||||
- sudo rm -rf /dev/shm
|
||||
- sudo ln -s /run/shm /dev/shm
|
||||
# Fix issue with testGui
|
||||
- "export DISPLAY=:99.0"
|
||||
- "sh -e /etc/init.d/xvfb start"
|
||||
# Install packages
|
||||
install:
|
||||
# Both
|
||||
- export PYTHONPATH=.
|
||||
# Python setup
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then conda create --yes -n build_env python=$TRAVIS_PYTHON_VERSION; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then source activate build_env; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then chmod +x ./.travis_install_python_deps.sh; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then ./.travis_install_python_deps.sh; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then python build_tools/build.py; fi
|
||||
# Jython setup
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "YES" ]; then chmod +x ./.travis_install_jython_deps.sh; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "YES" ]; then ./.travis_install_jython_deps.sh; fi
|
||||
|
||||
# Run test
|
||||
# On local machine with jython: c:\bin\jython2.7.0\bin\jython.exe -Dpython.path=.;jython_test_deps/ant.jar;jython_test_deps/junit.jar -m pytest
|
||||
# On remove machine with python: c:\bin\python27\python.exe -m pytest
|
||||
script:
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "NO" ]; then python -m pytest; fi
|
||||
- if [ "$PYDEVD_TEST_JYTHON" = "YES" ]; then jython -Dpython.path=.:jython_test_deps/ant.jar:jython_test_deps/junit.jar -m pytest; fi
|
||||
|
||||
4
ptvsd/pydevd/.travis_install_jython_deps.sh
Normal file
4
ptvsd/pydevd/.travis_install_jython_deps.sh
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
#!/bin/bash
|
||||
set -ev
|
||||
|
||||
pip install pytest
|
||||
21
ptvsd/pydevd/.travis_install_python_deps.sh
Normal file
21
ptvsd/pydevd/.travis_install_python_deps.sh
Normal file
|
|
@ -0,0 +1,21 @@
|
|||
#!/bin/bash
|
||||
set -ev
|
||||
|
||||
conda install --yes numpy ipython cython pytest psutil
|
||||
|
||||
if [ "$TRAVIS_PYTHON_VERSION" = "2.6" ]; then
|
||||
conda install --yes pyqt=4
|
||||
# Django 1.7 does not support Python 2.7
|
||||
fi
|
||||
if [ "$TRAVIS_PYTHON_VERSION" = "2.7" ]; then
|
||||
conda install --yes pyqt=4
|
||||
pip install "django>=1.7,<1.8"
|
||||
|
||||
fi
|
||||
if [ "$TRAVIS_PYTHON_VERSION" = "3.5" ]; then
|
||||
conda install --yes pyqt=5
|
||||
pip install "django>=1.7,<1.8"
|
||||
fi
|
||||
|
||||
pip install Pympler
|
||||
pip install pytest
|
||||
203
ptvsd/pydevd/LICENSE
Normal file
203
ptvsd/pydevd/LICENSE
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
Eclipse Public License - v 1.0
|
||||
|
||||
THE ACCOMPANYING PROGRAM IS PROVIDED UNDER THE TERMS OF THIS ECLIPSE PUBLIC
|
||||
LICENSE ("AGREEMENT"). ANY USE, REPRODUCTION OR DISTRIBUTION OF THE PROGRAM
|
||||
CONSTITUTES RECIPIENT'S ACCEPTANCE OF THIS AGREEMENT.
|
||||
|
||||
1. DEFINITIONS
|
||||
|
||||
"Contribution" means:
|
||||
|
||||
a) in the case of the initial Contributor, the initial code and documentation
|
||||
distributed under this Agreement, and
|
||||
b) in the case of each subsequent Contributor:
|
||||
i) changes to the Program, and
|
||||
ii) additions to the Program;
|
||||
|
||||
where such changes and/or additions to the Program originate from and are
|
||||
distributed by that particular Contributor. A Contribution 'originates'
|
||||
from a Contributor if it was added to the Program by such Contributor
|
||||
itself or anyone acting on such Contributor's behalf. Contributions do not
|
||||
include additions to the Program which: (i) are separate modules of
|
||||
software distributed in conjunction with the Program under their own
|
||||
license agreement, and (ii) are not derivative works of the Program.
|
||||
|
||||
"Contributor" means any person or entity that distributes the Program.
|
||||
|
||||
"Licensed Patents" mean patent claims licensable by a Contributor which are
|
||||
necessarily infringed by the use or sale of its Contribution alone or when
|
||||
combined with the Program.
|
||||
|
||||
"Program" means the Contributions distributed in accordance with this
|
||||
Agreement.
|
||||
|
||||
"Recipient" means anyone who receives the Program under this Agreement,
|
||||
including all Contributors.
|
||||
|
||||
2. GRANT OF RIGHTS
|
||||
a) Subject to the terms of this Agreement, each Contributor hereby grants
|
||||
Recipient a non-exclusive, worldwide, royalty-free copyright license to
|
||||
reproduce, prepare derivative works of, publicly display, publicly
|
||||
perform, distribute and sublicense the Contribution of such Contributor,
|
||||
if any, and such derivative works, in source code and object code form.
|
||||
b) Subject to the terms of this Agreement, each Contributor hereby grants
|
||||
Recipient a non-exclusive, worldwide, royalty-free patent license under
|
||||
Licensed Patents to make, use, sell, offer to sell, import and otherwise
|
||||
transfer the Contribution of such Contributor, if any, in source code and
|
||||
object code form. This patent license shall apply to the combination of
|
||||
the Contribution and the Program if, at the time the Contribution is
|
||||
added by the Contributor, such addition of the Contribution causes such
|
||||
combination to be covered by the Licensed Patents. The patent license
|
||||
shall not apply to any other combinations which include the Contribution.
|
||||
No hardware per se is licensed hereunder.
|
||||
c) Recipient understands that although each Contributor grants the licenses
|
||||
to its Contributions set forth herein, no assurances are provided by any
|
||||
Contributor that the Program does not infringe the patent or other
|
||||
intellectual property rights of any other entity. Each Contributor
|
||||
disclaims any liability to Recipient for claims brought by any other
|
||||
entity based on infringement of intellectual property rights or
|
||||
otherwise. As a condition to exercising the rights and licenses granted
|
||||
hereunder, each Recipient hereby assumes sole responsibility to secure
|
||||
any other intellectual property rights needed, if any. For example, if a
|
||||
third party patent license is required to allow Recipient to distribute
|
||||
the Program, it is Recipient's responsibility to acquire that license
|
||||
before distributing the Program.
|
||||
d) Each Contributor represents that to its knowledge it has sufficient
|
||||
copyright rights in its Contribution, if any, to grant the copyright
|
||||
license set forth in this Agreement.
|
||||
|
||||
3. REQUIREMENTS
|
||||
|
||||
A Contributor may choose to distribute the Program in object code form under
|
||||
its own license agreement, provided that:
|
||||
|
||||
a) it complies with the terms and conditions of this Agreement; and
|
||||
b) its license agreement:
|
||||
i) effectively disclaims on behalf of all Contributors all warranties
|
||||
and conditions, express and implied, including warranties or
|
||||
conditions of title and non-infringement, and implied warranties or
|
||||
conditions of merchantability and fitness for a particular purpose;
|
||||
ii) effectively excludes on behalf of all Contributors all liability for
|
||||
damages, including direct, indirect, special, incidental and
|
||||
consequential damages, such as lost profits;
|
||||
iii) states that any provisions which differ from this Agreement are
|
||||
offered by that Contributor alone and not by any other party; and
|
||||
iv) states that source code for the Program is available from such
|
||||
Contributor, and informs licensees how to obtain it in a reasonable
|
||||
manner on or through a medium customarily used for software exchange.
|
||||
|
||||
When the Program is made available in source code form:
|
||||
|
||||
a) it must be made available under this Agreement; and
|
||||
b) a copy of this Agreement must be included with each copy of the Program.
|
||||
Contributors may not remove or alter any copyright notices contained
|
||||
within the Program.
|
||||
|
||||
Each Contributor must identify itself as the originator of its Contribution,
|
||||
if
|
||||
any, in a manner that reasonably allows subsequent Recipients to identify the
|
||||
originator of the Contribution.
|
||||
|
||||
4. COMMERCIAL DISTRIBUTION
|
||||
|
||||
Commercial distributors of software may accept certain responsibilities with
|
||||
respect to end users, business partners and the like. While this license is
|
||||
intended to facilitate the commercial use of the Program, the Contributor who
|
||||
includes the Program in a commercial product offering should do so in a manner
|
||||
which does not create potential liability for other Contributors. Therefore,
|
||||
if a Contributor includes the Program in a commercial product offering, such
|
||||
Contributor ("Commercial Contributor") hereby agrees to defend and indemnify
|
||||
every other Contributor ("Indemnified Contributor") against any losses,
|
||||
damages and costs (collectively "Losses") arising from claims, lawsuits and
|
||||
other legal actions brought by a third party against the Indemnified
|
||||
Contributor to the extent caused by the acts or omissions of such Commercial
|
||||
Contributor in connection with its distribution of the Program in a commercial
|
||||
product offering. The obligations in this section do not apply to any claims
|
||||
or Losses relating to any actual or alleged intellectual property
|
||||
infringement. In order to qualify, an Indemnified Contributor must:
|
||||
a) promptly notify the Commercial Contributor in writing of such claim, and
|
||||
b) allow the Commercial Contributor to control, and cooperate with the
|
||||
Commercial Contributor in, the defense and any related settlement
|
||||
negotiations. The Indemnified Contributor may participate in any such claim at
|
||||
its own expense.
|
||||
|
||||
For example, a Contributor might include the Program in a commercial product
|
||||
offering, Product X. That Contributor is then a Commercial Contributor. If
|
||||
that Commercial Contributor then makes performance claims, or offers
|
||||
warranties related to Product X, those performance claims and warranties are
|
||||
such Commercial Contributor's responsibility alone. Under this section, the
|
||||
Commercial Contributor would have to defend claims against the other
|
||||
Contributors related to those performance claims and warranties, and if a
|
||||
court requires any other Contributor to pay any damages as a result, the
|
||||
Commercial Contributor must pay those damages.
|
||||
|
||||
5. NO WARRANTY
|
||||
|
||||
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, THE PROGRAM IS PROVIDED ON AN
|
||||
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR
|
||||
IMPLIED INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE,
|
||||
NON-INFRINGEMENT, MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Each
|
||||
Recipient is solely responsible for determining the appropriateness of using
|
||||
and distributing the Program and assumes all risks associated with its
|
||||
exercise of rights under this Agreement , including but not limited to the
|
||||
risks and costs of program errors, compliance with applicable laws, damage to
|
||||
or loss of data, programs or equipment, and unavailability or interruption of
|
||||
operations.
|
||||
|
||||
6. DISCLAIMER OF LIABILITY
|
||||
|
||||
EXCEPT AS EXPRESSLY SET FORTH IN THIS AGREEMENT, NEITHER RECIPIENT NOR ANY
|
||||
CONTRIBUTORS SHALL HAVE ANY LIABILITY FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING WITHOUT LIMITATION
|
||||
LOST PROFITS), HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
||||
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
||||
ARISING IN ANY WAY OUT OF THE USE OR DISTRIBUTION OF THE PROGRAM OR THE
|
||||
EXERCISE OF ANY RIGHTS GRANTED HEREUNDER, EVEN IF ADVISED OF THE POSSIBILITY
|
||||
OF SUCH DAMAGES.
|
||||
|
||||
7. GENERAL
|
||||
|
||||
If any provision of this Agreement is invalid or unenforceable under
|
||||
applicable law, it shall not affect the validity or enforceability of the
|
||||
remainder of the terms of this Agreement, and without further action by the
|
||||
parties hereto, such provision shall be reformed to the minimum extent
|
||||
necessary to make such provision valid and enforceable.
|
||||
|
||||
If Recipient institutes patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Program itself
|
||||
(excluding combinations of the Program with other software or hardware)
|
||||
infringes such Recipient's patent(s), then such Recipient's rights granted
|
||||
under Section 2(b) shall terminate as of the date such litigation is filed.
|
||||
|
||||
All Recipient's rights under this Agreement shall terminate if it fails to
|
||||
comply with any of the material terms or conditions of this Agreement and does
|
||||
not cure such failure in a reasonable period of time after becoming aware of
|
||||
such noncompliance. If all Recipient's rights under this Agreement terminate,
|
||||
Recipient agrees to cease use and distribution of the Program as soon as
|
||||
reasonably practicable. However, Recipient's obligations under this Agreement
|
||||
and any licenses granted by Recipient relating to the Program shall continue
|
||||
and survive.
|
||||
|
||||
Everyone is permitted to copy and distribute copies of this Agreement, but in
|
||||
order to avoid inconsistency the Agreement is copyrighted and may only be
|
||||
modified in the following manner. The Agreement Steward reserves the right to
|
||||
publish new versions (including revisions) of this Agreement from time to
|
||||
time. No one other than the Agreement Steward has the right to modify this
|
||||
Agreement. The Eclipse Foundation is the initial Agreement Steward. The
|
||||
Eclipse Foundation may assign the responsibility to serve as the Agreement
|
||||
Steward to a suitable separate entity. Each new version of the Agreement will
|
||||
be given a distinguishing version number. The Program (including
|
||||
Contributions) may always be distributed subject to the version of the
|
||||
Agreement under which it was received. In addition, after a new version of the
|
||||
Agreement is published, Contributor may elect to distribute the Program
|
||||
(including its Contributions) under the new version. Except as expressly
|
||||
stated in Sections 2(a) and 2(b) above, Recipient receives no rights or
|
||||
licenses to the intellectual property of any Contributor under this Agreement,
|
||||
whether expressly, by implication, estoppel or otherwise. All rights in the
|
||||
Program not expressly granted under this Agreement are reserved.
|
||||
|
||||
This Agreement is governed by the laws of the State of New York and the
|
||||
intellectual property laws of the United States of America. No party to this
|
||||
Agreement will bring a legal action under this Agreement more than one year
|
||||
after the cause of action arose. Each party waives its rights to a jury trial in
|
||||
any resulting litigation.
|
||||
4
ptvsd/pydevd/MANIFEST.in
Normal file
4
ptvsd/pydevd/MANIFEST.in
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
include *.rst *.txt *.md LICENSE .travis.yml appveyor.yml *.pyx
|
||||
recursive-include pydevd_attach_to_process *.py *.dll *.so *.dylib *.txt *.c *.h *.bat Makefile *.sh *.pyx
|
||||
recursive-include _pydevd_bundle *.pyx
|
||||
recursive-include build_tools *.py
|
||||
48
ptvsd/pydevd/README.rst
Normal file
48
ptvsd/pydevd/README.rst
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
PyDev.Debugger
|
||||
==============
|
||||
|
||||
The sources for the PyDev.Debugger (used in PyDev & PyCharm) may be seen at:
|
||||
|
||||
https://github.com/fabioz/PyDev.Debugger
|
||||
|
||||
In general, the debugger backend should **NOT** be installed separately if you're using an IDE which already
|
||||
bundles it (such as PyDev or PyCharm).
|
||||
|
||||
It is however available in PyPi so that it can be installed for doing remote debugging with `pip` -- so, when
|
||||
debugging a process which runs in another machine, it's possible to `pip install pydevd` and in the code use
|
||||
`pydevd.settrace(host='10.1.1.1')` to connect the debugger backend to the debugger UI running in the IDE
|
||||
(whereas previously the sources had to be manually copied from the IDE installation).
|
||||
|
||||
It should be compatible with Python 2.6 onwards (as well as Jython 2.7, IronPython and PyPy -- and
|
||||
any other variant which properly supports the Python structure for debuggers -- i.e.: sys.settrace/threading.settrace).
|
||||
|
||||
Recent versions contain speedup modules using Cython, which are generated with a few changes in the regular files
|
||||
to `cythonize` the files. To update and compile the cython sources (and generate some other auto-generated files),
|
||||
`build_tools/build.py` should be run -- note that the resulting .pyx and .c files should be commited.
|
||||
|
||||
To see performance changes, see:
|
||||
|
||||
https://www.speedtin.com/reports/7_pydevd_cython (performance results with cython).
|
||||
https://www.speedtin.com/reports/8_pydevd_pure_python (performance results without cython).
|
||||
|
||||
To generate a distribution with the precompiled binaries for the IDE, `build_binaries_windows.py` should be run (
|
||||
note that the environments must be pre-created as specified in that file).
|
||||
|
||||
To generate a distribution to upload to PyPi, `python setup.py sdist bdist_wheel` should be run for each python version
|
||||
which should have a wheel and afterwards `twine upload -s dist/pydevd-*` shoud be run to actually upload the contents
|
||||
to PyPi.
|
||||
|
||||
Travis (Linux CI):
|
||||
|
||||
.. |travis| image:: https://travis-ci.org/fabioz/PyDev.Debugger.png
|
||||
:target: https://travis-ci.org/fabioz/PyDev.Debugger
|
||||
|
||||
|travis|
|
||||
|
||||
Appveyor (Windows CI):
|
||||
|
||||
.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/j6vjq687brbk20ux?svg=true
|
||||
:target: https://ci.appveyor.com/project/fabioz/pydev-debugger
|
||||
|
||||
|appveyor|
|
||||
|
||||
0
ptvsd/pydevd/_pydev_bundle/__init__.py
Normal file
0
ptvsd/pydevd/_pydev_bundle/__init__.py
Normal file
158
ptvsd/pydevd/_pydev_bundle/_pydev_calltip_util.py
Normal file
158
ptvsd/pydevd/_pydev_bundle/_pydev_calltip_util.py
Normal file
|
|
@ -0,0 +1,158 @@
|
|||
'''
|
||||
License: Apache 2.0
|
||||
Author: Yuli Fitterman
|
||||
'''
|
||||
# noinspection PyBroadException
|
||||
import types
|
||||
|
||||
from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY3K
|
||||
|
||||
try:
|
||||
import inspect
|
||||
except:
|
||||
try:
|
||||
from _pydev_imps import _pydev_inspect as inspect
|
||||
except:
|
||||
import traceback;
|
||||
|
||||
traceback.print_exc() # Ok, no inspect available (search will not work)from _pydevd_bundle.pydevd_constants import IS_JYTHON, IS_PY3K
|
||||
|
||||
from _pydev_bundle._pydev_imports_tipper import signature_from_docstring
|
||||
|
||||
|
||||
def is_bound_method(obj):
|
||||
if isinstance(obj, types.MethodType):
|
||||
return getattr(obj, '__self__', getattr(obj, 'im_self', None)) is not None
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
def get_class_name(instance):
|
||||
return getattr(getattr(instance, "__class__", None), "__name__", None)
|
||||
|
||||
|
||||
def get_bound_class_name(obj):
|
||||
my_self = getattr(obj, '__self__', getattr(obj, 'im_self', None))
|
||||
if my_self is None:
|
||||
return None
|
||||
return get_class_name(my_self)
|
||||
|
||||
|
||||
def get_description(obj):
|
||||
try:
|
||||
ob_call = obj.__call__
|
||||
except:
|
||||
ob_call = None
|
||||
|
||||
if isinstance(obj, type) or type(obj).__name__ == 'classobj':
|
||||
fob = getattr(obj, '__init__', lambda: None)
|
||||
if not isinstance(fob, (types.FunctionType, types.MethodType)):
|
||||
fob = obj
|
||||
elif is_bound_method(ob_call):
|
||||
fob = ob_call
|
||||
else:
|
||||
fob = obj
|
||||
|
||||
argspec = ""
|
||||
fn_name = None
|
||||
fn_class = None
|
||||
if isinstance(fob, (types.FunctionType, types.MethodType)):
|
||||
spec_info = inspect.getfullargspec(fob) if IS_PY3K else inspect.getargspec(fob)
|
||||
argspec = inspect.formatargspec(*spec_info)
|
||||
fn_name = getattr(fob, '__name__', None)
|
||||
if isinstance(obj, type) or type(obj).__name__ == 'classobj':
|
||||
fn_name = "__init__"
|
||||
fn_class = getattr(obj, "__name__", "UnknownClass")
|
||||
elif is_bound_method(obj) or is_bound_method(ob_call):
|
||||
fn_class = get_bound_class_name(obj) or "UnknownClass"
|
||||
|
||||
else:
|
||||
fn_name = getattr(fob, '__name__', None)
|
||||
fn_self = getattr(fob, '__self__', None)
|
||||
if fn_self is not None and not isinstance(fn_self, types.ModuleType):
|
||||
fn_class = get_class_name(fn_self)
|
||||
|
||||
doc_string = get_docstring(ob_call) if is_bound_method(ob_call) else get_docstring(obj)
|
||||
return create_method_stub(fn_name, fn_class, argspec, doc_string)
|
||||
|
||||
|
||||
def create_method_stub(fn_name, fn_class, argspec, doc_string):
|
||||
if fn_name and argspec:
|
||||
doc_string = "" if doc_string is None else doc_string
|
||||
fn_stub = create_function_stub(fn_name, argspec, doc_string, indent=1 if fn_class else 0)
|
||||
if fn_class:
|
||||
expr = fn_class if fn_name == '__init__' else fn_class + '().' + fn_name
|
||||
return create_class_stub(fn_class, fn_stub) + "\n" + expr
|
||||
else:
|
||||
expr = fn_name
|
||||
return fn_stub + "\n" + expr
|
||||
elif doc_string:
|
||||
if fn_name:
|
||||
restored_signature, _ = signature_from_docstring(doc_string, fn_name)
|
||||
if restored_signature:
|
||||
return create_method_stub(fn_name, fn_class, restored_signature, doc_string)
|
||||
return create_function_stub('unknown', '(*args, **kwargs)', doc_string) + '\nunknown'
|
||||
|
||||
else:
|
||||
return ''
|
||||
|
||||
|
||||
def get_docstring(obj):
|
||||
if obj is not None:
|
||||
try:
|
||||
if IS_JYTHON:
|
||||
# Jython
|
||||
doc = obj.__doc__
|
||||
if doc is not None:
|
||||
return doc
|
||||
|
||||
from _pydev_bundle import _pydev_jy_imports_tipper
|
||||
|
||||
is_method, infos = _pydev_jy_imports_tipper.ismethod(obj)
|
||||
ret = ''
|
||||
if is_method:
|
||||
for info in infos:
|
||||
ret += info.get_as_doc()
|
||||
return ret
|
||||
|
||||
else:
|
||||
|
||||
doc = inspect.getdoc(obj)
|
||||
if doc is not None:
|
||||
return doc
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
return ''
|
||||
try:
|
||||
# if no attempt succeeded, try to return repr()...
|
||||
return repr(obj)
|
||||
except:
|
||||
try:
|
||||
# otherwise the class
|
||||
return str(obj.__class__)
|
||||
except:
|
||||
# if all fails, go to an empty string
|
||||
return ''
|
||||
|
||||
|
||||
def create_class_stub(class_name, contents):
|
||||
return "class %s(object):\n%s" % (class_name, contents)
|
||||
|
||||
|
||||
def create_function_stub(fn_name, fn_argspec, fn_docstring, indent=0):
|
||||
def shift_right(string, prefix):
|
||||
return ''.join(prefix + line for line in string.splitlines(True))
|
||||
|
||||
fn_docstring = shift_right(inspect.cleandoc(fn_docstring), " " * (indent + 1))
|
||||
ret = '''
|
||||
def %s%s:
|
||||
"""%s"""
|
||||
pass
|
||||
''' % (fn_name, fn_argspec, fn_docstring)
|
||||
ret = ret[1:] # remove first /n
|
||||
ret = ret.replace('\t', " ")
|
||||
if indent:
|
||||
prefix = " " * indent
|
||||
ret = shift_right(ret, prefix)
|
||||
return ret
|
||||
191
ptvsd/pydevd/_pydev_bundle/_pydev_completer.py
Normal file
191
ptvsd/pydevd/_pydev_bundle/_pydev_completer.py
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
import pydevconsole
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
import builtins as __builtin__ # Py3
|
||||
else:
|
||||
import __builtin__
|
||||
|
||||
try:
|
||||
import java.lang #@UnusedImport
|
||||
from _pydev_bundle import _pydev_jy_imports_tipper
|
||||
_pydev_imports_tipper = _pydev_jy_imports_tipper
|
||||
except ImportError:
|
||||
IS_JYTHON = False
|
||||
from _pydev_bundle import _pydev_imports_tipper
|
||||
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
dir2 = _pydev_imports_tipper.generate_imports_tip_for_module
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# _StartsWithFilter
|
||||
#=======================================================================================================================
|
||||
class _StartsWithFilter:
|
||||
'''
|
||||
Used because we can't create a lambda that'll use an outer scope in jython 2.1
|
||||
'''
|
||||
|
||||
|
||||
def __init__(self, start_with):
|
||||
self.start_with = start_with.lower()
|
||||
|
||||
def __call__(self, name):
|
||||
return name.lower().startswith(self.start_with)
|
||||
|
||||
#=======================================================================================================================
|
||||
# Completer
|
||||
#
|
||||
# This class was gotten from IPython.completer (dir2 was replaced with the completer already in pydev)
|
||||
#=======================================================================================================================
|
||||
class Completer:
    # Code-completion engine adapted from IPython.completer (dir2 was
    # replaced with the completer already in pydev).

    def __init__(self, namespace=None, global_namespace=None):
        """Create a new completer for the command line.

        Completer([namespace,global_namespace]) -> completer instance.

        If unspecified, the default namespace where completions are performed
        is __main__ (technically, __main__.__dict__). Namespaces should be
        given as dictionaries.

        An optional second namespace can be given.  This allows the completer
        to handle cases where both the local and global scopes need to be
        distinguished.

        Completer instances should be used as the completion mechanism of
        readline via the set_completer() call:

        readline.set_completer(Completer(my_namespace).complete)
        """

        # Don't bind to namespace quite yet, but flag whether the user wants a
        # specific namespace or to use __main__.__dict__. This will allow us
        # to bind to __main__.__dict__ at completion time, not now.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace

        # The global namespace, if given, can be bound directly
        if global_namespace is None:
            self.global_namespace = {}
        else:
            self.global_namespace = global_namespace

    def complete(self, text):
        """Return the next possible completion for 'text'.

        This is called successively with state == 0, 1, 2, ... until it
        returns None.  The completion should begin with 'text'.

        """
        if self.use_main_ns:
            # In pydev this option should never be used
            raise RuntimeError('Namespace must be provided!')
            # NOTE(review): line below is unreachable (follows the raise) and
            # __main__ is not imported in this module.
            self.namespace = __main__.__dict__  # @UndefinedVariable

        # Dotted expressions are attribute completions; bare names are
        # looked up across builtins + the provided namespaces.
        if "." in text:
            return self.attr_matches(text)
        else:
            return self.global_matches(text)

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return a list of all keywords, built-in functions and names currently
        defined in self.namespace or self.global_namespace that match.

        """

        def get_item(obj, attr):
            # dict-style accessor handed to dir2 for the merged namespace.
            return obj[attr]

        a = {}

        # Merge builtins + both namespaces into a single lookup dict.
        for dict_with_comps in [__builtin__.__dict__, self.namespace, self.global_namespace]:  # @UndefinedVariable
            a.update(dict_with_comps)

        filter = _StartsWithFilter(text)

        return dir2(a, a.keys(), get_item, filter)

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming the text is of the form NAME.NAME....[NAME], and is
        evaluatable in self.namespace or self.global_namespace, it will be
        evaluated and its attributes (as revealed by dir()) are used as
        possible completions.  (For class instances, class members are are
        also considered.)

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.

        """
        import re

        # Another option, seems to work great. Catches things like ''.<tab>
        m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)  # @UndefinedVariable

        if not m:
            return []

        expr, attr = m.group(1, 3)
        # SECURITY NOTE: eval of the user-typed expression; acceptable here
        # only because the debugger already executes arbitrary user code.
        try:
            obj = eval(expr, self.namespace)
        except:
            try:
                obj = eval(expr, self.global_namespace)
            except:
                return []

        filter = _StartsWithFilter(attr)

        words = dir2(obj, filter=filter)

        return words
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# generate_completions_as_xml
|
||||
#=======================================================================================================================
|
||||
def generate_completions_as_xml(frame, act_tok):
    '''Compute completions for *act_tok* in the scope of *frame* and return
    them serialized as the <xml><comp .../></xml> wire format, where each
    completion carries p0..p3 = (name, description, parameters, type).
    '''
    if frame is None:
        return '<xml></xml>'

    # Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329
    # (Names not resolved in generator expression in method)
    # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html
    updated_globals = {}
    updated_globals.update(frame.f_globals)
    updated_globals.update(frame.f_locals)  # locals later because it has precedence over the actual globals

    if pydevconsole.IPYTHON:
        completions = pydevconsole.get_completions(act_tok, act_tok, updated_globals, frame.f_locals)
    else:
        completer = Completer(updated_globals, None)
        # list(tuple(name, descr, parameters, type))
        completions = completer.complete(act_tok)

    valid_xml = pydevd_xml.make_valid_xml_value
    quote = pydevd_xml.quote

    msg = ["<xml>"]

    for comp in completions:
        # Each tuple element is quoted (reserved chars '/>_= \t') and made
        # XML-safe before being embedded as an attribute value.
        msg.append('<comp p0="')
        msg.append(valid_xml(quote(comp[0], '/>_= \t')))
        msg.append('" p1="')
        msg.append(valid_xml(quote(comp[1], '/>_= \t')))
        msg.append('" p2="')
        msg.append(valid_xml(quote(comp[2], '/>_= \t')))
        msg.append('" p3="')
        msg.append(valid_xml(quote(comp[3], '/>_= \t')))
        msg.append('"/>')
    msg.append("</xml>")

    return ''.join(msg)
|
||||
|
||||
41
ptvsd/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py
Normal file
41
ptvsd/pydevd/_pydev_bundle/_pydev_filesystem_encoding.py
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
import sys
|
||||
|
||||
|
||||
def __getfilesystemencoding():
    '''
    Best-effort filesystem-encoding lookup for CPython and Jython.

    Note: there's a copy of this method in interpreterInfo.py
    '''
    try:
        encoding = sys.getfilesystemencoding()
        if not encoding:
            raise RuntimeError('Unable to get encoding.')
        return encoding
    except:
        # CPython could not tell us -- maybe we are running under Jython.
        try:
            from java.lang import System  # @UnresolvedImport
            os_name = System.getProperty("os.name").lower()
            if os_name.find('win') != -1:
                # mbcs does not work on Jython, so, use a (hopefully) suitable replacement
                return 'ISO-8859-1'
            return 'utf-8'
        except:
            pass

        # Last resort (sys.getfilesystemencoding is only available from 2.3 onwards).
        if sys.platform == 'win32':
            return 'mbcs'
        return 'utf-8'
|
||||
|
||||
def getfilesystemencoding():
    '''Return a filesystem encoding that is verified usable for
    encode/decode, falling back to 'utf-8' on any failure.
    '''
    try:
        codec = __getfilesystemencoding()

        # Probe the codec: if an empty string can't round-trip, it's unusable.
        if hasattr('', 'encode'):
            ''.encode(codec)
        if hasattr('', 'decode'):
            ''.decode(codec)

        return codec
    except:
        return 'utf-8'
|
||||
130
ptvsd/pydevd/_pydev_bundle/_pydev_getopt.py
Normal file
130
ptvsd/pydevd/_pydev_bundle/_pydev_getopt.py
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
|
||||
#=======================================================================================================================
|
||||
# getopt code copied since gnu_getopt is not available on jython 2.1
|
||||
#=======================================================================================================================
|
||||
class GetoptError(Exception):
    '''Raised by the getopt helpers when an option cannot be parsed.'''

    # Class-level defaults so the attributes exist even on a bare instance.
    opt = ''
    msg = ''

    def __init__(self, msg, opt=''):
        Exception.__init__(self, msg, opt)
        self.msg = msg
        self.opt = opt

    def __str__(self):
        return self.msg
|
||||
|
||||
|
||||
def gnu_getopt(args, shortopts, longopts=[]):
    """getopt(args, options[, long_options]) -> opts, args

    This function works like getopt(), except that GNU style scanning
    mode is used by default. This means that option and non-option
    arguments may be intermixed. The getopt() function stops
    processing options as soon as a non-option argument is
    encountered.

    If the first character of the option string is `+', or if the
    environment variable POSIXLY_CORRECT is set, then option
    processing stops as soon as a non-option argument is encountered.
    """
    opts = []
    prog_args = []

    # Accept a single long-option string as well as any sequence of them.
    if type('') == type(longopts):
        longopts = [longopts]
    else:
        longopts = list(longopts)

    # A leading '+' means: stop parsing options at the first positional arg.
    all_options_first = shortopts.startswith('+')
    if all_options_first:
        shortopts = shortopts[1:]

    while args:
        current = args[0]

        if current == '--':
            # Explicit end-of-options marker: everything after is positional.
            prog_args += args[1:]
            break

        if current[:2] == '--':
            opts, args = do_longs(opts, current[2:], longopts, args[1:])
        elif current[:1] == '-':
            opts, args = do_shorts(opts, current[1:], shortopts, args[1:])
        elif all_options_first:
            prog_args += args
            break
        else:
            prog_args.append(current)
            args = args[1:]

    return opts, prog_args
|
||||
|
||||
def do_longs(opts, opt, longopts, args):
    '''Consume one --long option (possibly "name=value") and return the
    updated (opts, remaining_args).
    '''
    # Split an inline "name=value" form, if present.
    if '=' in opt:
        eq = opt.index('=')
        opt, optarg = opt[:eq], opt[eq + 1:]
    else:
        optarg = None

    has_arg, opt = long_has_args(opt, longopts)
    if has_arg:
        # Take the value from the next argument when it wasn't inlined.
        if optarg is None:
            if not args:
                raise GetoptError('option --%s requires argument' % opt, opt)
            optarg, args = args[0], args[1:]
    elif optarg:
        raise GetoptError('option --%s must not have an argument' % opt, opt)

    opts.append(('--' + opt, optarg or ''))
    return opts, args
|
||||
|
||||
# Return:
|
||||
# has_arg?
|
||||
# full option name
|
||||
def long_has_args(opt, longopts):
    '''Resolve a (possibly abbreviated) long option name.

    Returns (has_arg, full_option_name); raises GetoptError when the
    option is unknown or the abbreviation is ambiguous.
    '''
    candidates = [o for o in longopts if o.startswith(opt)]
    if not candidates:
        raise GetoptError('option --%s not recognized' % opt, opt)

    # Exact matches win over prefix matches.
    if opt in candidates:
        return False, opt
    if opt + '=' in candidates:
        return True, opt

    # Only a prefix: it must identify exactly one option.
    if len(candidates) > 1:
        # XXX since candidates contains all valid continuations, might be
        # nice to work them into the error msg
        raise GetoptError('option --%s not a unique prefix' % opt, opt)

    assert len(candidates) == 1
    unique_match = candidates[0]
    has_arg = unique_match.endswith('=')
    if has_arg:
        unique_match = unique_match[:-1]
    return has_arg, unique_match
|
||||
|
||||
def do_shorts(opts, optstring, shortopts, args):
    '''Consume a bundle of short options (e.g. the "abc" of "-abc") and
    return the updated (opts, remaining_args).
    '''
    while optstring != '':
        opt, optstring = optstring[0], optstring[1:]
        if not short_has_arg(opt, shortopts):
            optarg = ''
        else:
            # The rest of the bundle -- or the next argument -- is the value.
            if optstring == '':
                if not args:
                    raise GetoptError('option -%s requires argument' % opt,
                                      opt)
                optstring, args = args[0], args[1:]
            optarg, optstring = optstring, ''
        opts.append(('-' + opt, optarg))
    return opts, args
|
||||
|
||||
def short_has_arg(opt, shortopts):
    '''True when short option *opt* takes a value, i.e. a ':' follows it
    in the shortopts spec; raises GetoptError for unknown options.
    '''
    for index, spec_char in enumerate(shortopts):
        # ':' itself is never an option name, only an argument marker.
        if opt == spec_char != ':':
            return shortopts.startswith(':', index + 1)
    raise GetoptError('option -%s not recognized' % opt, opt)
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# End getopt code
|
||||
#=======================================================================================================================
|
||||
350
ptvsd/pydevd/_pydev_bundle/_pydev_imports_tipper.py
Normal file
350
ptvsd/pydevd/_pydev_bundle/_pydev_imports_tipper.py
Normal file
|
|
@ -0,0 +1,350 @@
|
|||
import inspect
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
from _pydev_bundle._pydev_tipper_common import do_find
|
||||
|
||||
try:
|
||||
xrange
|
||||
except:
|
||||
xrange = range
|
||||
|
||||
#completion types.
|
||||
TYPE_IMPORT = '0'
|
||||
TYPE_CLASS = '1'
|
||||
TYPE_FUNCTION = '2'
|
||||
TYPE_ATTR = '3'
|
||||
TYPE_BUILTIN = '4'
|
||||
TYPE_PARAM = '5'
|
||||
|
||||
def _imp(name, log=None):
    '''Import *name*, retrying with successive parent packages on failure.

    Returns the top-level module object (as __import__ does); raises
    ImportError when not even the root package can be imported. Failures
    are recorded on *log* when one is given.
    '''
    try:
        return __import__(name)
    except:
        if '.' not in name:
            # Nothing left to strip: give up with a descriptive error.
            s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
            if log is not None:
                log.add_content(s)
                log.add_exception()

            raise ImportError(s)

        parent = name[0:name.rfind('.')]

        if log is not None:
            log.add_content('Unable to import', name, 'trying with', parent)
            log.add_exception()

        return _imp(parent, log)
|
||||
|
||||
|
||||
# IronPython detection: on .NET ('cli'), wrap _imp so that the matching CLR
# assembly reference is added before the regular Python import is attempted.
IS_IPY = False
if sys.platform == 'cli':
    IS_IPY = True
    _old_imp = _imp

    def _imp(name, log=None):
        # We must add a reference in clr for .Net
        import clr  # @UnresolvedImport
        initial_name = name
        # Try the full dotted name, then successively shorter prefixes,
        # until one of them resolves to a CLR assembly.
        while '.' in name:
            try:
                clr.AddReference(name)
                break  # If it worked, that's OK.
            except:
                name = name[0:name.rfind('.')]
        else:
            try:
                clr.AddReference(name)
            except:
                pass  # That's OK (not dot net module).

        # Delegate the actual import to the original implementation.
        return _old_imp(initial_name, log)
|
||||
|
||||
|
||||
|
||||
def get_file(mod):
    '''Return the source filename for module/object *mod*, or None.

    Prefers inspect's resolution; when inspect cannot handle the object,
    falls back to mod.__file__, mapping a compiled .pyc/.pyo path back to
    the matching .py when that file exists on disk.
    '''
    f = None
    try:
        f = inspect.getsourcefile(mod) or inspect.getfile(mod)
    except:
        if hasattr(mod, '__file__'):
            f = mod.__file__
            # Bug fix: was `f.lower(f[-4:])`, which raises TypeError
            # (str.lower takes no arguments) -- the intent is to compare
            # the lowercased extension.
            if f[-4:].lower() in ['.pyc', '.pyo']:
                filename = f[:-4] + '.py'
                if os.path.exists(filename):
                    f = filename

    return f
|
||||
|
||||
def Find(name, log=None):
    '''Locate the dotted name *name*.

    Returns (source_file, resolved_object, top_level_module, found_as),
    where found_as is the trailing attribute path (dot-joined) of the
    components that were not themselves modules.
    '''
    f = None

    mod = _imp(name, log)
    parent = mod
    foundAs = ''

    if inspect.ismodule(mod):
        f = get_file(mod)

    components = name.split('.')

    old_comp = None
    for comp in components[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            # Only tolerate the failure for a repeated component (the
            # shadowing case above); otherwise propagate.
            if old_comp != comp:
                raise

        if inspect.ismodule(mod):
            f = get_file(mod)
        else:
            # Non-module component: record it in the attribute path.
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp

        old_comp = comp

    return f, mod, parent, foundAs
|
||||
|
||||
def search_definition(data):
    '''Locate where the dotted name *data* is defined.

    @return file, line, col
    '''
    # Normalize: drop newlines and any trailing dots from the token.
    token = data.replace('\n', '')
    if token.endswith('.'):
        token = token.rstrip('.')

    f, mod, parent, foundAs = Find(token)
    try:
        return do_find(f, mod), foundAs
    except:
        # Fall back to the parent module when the leaf cannot be searched.
        return do_find(f, parent), foundAs
|
||||
|
||||
|
||||
def generate_tip(data, log=None):
    '''Return (source_file, completion_tips) for the dotted name *data*.'''
    # Normalize: drop newlines and any trailing dots from the token.
    token = data.replace('\n', '')
    if token.endswith('.'):
        token = token.rstrip('.')

    f, mod, parent, foundAs = Find(token, log)
    tips = generate_imports_tip_for_module(mod)
    return f, tips
|
||||
|
||||
|
||||
def check_char(c):
    '''Map characters invalid in an identifier ('-' and '.') to '_'.'''
    return '_' if c in ('-', '.') else c
|
||||
|
||||
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
    '''
        @param obj_to_complete: the object from where we should get the completions
        @param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
        @param getattr: the way to get a given object from the obj_to_complete (used for the completer)
        @param filter: a callable that receives the name and decides if it should be appended or not to the results
        @return: list of tuples, so that each tuple represents a completion with:
            name, doc, args, type (from the TYPE_* constants)
    '''
    ret = []

    if dir_comps is None:
        dir_comps = dir(obj_to_complete)
        if hasattr(obj_to_complete, '__dict__'):
            dir_comps.append('__dict__')
        if hasattr(obj_to_complete, '__class__'):
            dir_comps.append('__class__')

    get_complete_info = True

    if len(dir_comps) > 1000:
        # ok, we don't want to let our users wait forever...
        # no complete info for you...
        get_complete_info = False

    # For these common builtin value types the docstring is skipped: too many
    # constants, and shipping all that over the socket is slow.
    dontGetDocsOn = (float, int, str, tuple, list)
    for d in dir_comps:

        if d is None:
            continue

        if not filter(d):
            continue

        args = ''

        try:
            # Prefer the class attribute (unbound descriptor) and fall back
            # to the instance attribute.
            try:
                obj = getattr(obj_to_complete.__class__, d)
            except:
                obj = getattr(obj_to_complete, d)
        except:  # just ignore and get it without additional info
            ret.append((d, '', args, TYPE_BUILTIN))
        else:

            if get_complete_info:
                try:
                    retType = TYPE_BUILTIN

                    # check if we have to get docs
                    getDoc = True
                    for class_ in dontGetDocsOn:

                        if isinstance(obj, class_):
                            getDoc = False
                            break

                    doc = ''
                    if getDoc:
                        # no need to get this info... too many constants are defined and
                        # makes things much slower (passing all that through sockets takes quite some time)
                        try:
                            doc = inspect.getdoc(obj)
                            if doc is None:
                                doc = ''
                        except:  # may happen on jython when checking java classes (so, just ignore it)
                            doc = ''

                    if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                        try:
                            args, vargs, kwargs, defaults = inspect.getargspec(obj)

                            # Render the argspec as "(a, b, c)".
                            r = ''
                            for a in (args):
                                if len(r) > 0:
                                    r = r + ', '
                                r = r + str(a)
                            args = '(%s)' % (r)
                        except TypeError:
                            # ok, let's see if we can get the arguments from the doc
                            args, doc = signature_from_docstring(doc, getattr(obj, '__name__', None))

                        retType = TYPE_FUNCTION

                    elif inspect.isclass(obj):
                        retType = TYPE_CLASS

                    elif inspect.ismodule(obj):
                        retType = TYPE_IMPORT

                    else:
                        retType = TYPE_ATTR

                    # add token and doc to return - assure only strings.
                    ret.append((d, doc, args, retType))

                except:  # just ignore and get it without aditional info
                    ret.append((d, '', args, TYPE_BUILTIN))

            else:  # get_complete_info == False
                if inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.isfunction(obj) or inspect.isroutine(obj):
                    retType = TYPE_FUNCTION

                elif inspect.isclass(obj):
                    retType = TYPE_CLASS

                elif inspect.ismodule(obj):
                    retType = TYPE_IMPORT

                else:
                    retType = TYPE_ATTR
                # ok, no complete info, let's try to do this as fast and clean as possible
                # so, no docs for this kind of information, only the signatures
                ret.append((d, '', str(args), retType))

    return ret
|
||||
|
||||
|
||||
def signature_from_docstring(doc, obj_name):
    '''Best-effort extraction of a "(arg, arg)" signature from *doc*.

    Returns (args, doc); *args* defaults to '()' when nothing parseable is
    found. Any parsing error is swallowed (best-effort by design).
    '''
    args = '()'
    try:
        found = False
        if len(doc) > 0:
            if IS_IPY:
                # Handle case where we have the situation below
                # sort(self, object cmp, object key)
                # sort(self, object cmp, object key, bool reverse)
                # sort(self)
                # sort(self, object cmp)

                # Or: sort(self: list, cmp: object, key: object)
                # sort(self: list, cmp: object, key: object, reverse: bool)
                # sort(self: list)
                # sort(self: list, cmp: object)
                if obj_name:
                    name = obj_name + '('

                    # Fix issue where it was appearing sort(aa)sort(bb)sort(cc) in the same line.
                    lines = doc.splitlines()
                    if len(lines) == 1:
                        c = doc.count(name)
                        if c > 1:
                            doc = ('\n' + name).join(doc.split(name))

                    # Pick the longest "name(...)" line as the main overload.
                    major = ''
                    for line in doc.splitlines():
                        if line.startswith(name) and line.endswith(')'):
                            if len(line) > len(major):
                                major = line
                    if major:
                        args = major[major.index('('):]
                        found = True

            if not found:
                # Generic path: the signature, if any, precedes the first
                # '->', '--' or line break of the docstring.
                i = doc.find('->')
                if i < 0:
                    i = doc.find('--')
                    if i < 0:
                        i = doc.find('\n')
                        if i < 0:
                            i = doc.find('\r')

                if i > 0:
                    s = doc[0:i]
                    s = s.strip()

                    # let's see if we have a docstring in the first line
                    if s[-1] == ')':
                        start = s.find('(')
                        if start >= 0:
                            end = s.find('[')
                            if end <= 0:
                                end = s.find(')')
                                if end <= 0:
                                    end = len(s)

                            args = s[start:end]
                            if not args[-1] == ')':
                                args = args + ')'

                            # now, get rid of unwanted chars
                            l = len(args) - 1
                            r = []
                            for i in xrange(len(args)):
                                if i == 0 or i == l:
                                    r.append(args[i])
                                else:
                                    r.append(check_char(args[i]))

                            args = ''.join(r)

            if IS_IPY:
                # NOTE(review): collapses typed self ("(self: list, ...)")
                # to plain "(self, ...)" and truncates at the first ')'.
                if args.startswith('(self:'):
                    i = args.find(',')
                    if i >= 0:
                        args = '(self' + args[i:]
                    else:
                        args = '(self)'
                    i = args.find(')')
                    if i > 0:
                        args = args[:i + 1]

    except:
        pass
    return args, doc
|
||||
504
ptvsd/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py
Normal file
504
ptvsd/pydevd/_pydev_bundle/_pydev_jy_imports_tipper.py
Normal file
|
|
@ -0,0 +1,504 @@
|
|||
try:
|
||||
import StringIO
|
||||
except:
|
||||
import io as StringIO
|
||||
|
||||
import traceback
|
||||
from java.lang import StringBuffer #@UnresolvedImport
|
||||
from java.lang import String #@UnresolvedImport
|
||||
import java.lang #@UnresolvedImport
|
||||
import sys
|
||||
from _pydev_bundle._pydev_tipper_common import do_find
|
||||
|
||||
|
||||
from org.python.core import PyReflectedFunction #@UnresolvedImport
|
||||
|
||||
from org.python import core #@UnresolvedImport
|
||||
from org.python.core import PyClass #@UnresolvedImport
|
||||
|
||||
try:
|
||||
xrange
|
||||
except:
|
||||
xrange = range
|
||||
|
||||
|
||||
#completion types.
|
||||
TYPE_IMPORT = '0'
|
||||
TYPE_CLASS = '1'
|
||||
TYPE_FUNCTION = '2'
|
||||
TYPE_ATTR = '3'
|
||||
TYPE_BUILTIN = '4'
|
||||
TYPE_PARAM = '5'
|
||||
|
||||
def _imp(name):
    '''Import *name*, falling back to successive parent packages; raises
    RuntimeError when not even the root package can be imported.
    '''
    try:
        return __import__(name)
    except:
        if '.' not in name:
            s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
            raise RuntimeError(s)
        # Retry with the parent package.
        return _imp(name[0:name.rfind('.')])
|
||||
|
||||
import java.util
|
||||
_java_rt_file = getattr(java.util, '__file__', None)
|
||||
|
||||
def Find(name):
    '''Jython variant: locate the dotted name *name*.

    Returns (source_file, resolved_object, top_level_module, found_as).
    Builtin str/dict are remapped to their Jython implementation classes.
    '''
    f = None
    if name.startswith('__builtin__'):
        if name == '__builtin__.str':
            name = 'org.python.core.PyString'
        elif name == '__builtin__.dict':
            name = 'org.python.core.PyDictionary'

    mod = _imp(name)
    parent = mod
    foundAs = ''

    if hasattr(mod, '__file__'):
        f = mod.__file__

    components = name.split('.')
    old_comp = None
    for comp in components[1:]:
        try:
            # this happens in the following case:
            # we have mx.DateTime.mxDateTime.mxDateTime.pyd
            # but after importing it, mx.DateTime.mxDateTime does shadows access to mxDateTime.pyd
            mod = getattr(mod, comp)
        except AttributeError:
            # Only tolerate the failure for a repeated component; otherwise
            # propagate.
            if old_comp != comp:
                raise

        if hasattr(mod, '__file__'):
            f = mod.__file__
        else:
            # Non-module component: record it in the attribute path.
            if len(foundAs) > 0:
                foundAs = foundAs + '.'
            foundAs = foundAs + comp

        old_comp = comp

    if f is None and name.startswith('java.lang'):
        # Hack: java.lang.__file__ is None on Jython 2.7 (whereas it pointed to rt.jar on Jython 2.5).
        f = _java_rt_file

    if f is not None:
        # Map compiled artifacts back to their source names.
        if f.endswith('.pyc'):
            f = f[:-1]
        elif f.endswith('$py.class'):
            f = f[:-len('$py.class')] + '.py'
    return f, mod, parent, foundAs
|
||||
|
||||
def format_param_class_name(paramClassName):
    '''Turn a JVM descriptor / repr type name into a readable one
    (e.g. "[I" -> "int[]", "<type 'int'>" -> "int").
    '''
    # Unwrap "<type 'xxx'>" reprs down to just xxx.
    if paramClassName.startswith('<type \'') and paramClassName.endswith('\'>'):
        paramClassName = paramClassName[len('<type \''): -2]

    if paramClassName.startswith('['):
        # JVM descriptor syntax for array types.
        primitive_arrays = {'[C': 'char[]', '[B': 'byte[]', '[I': 'int[]'}
        if paramClassName in primitive_arrays:
            paramClassName = primitive_arrays[paramClassName]
        elif paramClassName.startswith('[L') and paramClassName.endswith(';'):
            # "[Ljava.lang.String;" -> "java.lang.String[]"
            paramClassName = paramClassName[2:-1] + '[]'
    return paramClassName
|
||||
|
||||
|
||||
def generate_tip(data, log=None):
    '''Return (source_file, completion_tips) for the dotted name *data*.'''
    # Normalize: drop newlines and any trailing dots from the token.
    token = data.replace('\n', '')
    if token.endswith('.'):
        token = token.rstrip('.')

    f, mod, parent, foundAs = Find(token)
    tips = generate_imports_tip_for_module(mod)
    return f, tips
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# Info
|
||||
#=======================================================================================================================
|
||||
class Info:
    '''Holds the information gathered about one function/method overload.'''

    def __init__(self, name, **kwargs):
        self.name = name
        self.doc = kwargs.get('doc', None)
        self.args = kwargs.get('args', ())  # tuple of strings
        self.varargs = kwargs.get('varargs', None)  # string
        self.kwargs = kwargs.get('kwargs', None)  # string
        self.ret = kwargs.get('ret', None)  # string

    def basic_as_str(self):
        '''@returns this class information as a string (just basic format)
        '''
        args = self.args
        if sys.version_info[0] <= 2:
            # Supress the u''
            args = [arg.encode('utf-8') if isinstance(arg, unicode) else arg for arg in args]
        s = 'function:%s args=%s, varargs=%s, kwargs=%s, docs:%s' % \
            (self.name, args, self.varargs, self.kwargs, self.doc)
        return s

    def get_as_doc(self):
        # Render as a @doc/@params/@varargs/@kwargs/@return annotated text.
        s = str(self.name)
        if self.doc:
            s += '\n@doc %s\n' % str(self.doc)

        if self.args:
            s += '\n@params '
            for arg in self.args:
                s += str(format_param_class_name(arg))
                s += ' '

        if self.varargs:
            s += '\n@varargs '
            s += str(self.varargs)

        if self.kwargs:
            s += '\n@kwargs '
            s += str(self.kwargs)

        if self.ret:
            s += '\n@return '
            s += str(format_param_class_name(str(self.ret)))

        return str(s)
|
||||
|
||||
def isclass(cls):
    # True both for Jython python classes (PyClass) and for wrapped java
    # classes (java.lang.Class instances).
    return isinstance(cls, core.PyClass) or type(cls) == java.lang.Class
|
||||
|
||||
def ismethod(func):
    '''this function should return the information gathered on a function

    @param func: this is the function we want to get info on
    @return a tuple where:
        0 = indicates whether the parameter passed is a method or not
        1 = a list of classes 'Info', with the info gathered from the function
            this is a list because when we have methods from java with the same name and different signatures,
            we actually have many methods, each with its own set of arguments
    '''

    try:
        if isinstance(func, core.PyFunction):
            # ok, this is from python, created by jython
            # print_ '    PyFunction'

            def getargs(func_code):
                """Get information about the arguments accepted by a code object.

                Three things are returned: (args, varargs, varkw), where 'args' is
                a list of argument names (possibly containing nested lists), and
                'varargs' and 'varkw' are the names of the * and ** arguments or None."""

                nargs = func_code.co_argcount
                names = func_code.co_varnames
                args = list(names[:nargs])
                step = 0

                # Jython 2.7 moved the CO_* flags to org.python.core.CodeFlag.
                if not hasattr(func_code, 'CO_VARARGS'):
                    from org.python.core import CodeFlag  # @UnresolvedImport
                    co_varargs_flag = CodeFlag.CO_VARARGS.flag
                    co_varkeywords_flag = CodeFlag.CO_VARKEYWORDS.flag
                else:
                    co_varargs_flag = func_code.CO_VARARGS
                    co_varkeywords_flag = func_code.CO_VARKEYWORDS

                varargs = None
                if func_code.co_flags & co_varargs_flag:
                    varargs = func_code.co_varnames[nargs]
                    nargs = nargs + 1
                varkw = None
                if func_code.co_flags & co_varkeywords_flag:
                    varkw = func_code.co_varnames[nargs]
                return args, varargs, varkw

            args = getargs(func.func_code)
            return 1, [Info(func.func_name, args=args[0], varargs=args[1], kwargs=args[2], doc=func.func_doc)]

        if isinstance(func, core.PyMethod):
            # this is something from java itself, and jython just wrapped it...

            # things to play in func:
            # ['__call__', '__class__', '__cmp__', '__delattr__', '__dir__', '__doc__', '__findattr__', '__name__', '_doget', 'im_class',
            # 'im_func', 'im_self', 'toString']
            # print_ '    PyMethod'
            # that's the PyReflectedFunction... keep going to get it
            func = func.im_func

        if isinstance(func, PyReflectedFunction):
            # this is something from java itself, and jython just wrapped it...

            # print_ '    PyReflectedFunction'

            # One Info per java overload in argslist.
            infos = []
            for i in xrange(len(func.argslist)):
                # things to play in func.argslist[i]:

                # 'PyArgsCall', 'PyArgsKeywordsCall', 'REPLACE', 'StandardCall', 'args', 'compare', 'compareTo', 'data', 'declaringClass'
                # 'flags', 'isStatic', 'matches', 'precedence']

                # print_ '        ', func.argslist[i].data.__class__
                # func.argslist[i].data.__class__ == java.lang.reflect.Method

                if func.argslist[i]:
                    met = func.argslist[i].data
                    name = met.getName()
                    try:
                        ret = met.getReturnType()
                    except AttributeError:
                        ret = ''
                    parameterTypes = met.getParameterTypes()

                    args = []
                    for j in xrange(len(parameterTypes)):
                        paramTypesClass = parameterTypes[j]
                        # Resolve a printable name for the parameter type,
                        # tolerating the various jython reflection quirks.
                        try:
                            try:
                                paramClassName = paramTypesClass.getName()
                            except:
                                paramClassName = paramTypesClass.getName(paramTypesClass)
                        except AttributeError:
                            try:
                                paramClassName = repr(paramTypesClass)  # should be something like <type 'object'>
                                paramClassName = paramClassName.split('\'')[1]
                            except:
                                paramClassName = repr(paramTypesClass)  # just in case something else happens... it will at least be visible
                        # if the parameter equals [C, it means it it a char array, so, let's change it

                        a = format_param_class_name(paramClassName)
                        # a = a.replace('[]','Array')
                        # a = a.replace('Object', 'obj')
                        # a = a.replace('String', 's')
                        # a = a.replace('Integer', 'i')
                        # a = a.replace('Char', 'c')
                        # a = a.replace('Double', 'd')
                        args.append(a)  # so we don't leave invalid code

                    info = Info(name, args=args, ret=ret)
                    # print_ info.basic_as_str()
                    infos.append(info)

            return 1, infos
    except Exception:
        # Any reflection failure is reported as a pseudo-Info carrying the
        # traceback, rather than propagated to the caller.
        s = StringIO.StringIO()
        traceback.print_exc(file=s)
        return 1, [Info(str('ERROR'), doc=s.getvalue())]

    return 0, None
|
||||
|
||||
def ismodule(mod):
    '''Return a truthy value when *mod* looks like a (java or python) module.'''
    # java modules... do we have other way to know that?
    # Heuristic: a java package has __name__ but neither getClass nor
    # __class__.
    if not hasattr(mod, 'getClass') and not hasattr(mod, '__class__') \
            and hasattr(mod, '__name__'):
        return 1

    return isinstance(mod, core.PyModule)
|
||||
|
||||
|
||||
def dir_obj(obj):
    '''dir() replacement that also walks java superclasses and interfaces,
    since a plain dir() on a java class misses inherited members.
    '''
    ret = []
    found = java.util.HashMap()
    original = obj
    if hasattr(obj, '__class__'):
        if obj.__class__ == java.lang.Class:

            # get info about superclasses
            classes = []
            classes.append(obj)
            try:
                c = obj.getSuperclass()
            except TypeError:
                # may happen on jython when getting the java.lang.Class class
                c = obj.getSuperclass(obj)

            while c != None:
                classes.append(c)
                c = c.getSuperclass()

            # get info about interfaces
            interfs = []
            for obj in classes:
                try:
                    interfs.extend(obj.getInterfaces())
                except TypeError:
                    interfs.extend(obj.getInterfaces(obj))
            classes.extend(interfs)

            # now is the time when we actually get info on the declared methods and fields
            for obj in classes:
                try:
                    declaredMethods = obj.getDeclaredMethods()
                except TypeError:
                    declaredMethods = obj.getDeclaredMethods(obj)

                try:
                    declaredFields = obj.getDeclaredFields()
                except TypeError:
                    declaredFields = obj.getDeclaredFields(obj)

                for i in xrange(len(declaredMethods)):
                    name = declaredMethods[i].getName()
                    ret.append(name)
                    found.put(name, 1)

                for i in xrange(len(declaredFields)):
                    name = declaredFields[i].getName()
                    ret.append(name)
                    found.put(name, 1)

        elif isclass(obj.__class__):
            d = dir(obj.__class__)
            for name in d:
                ret.append(name)
                found.put(name, 1)

    # this simple dir does not always get all the info, that's why we have the part before
    # (e.g.: if we do a dir on String, some methods that are from other interfaces such as
    # charAt don't appear)
    d = dir(original)
    for name in d:
        # Deduplicate against the names collected via reflection above.
        if found.get(name) != 1:
            ret.append(name)

    return ret
|
||||
|
||||
|
||||
def format_arg(arg):
|
||||
'''formats an argument to be shown
|
||||
'''
|
||||
|
||||
s = str(arg)
|
||||
dot = s.rfind('.')
|
||||
if dot >= 0:
|
||||
s = s[dot + 1:]
|
||||
|
||||
s = s.replace(';', '')
|
||||
s = s.replace('[]', 'Array')
|
||||
if len(s) > 0:
|
||||
c = s[0].lower()
|
||||
s = c + s[1:]
|
||||
|
||||
return s
|
||||
|
||||
|
||||
|
||||
def search_definition(data):
|
||||
'''@return file, line, col
|
||||
'''
|
||||
|
||||
data = data.replace('\n', '')
|
||||
if data.endswith('.'):
|
||||
data = data.rstrip('.')
|
||||
f, mod, parent, foundAs = Find(data)
|
||||
try:
|
||||
return do_find(f, mod), foundAs
|
||||
except:
|
||||
return do_find(f, parent), foundAs
|
||||
|
||||
|
||||
def generate_imports_tip_for_module(obj_to_complete, dir_comps=None, getattr=getattr, filter=lambda name:True):
|
||||
'''
|
||||
@param obj_to_complete: the object from where we should get the completions
|
||||
@param dir_comps: if passed, we should not 'dir' the object and should just iterate those passed as a parameter
|
||||
@param getattr: the way to get a given object from the obj_to_complete (used for the completer)
|
||||
@param filter: a callable that receives the name and decides if it should be appended or not to the results
|
||||
@return: list of tuples, so that each tuple represents a completion with:
|
||||
name, doc, args, type (from the TYPE_* constants)
|
||||
'''
|
||||
ret = []
|
||||
|
||||
if dir_comps is None:
|
||||
dir_comps = dir_obj(obj_to_complete)
|
||||
|
||||
for d in dir_comps:
|
||||
|
||||
if d is None:
|
||||
continue
|
||||
|
||||
if not filter(d):
|
||||
continue
|
||||
|
||||
args = ''
|
||||
doc = ''
|
||||
retType = TYPE_BUILTIN
|
||||
|
||||
try:
|
||||
obj = getattr(obj_to_complete, d)
|
||||
except (AttributeError, java.lang.NoClassDefFoundError):
|
||||
#jython has a bug in its custom classloader that prevents some things from working correctly, so, let's see if
|
||||
#we can fix that... (maybe fixing it in jython itself would be a better idea, as this is clearly a bug)
|
||||
#for that we need a custom classloader... we have references from it in the below places:
|
||||
#
|
||||
#http://mindprod.com/jgloss/classloader.html
|
||||
#http://www.javaworld.com/javaworld/jw-03-2000/jw-03-classload-p2.html
|
||||
#http://freshmeat.net/articles/view/1643/
|
||||
#
|
||||
#note: this only happens when we add things to the sys.path at runtime, if they are added to the classpath
|
||||
#before the run, everything goes fine.
|
||||
#
|
||||
#The code below ilustrates what I mean...
|
||||
#
|
||||
#import sys
|
||||
#sys.path.insert(1, r"C:\bin\eclipse310\plugins\org.junit_3.8.1\junit.jar" )
|
||||
#
|
||||
#import junit.framework
|
||||
#print_ dir(junit.framework) #shows the TestCase class here
|
||||
#
|
||||
#import junit.framework.TestCase
|
||||
#
|
||||
#raises the error:
|
||||
#Traceback (innermost last):
|
||||
# File "<console>", line 1, in ?
|
||||
#ImportError: No module named TestCase
|
||||
#
|
||||
#whereas if we had added the jar to the classpath before, everything would be fine by now...
|
||||
|
||||
ret.append((d, '', '', retType))
|
||||
#that's ok, private things cannot be gotten...
|
||||
continue
|
||||
else:
|
||||
|
||||
isMet = ismethod(obj)
|
||||
if isMet[0] and isMet[1]:
|
||||
info = isMet[1][0]
|
||||
try:
|
||||
args, vargs, kwargs = info.args, info.varargs, info.kwargs
|
||||
doc = info.get_as_doc()
|
||||
r = ''
|
||||
for a in (args):
|
||||
if len(r) > 0:
|
||||
r += ', '
|
||||
r += format_arg(a)
|
||||
args = '(%s)' % (r)
|
||||
except TypeError:
|
||||
traceback.print_exc()
|
||||
args = '()'
|
||||
|
||||
retType = TYPE_FUNCTION
|
||||
|
||||
elif isclass(obj):
|
||||
retType = TYPE_CLASS
|
||||
|
||||
elif ismodule(obj):
|
||||
retType = TYPE_IMPORT
|
||||
|
||||
#add token and doc to return - assure only strings.
|
||||
ret.append((d, doc, args, retType))
|
||||
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.path.append(r'D:\dev_programs\eclipse_3\310\eclipse\plugins\org.junit_3.8.1\junit.jar')
|
||||
sys.stdout.write('%s\n' % Find('junit.framework.TestCase'))
|
||||
28
ptvsd/pydevd/_pydev_bundle/_pydev_log.py
Normal file
28
ptvsd/pydevd/_pydev_bundle/_pydev_log.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
import traceback
|
||||
import sys
|
||||
try:
|
||||
import StringIO
|
||||
except:
|
||||
import io as StringIO #Python 3.0
|
||||
|
||||
|
||||
class Log:
|
||||
|
||||
def __init__(self):
|
||||
self._contents = []
|
||||
|
||||
def add_content(self, *content):
|
||||
self._contents.append(' '.join(content))
|
||||
|
||||
def add_exception(self):
|
||||
s = StringIO.StringIO()
|
||||
exc_info = sys.exc_info()
|
||||
traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], limit=None, file=s)
|
||||
self._contents.append(s.getvalue())
|
||||
|
||||
|
||||
def get_contents(self):
|
||||
return '\n'.join(self._contents)
|
||||
|
||||
def clear_log(self):
|
||||
del self._contents[:]
|
||||
67
ptvsd/pydevd/_pydev_bundle/_pydev_tipper_common.py
Normal file
67
ptvsd/pydevd/_pydev_bundle/_pydev_tipper_common.py
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
try:
|
||||
import inspect
|
||||
except:
|
||||
try:
|
||||
from _pydev_imps import _pydev_inspect as inspect
|
||||
except:
|
||||
import traceback;traceback.print_exc() #Ok, no inspect available (search will not work)
|
||||
|
||||
try:
|
||||
import re
|
||||
except:
|
||||
try:
|
||||
import sre as re # for older versions
|
||||
except:
|
||||
import traceback;traceback.print_exc() #Ok, no inspect available (search will not work)
|
||||
|
||||
|
||||
from _pydevd_bundle.pydevd_constants import xrange
|
||||
|
||||
def do_find(f, mod):
|
||||
import linecache
|
||||
if inspect.ismodule(mod):
|
||||
return f, 0, 0
|
||||
|
||||
lines = linecache.getlines(f)
|
||||
|
||||
if inspect.isclass(mod):
|
||||
name = mod.__name__
|
||||
pat = re.compile(r'^\s*class\s*' + name + r'\b')
|
||||
for i in xrange(len(lines)):
|
||||
if pat.match(lines[i]):
|
||||
return f, i, 0
|
||||
|
||||
return f, 0, 0
|
||||
|
||||
if inspect.ismethod(mod):
|
||||
mod = mod.im_func
|
||||
|
||||
if inspect.isfunction(mod):
|
||||
try:
|
||||
mod = mod.func_code
|
||||
except AttributeError:
|
||||
mod = mod.__code__ #python 3k
|
||||
|
||||
if inspect.istraceback(mod):
|
||||
mod = mod.tb_frame
|
||||
|
||||
if inspect.isframe(mod):
|
||||
mod = mod.f_code
|
||||
|
||||
if inspect.iscode(mod):
|
||||
if not hasattr(mod, 'co_filename'):
|
||||
return None, 0, 0
|
||||
|
||||
if not hasattr(mod, 'co_firstlineno'):
|
||||
return mod.co_filename, 0, 0
|
||||
|
||||
lnum = mod.co_firstlineno
|
||||
pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
|
||||
while lnum > 0:
|
||||
if pat.match(lines[lnum]):
|
||||
break
|
||||
lnum -= 1
|
||||
|
||||
return f, lnum, 0
|
||||
|
||||
raise RuntimeError('Do not know about: ' + f + ' ' + str(mod))
|
||||
13
ptvsd/pydevd/_pydev_bundle/fix_getpass.py
Normal file
13
ptvsd/pydevd/_pydev_bundle/fix_getpass.py
Normal file
|
|
@ -0,0 +1,13 @@
|
|||
def fix_getpass():
|
||||
try:
|
||||
import getpass
|
||||
except ImportError:
|
||||
return #If we can't import it, we can't fix it
|
||||
import warnings
|
||||
fallback = getattr(getpass, 'fallback_getpass', None) # >= 2.6
|
||||
if not fallback:
|
||||
fallback = getpass.default_getpass # <= 2.5 @UndefinedVariable
|
||||
getpass.getpass = fallback
|
||||
if hasattr(getpass, 'GetPassWarning'):
|
||||
warnings.simplefilter("ignore", category=getpass.GetPassWarning)
|
||||
|
||||
616
ptvsd/pydevd/_pydev_bundle/pydev_console_utils.py
Normal file
616
ptvsd/pydevd/_pydev_bundle/pydev_console_utils.py
Normal file
|
|
@ -0,0 +1,616 @@
|
|||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib, _queue, Exec
|
||||
from _pydev_bundle._pydev_calltip_util import get_description
|
||||
from _pydev_imps._pydev_saved_modules import thread
|
||||
from _pydevd_bundle import pydevd_vars
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
from _pydevd_bundle.pydevd_constants import IS_JYTHON, dict_iter_items
|
||||
from _pydevd_bundle.pydevd_utils import to_string
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# Null
|
||||
# =======================================================================================================================
|
||||
class Null:
|
||||
"""
|
||||
Gotten from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
return None
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
return self
|
||||
|
||||
def __getattr__(self, mname):
|
||||
return self
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
return self
|
||||
|
||||
def __delattr__(self, name):
|
||||
return self
|
||||
|
||||
def __repr__(self):
|
||||
return "<Null>"
|
||||
|
||||
def __str__(self):
|
||||
return "Null"
|
||||
|
||||
def __len__(self):
|
||||
return 0
|
||||
|
||||
def __getitem__(self):
|
||||
return self
|
||||
|
||||
def __setitem__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def write(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def __nonzero__(self):
|
||||
return 0
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# BaseStdIn
|
||||
# =======================================================================================================================
|
||||
class BaseStdIn:
|
||||
def __init__(self, original_stdin=sys.stdin, *args, **kwargs):
|
||||
try:
|
||||
self.encoding = sys.stdin.encoding
|
||||
except:
|
||||
# Not sure if it's available in all Python versions...
|
||||
pass
|
||||
self.original_stdin = original_stdin
|
||||
|
||||
try:
|
||||
self.errors = sys.stdin.errors # Who knew? sys streams have an errors attribute!
|
||||
except:
|
||||
#Not sure if it's available in all Python versions...
|
||||
pass
|
||||
|
||||
def readline(self, *args, **kwargs):
|
||||
# sys.stderr.write('Cannot readline out of the console evaluation\n') -- don't show anything
|
||||
# This could happen if the user had done input('enter number).<-- upon entering this, that message would appear,
|
||||
# which is not something we want.
|
||||
return '\n'
|
||||
|
||||
def write(self, *args, **kwargs):
|
||||
pass # not available StdIn (but it can be expected to be in the stream interface)
|
||||
|
||||
def flush(self, *args, **kwargs):
|
||||
pass # not available StdIn (but it can be expected to be in the stream interface)
|
||||
|
||||
def read(self, *args, **kwargs):
|
||||
# in the interactive interpreter, a read and a readline are the same.
|
||||
return self.readline()
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
pass # expected in StdIn
|
||||
|
||||
def __getattr__(self, item):
|
||||
# it's called if the attribute wasn't found
|
||||
if hasattr(self.original_stdin, item):
|
||||
return getattr(self.original_stdin, item)
|
||||
raise AttributeError("%s has no attribute %s" % (self.original_stdin, item))
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# StdIn
|
||||
# =======================================================================================================================
|
||||
class StdIn(BaseStdIn):
|
||||
'''
|
||||
Object to be added to stdin (to emulate it as non-blocking while the next line arrives)
|
||||
'''
|
||||
|
||||
def __init__(self, interpreter, host, client_port, original_stdin=sys.stdin):
|
||||
BaseStdIn.__init__(self, original_stdin)
|
||||
self.interpreter = interpreter
|
||||
self.client_port = client_port
|
||||
self.host = host
|
||||
|
||||
def readline(self, *args, **kwargs):
|
||||
# Ok, callback into the client to get the new input
|
||||
try:
|
||||
server = xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
|
||||
requested_input = server.RequestInput()
|
||||
if not requested_input:
|
||||
return '\n' #Yes, a readline must return something (otherwise we can get an EOFError on the input() call).
|
||||
else:
|
||||
# readline should end with '\n' (not doing so makes IPython 5 remove the last *valid* character).
|
||||
requested_input += '\n'
|
||||
return requested_input
|
||||
except KeyboardInterrupt:
|
||||
raise # Let KeyboardInterrupt go through -- #PyDev-816: Interrupting infinite loop in the Interactive Console
|
||||
except:
|
||||
return '\n'
|
||||
|
||||
def close(self, *args, **kwargs):
|
||||
pass # expected in StdIn
|
||||
|
||||
#=======================================================================================================================
|
||||
# DebugConsoleStdIn
|
||||
#=======================================================================================================================
|
||||
class DebugConsoleStdIn(BaseStdIn):
|
||||
'''
|
||||
Object to be added to stdin (to emulate it as non-blocking while the next line arrives)
|
||||
'''
|
||||
|
||||
def __init__(self, dbg, original_stdin):
|
||||
BaseStdIn.__init__(self, original_stdin)
|
||||
self.debugger = dbg
|
||||
|
||||
def __pydev_run_command(self, is_started):
|
||||
try:
|
||||
cmd = self.debugger.cmd_factory.make_input_requested_message(is_started)
|
||||
self.debugger.writer.add_command(cmd)
|
||||
except Exception:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return '\n'
|
||||
|
||||
def readline(self, *args, **kwargs):
|
||||
# Notify Java side about input and call original function
|
||||
self.__pydev_run_command(True)
|
||||
result = self.original_stdin.readline(*args, **kwargs)
|
||||
self.__pydev_run_command(False)
|
||||
return result
|
||||
|
||||
|
||||
class CodeFragment:
|
||||
def __init__(self, text, is_single_line=True):
|
||||
self.text = text
|
||||
self.is_single_line = is_single_line
|
||||
|
||||
def append(self, code_fragment):
|
||||
self.text = self.text + "\n" + code_fragment.text
|
||||
if not code_fragment.is_single_line:
|
||||
self.is_single_line = False
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# BaseInterpreterInterface
|
||||
# =======================================================================================================================
|
||||
class BaseInterpreterInterface:
|
||||
def __init__(self, mainThread):
|
||||
self.mainThread = mainThread
|
||||
self.interruptable = False
|
||||
self.exec_queue = _queue.Queue(0)
|
||||
self.buffer = None
|
||||
|
||||
def need_more_for_code(self, source):
|
||||
# PyDev-502: PyDev 3.9 F2 doesn't support backslash continuations
|
||||
|
||||
# Strangely even the IPython console is_complete said it was complete
|
||||
# even with a continuation char at the end.
|
||||
if source.endswith('\\'):
|
||||
return True
|
||||
|
||||
if hasattr(self.interpreter, 'is_complete'):
|
||||
return not self.interpreter.is_complete(source)
|
||||
try:
|
||||
# At this point, it should always be single.
|
||||
# If we don't do this, things as:
|
||||
#
|
||||
# for i in range(10): print(i)
|
||||
#
|
||||
# (in a single line) don't work.
|
||||
# Note that it won't give an error and code will be None (so, it'll
|
||||
# use execMultipleLines in the next call in this case).
|
||||
symbol = 'single'
|
||||
code = self.interpreter.compile(source, '<input>', symbol)
|
||||
except (OverflowError, SyntaxError, ValueError):
|
||||
# Case 1
|
||||
return False
|
||||
if code is None:
|
||||
# Case 2
|
||||
return True
|
||||
|
||||
# Case 3
|
||||
return False
|
||||
|
||||
def need_more(self, code_fragment):
|
||||
if self.buffer is None:
|
||||
self.buffer = code_fragment
|
||||
else:
|
||||
self.buffer.append(code_fragment)
|
||||
|
||||
return self.need_more_for_code(self.buffer.text)
|
||||
|
||||
def create_std_in(self, debugger=None, original_std_in=None):
|
||||
if debugger is None:
|
||||
return StdIn(self, self.host, self.client_port, original_stdin=original_std_in)
|
||||
else:
|
||||
return DebugConsoleStdIn(dbg=debugger, original_stdin=original_std_in)
|
||||
|
||||
def add_exec(self, code_fragment, debugger=None):
|
||||
original_in = sys.stdin
|
||||
try:
|
||||
help = None
|
||||
if 'pydoc' in sys.modules:
|
||||
pydoc = sys.modules['pydoc'] # Don't import it if it still is not there.
|
||||
|
||||
if hasattr(pydoc, 'help'):
|
||||
# You never know how will the API be changed, so, let's code defensively here
|
||||
help = pydoc.help
|
||||
if not hasattr(help, 'input'):
|
||||
help = None
|
||||
except:
|
||||
# Just ignore any error here
|
||||
pass
|
||||
|
||||
more = False
|
||||
try:
|
||||
sys.stdin = self.create_std_in(debugger, original_in)
|
||||
try:
|
||||
if help is not None:
|
||||
# This will enable the help() function to work.
|
||||
try:
|
||||
try:
|
||||
help.input = sys.stdin
|
||||
except AttributeError:
|
||||
help._input = sys.stdin
|
||||
except:
|
||||
help = None
|
||||
if not self._input_error_printed:
|
||||
self._input_error_printed = True
|
||||
sys.stderr.write('\nError when trying to update pydoc.help.input\n')
|
||||
sys.stderr.write('(help() may not work -- please report this as a bug in the pydev bugtracker).\n\n')
|
||||
traceback.print_exc()
|
||||
|
||||
try:
|
||||
self.start_exec()
|
||||
if hasattr(self, 'debugger'):
|
||||
import pydevd_tracing
|
||||
pydevd_tracing.SetTrace(self.debugger.trace_dispatch)
|
||||
|
||||
more = self.do_add_exec(code_fragment)
|
||||
|
||||
if hasattr(self, 'debugger'):
|
||||
import pydevd_tracing
|
||||
pydevd_tracing.SetTrace(None)
|
||||
|
||||
self.finish_exec(more)
|
||||
finally:
|
||||
if help is not None:
|
||||
try:
|
||||
try:
|
||||
help.input = original_in
|
||||
except AttributeError:
|
||||
help._input = original_in
|
||||
except:
|
||||
pass
|
||||
|
||||
finally:
|
||||
sys.stdin = original_in
|
||||
except SystemExit:
|
||||
raise
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
return more
|
||||
|
||||
def do_add_exec(self, codeFragment):
|
||||
'''
|
||||
Subclasses should override.
|
||||
|
||||
@return: more (True if more input is needed to complete the statement and False if the statement is complete).
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_namespace(self):
|
||||
'''
|
||||
Subclasses should override.
|
||||
|
||||
@return: dict with namespace.
|
||||
'''
|
||||
raise NotImplementedError()
|
||||
|
||||
def __resolve_reference__(self, text):
|
||||
"""
|
||||
|
||||
:type text: str
|
||||
"""
|
||||
obj = None
|
||||
if '.' not in text:
|
||||
try:
|
||||
obj = self.get_namespace()[text]
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
if obj is None:
|
||||
try:
|
||||
obj = self.get_namespace()['__builtins__'][text]
|
||||
except:
|
||||
pass
|
||||
|
||||
if obj is None:
|
||||
try:
|
||||
obj = getattr(self.get_namespace()['__builtins__'], text, None)
|
||||
except:
|
||||
pass
|
||||
|
||||
else:
|
||||
try:
|
||||
last_dot = text.rindex('.')
|
||||
parent_context = text[0:last_dot]
|
||||
res = pydevd_vars.eval_in_context(parent_context, self.get_namespace(), self.get_namespace())
|
||||
obj = getattr(res, text[last_dot + 1:])
|
||||
except:
|
||||
pass
|
||||
return obj
|
||||
|
||||
def getDescription(self, text):
|
||||
try:
|
||||
obj = self.__resolve_reference__(text)
|
||||
if obj is None:
|
||||
return ''
|
||||
return get_description(obj)
|
||||
except:
|
||||
return ''
|
||||
|
||||
def do_exec_code(self, code, is_single_line):
|
||||
try:
|
||||
code_fragment = CodeFragment(code, is_single_line)
|
||||
more = self.need_more(code_fragment)
|
||||
if not more:
|
||||
code_fragment = self.buffer
|
||||
self.buffer = None
|
||||
self.exec_queue.put(code_fragment)
|
||||
|
||||
return more
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
def execLine(self, line):
|
||||
return self.do_exec_code(line, True)
|
||||
|
||||
def execMultipleLines(self, lines):
|
||||
if IS_JYTHON:
|
||||
more = False
|
||||
for line in lines.split('\n'):
|
||||
more = self.do_exec_code(line, True)
|
||||
return more
|
||||
else:
|
||||
return self.do_exec_code(lines, False)
|
||||
|
||||
def interrupt(self):
|
||||
self.buffer = None # Also clear the buffer when it's interrupted.
|
||||
try:
|
||||
if self.interruptable:
|
||||
called = False
|
||||
try:
|
||||
# Fix for #PyDev-500: Console interrupt can't interrupt on sleep
|
||||
import os
|
||||
import signal
|
||||
if os.name == 'posix':
|
||||
# On Linux we can't interrupt 0 as in Windows because it's
|
||||
# actually owned by a process -- on the good side, signals
|
||||
# work much better on Linux!
|
||||
os.kill(os.getpid(), signal.SIGINT)
|
||||
called = True
|
||||
|
||||
elif os.name == 'nt':
|
||||
# Stupid windows: sending a Ctrl+C to a process given its pid
|
||||
# is absurdly difficult.
|
||||
# There are utilities to make it work such as
|
||||
# http://www.latenighthacking.com/projects/2003/sendSignal/
|
||||
# but fortunately for us, it seems Python does allow a CTRL_C_EVENT
|
||||
# for the current process in Windows if pid 0 is passed... if we needed
|
||||
# to send a signal to another process the approach would be
|
||||
# much more difficult.
|
||||
# Still, note that CTRL_C_EVENT is only Python 2.7 onwards...
|
||||
# Also, this doesn't seem to be documented anywhere!? (stumbled
|
||||
# upon it by chance after digging quite a lot).
|
||||
os.kill(0, signal.CTRL_C_EVENT)
|
||||
called = True
|
||||
except:
|
||||
# Many things to go wrong (from CTRL_C_EVENT not being there
|
||||
# to failing import signal)... if that's the case, ask for
|
||||
# forgiveness and go on to the approach which will interrupt
|
||||
# the main thread (but it'll only work when it's executing some Python
|
||||
# code -- not on sleep() for instance).
|
||||
pass
|
||||
|
||||
if not called:
|
||||
if hasattr(thread, 'interrupt_main'): # Jython doesn't have it
|
||||
thread.interrupt_main()
|
||||
else:
|
||||
self.mainThread._thread.interrupt() # Jython
|
||||
self.finish_exec(False)
|
||||
return True
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return False
|
||||
|
||||
def close(self):
|
||||
sys.exit(0)
|
||||
|
||||
def start_exec(self):
|
||||
self.interruptable = True
|
||||
|
||||
def get_server(self):
|
||||
if getattr(self, 'host', None) is not None:
|
||||
return xmlrpclib.Server('http://%s:%s' % (self.host, self.client_port))
|
||||
else:
|
||||
return None
|
||||
|
||||
server = property(get_server)
|
||||
|
||||
def ShowConsole(self):
|
||||
server = self.get_server()
|
||||
if server is not None:
|
||||
server.ShowConsole()
|
||||
|
||||
def finish_exec(self, more):
|
||||
self.interruptable = False
|
||||
|
||||
server = self.get_server()
|
||||
|
||||
if server is not None:
|
||||
return server.NotifyFinished(more)
|
||||
else:
|
||||
return True
|
||||
|
||||
def getFrame(self):
|
||||
hidden_ns = self.get_ipython_hidden_vars_dict()
|
||||
xml = "<xml>"
|
||||
xml += pydevd_xml.frame_vars_to_xml(self.get_namespace(), hidden_ns)
|
||||
xml += "</xml>"
|
||||
|
||||
return xml
|
||||
|
||||
def getVariable(self, attributes):
|
||||
xml = "<xml>"
|
||||
valDict = pydevd_vars.resolve_var(self.get_namespace(), attributes)
|
||||
if valDict is None:
|
||||
valDict = {}
|
||||
|
||||
keys = valDict.keys()
|
||||
|
||||
for k in keys:
|
||||
xml += pydevd_vars.var_to_xml(valDict[k], to_string(k))
|
||||
|
||||
xml += "</xml>"
|
||||
|
||||
return xml
|
||||
|
||||
def getArray(self, attr, roffset, coffset, rows, cols, format):
|
||||
name = attr.split("\t")[-1]
|
||||
array = pydevd_vars.eval_in_context(name, self.get_namespace(), self.get_namespace())
|
||||
return pydevd_vars.table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format)
|
||||
|
||||
def evaluate(self, expression):
|
||||
xml = "<xml>"
|
||||
result = pydevd_vars.eval_in_context(expression, self.get_namespace(), self.get_namespace())
|
||||
|
||||
xml += pydevd_vars.var_to_xml(result, expression)
|
||||
|
||||
xml += "</xml>"
|
||||
|
||||
return xml
|
||||
|
||||
def changeVariable(self, attr, value):
|
||||
def do_change_variable():
|
||||
Exec('%s=%s' % (attr, value), self.get_namespace(), self.get_namespace())
|
||||
|
||||
# Important: it has to be really enabled in the main thread, so, schedule
|
||||
# it to run in the main thread.
|
||||
self.exec_queue.put(do_change_variable)
|
||||
|
||||
def _findFrame(self, thread_id, frame_id):
|
||||
'''
|
||||
Used to show console with variables connection.
|
||||
Always return a frame where the locals map to our internal namespace.
|
||||
'''
|
||||
VIRTUAL_FRAME_ID = "1" # matches PyStackFrameConsole.java
|
||||
VIRTUAL_CONSOLE_ID = "console_main" # matches PyThreadConsole.java
|
||||
if thread_id == VIRTUAL_CONSOLE_ID and frame_id == VIRTUAL_FRAME_ID:
|
||||
f = FakeFrame()
|
||||
f.f_globals = {} # As globals=locals here, let's simply let it empty (and save a bit of network traffic).
|
||||
f.f_locals = self.get_namespace()
|
||||
return f
|
||||
else:
|
||||
return self.orig_find_frame(thread_id, frame_id)
|
||||
|
||||
def connectToDebugger(self, debuggerPort, debugger_options=None):
|
||||
'''
|
||||
Used to show console with variables connection.
|
||||
Mainly, monkey-patches things in the debugger structure so that the debugger protocol works.
|
||||
'''
|
||||
|
||||
if debugger_options is None:
|
||||
debugger_options = {}
|
||||
env_key = "PYDEVD_EXTRA_ENVS"
|
||||
if env_key in debugger_options:
|
||||
for (env_name, value) in dict_iter_items(debugger_options[env_key]):
|
||||
os.environ[env_name] = value
|
||||
del debugger_options[env_key]
|
||||
def do_connect_to_debugger():
|
||||
try:
|
||||
# Try to import the packages needed to attach the debugger
|
||||
import pydevd
|
||||
from _pydev_imps._pydev_saved_modules import threading
|
||||
|
||||
except:
|
||||
# This happens on Jython embedded in host eclipse
|
||||
traceback.print_exc()
|
||||
sys.stderr.write('pydevd is not available, cannot connect\n', )
|
||||
|
||||
from _pydev_bundle import pydev_localhost
|
||||
threading.currentThread().__pydevd_id__ = "console_main"
|
||||
|
||||
self.orig_find_frame = pydevd_vars.find_frame
|
||||
pydevd_vars.find_frame = self._findFrame
|
||||
|
||||
self.debugger = pydevd.PyDB()
|
||||
try:
|
||||
pydevd.apply_debugger_options(debugger_options)
|
||||
self.debugger.connect(pydev_localhost.get_localhost(), debuggerPort)
|
||||
self.debugger.prepare_to_run()
|
||||
import pydevd_tracing
|
||||
pydevd_tracing.SetTrace(None)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
sys.stderr.write('Failed to connect to target debugger.\n')
|
||||
|
||||
# Register to process commands when idle
|
||||
self.debugrunning = False
|
||||
try:
|
||||
import pydevconsole
|
||||
pydevconsole.set_debug_hook(self.debugger.process_internal_commands)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
sys.stderr.write('Version of Python does not support debuggable Interactive Console.\n')
|
||||
|
||||
# Important: it has to be really enabled in the main thread, so, schedule
|
||||
# it to run in the main thread.
|
||||
self.exec_queue.put(do_connect_to_debugger)
|
||||
|
||||
return ('connect complete',)
|
||||
|
||||
def hello(self, input_str):
|
||||
# Don't care what the input string is
|
||||
return ("Hello eclipse",)
|
||||
|
||||
def enableGui(self, guiname):
|
||||
''' Enable the GUI specified in guiname (see inputhook for list).
|
||||
As with IPython, enabling multiple GUIs isn't an error, but
|
||||
only the last one's main loop runs and it may not work
|
||||
'''
|
||||
def do_enable_gui():
|
||||
from _pydev_bundle.pydev_versioncheck import versionok_for_gui
|
||||
if versionok_for_gui():
|
||||
try:
|
||||
from pydev_ipython.inputhook import enable_gui
|
||||
enable_gui(guiname)
|
||||
except:
|
||||
sys.stderr.write("Failed to enable GUI event loop integration for '%s'\n" % guiname)
|
||||
traceback.print_exc()
|
||||
elif guiname not in ['none', '', None]:
|
||||
# Only print a warning if the guiname was going to do something
|
||||
sys.stderr.write("PyDev console: Python version does not support GUI event loop integration for '%s'\n" % guiname)
|
||||
# Return value does not matter, so return back what was sent
|
||||
return guiname
|
||||
|
||||
# Important: it has to be really enabled in the main thread, so, schedule
|
||||
# it to run in the main thread.
|
||||
self.exec_queue.put(do_enable_gui)
|
||||
|
||||
def get_ipython_hidden_vars_dict(self):
|
||||
return None
|
||||
|
||||
|
||||
# =======================================================================================================================
|
||||
# FakeFrame
|
||||
# =======================================================================================================================
|
||||
class FakeFrame:
|
||||
'''
|
||||
Used to show console with variables connection.
|
||||
A class to be used as a mock of a frame.
|
||||
'''
|
||||
36
ptvsd/pydevd/_pydev_bundle/pydev_import_hook.py
Normal file
36
ptvsd/pydevd/_pydev_bundle/pydev_import_hook.py
Normal file
|
|
@ -0,0 +1,36 @@
|
|||
|
||||
import sys
|
||||
from types import ModuleType
|
||||
|
||||
|
||||
class ImportHookManager(ModuleType):
|
||||
def __init__(self, name, system_import):
|
||||
ModuleType.__init__(self, name)
|
||||
self._system_import = system_import
|
||||
self._modules_to_patch = {}
|
||||
|
||||
def add_module_name(self, module_name, activate_function):
|
||||
self._modules_to_patch[module_name] = activate_function
|
||||
|
||||
def do_import(self, name, *args, **kwargs):
|
||||
activate_func = None
|
||||
if name in self._modules_to_patch:
|
||||
activate_func = self._modules_to_patch.pop(name)
|
||||
|
||||
module = self._system_import(name, *args, **kwargs)
|
||||
try:
|
||||
if activate_func:
|
||||
activate_func() #call activate function
|
||||
except:
|
||||
sys.stderr.write("Matplotlib support failed\n")
|
||||
return module
|
||||
|
||||
if sys.version_info[0] >= 3:
|
||||
import builtins # py3
|
||||
else:
|
||||
import __builtin__ as builtins
|
||||
|
||||
import_hook_manager = ImportHookManager(__name__ + '.import_hook', builtins.__import__)
|
||||
builtins.__import__ = import_hook_manager.do_import
|
||||
sys.modules[import_hook_manager.__name__] = import_hook_manager
|
||||
del builtins
|
||||
60
ptvsd/pydevd/_pydev_bundle/pydev_imports.py
Normal file
60
ptvsd/pydevd/_pydev_bundle/pydev_imports.py
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
from _pydevd_bundle.pydevd_constants import USE_LIB_COPY, izip

# Python 2/3 (and Jython) compatibility shims: each block tries the
# pydev-saved copy of a module first (when USE_LIB_COPY is set, to be immune
# to user monkey-patching), then the Python-2 name, then the Python-3 name,
# and finally a bundled fallback where one exists.

# xmlrpclib (py2) / xmlrpc.client (py3) / bundled copy.
try:
    try:
        if USE_LIB_COPY:
            from _pydev_imps._pydev_saved_modules import xmlrpclib
        else:
            import xmlrpclib
    except ImportError:
        import xmlrpc.client as xmlrpclib
except ImportError:
    from _pydev_imps import _pydev_xmlrpclib as xmlrpclib

# SimpleXMLRPCServer (py2) / xmlrpc.server (py3) / bundled copy.
try:
    try:
        if USE_LIB_COPY:
            from _pydev_imps._pydev_saved_modules import _pydev_SimpleXMLRPCServer
            from _pydev_SimpleXMLRPCServer import SimpleXMLRPCServer
        else:
            from SimpleXMLRPCServer import SimpleXMLRPCServer
    except ImportError:
        from xmlrpc.server import SimpleXMLRPCServer
except ImportError:
    from _pydev_imps._pydev_SimpleXMLRPCServer import SimpleXMLRPCServer

# StringIO (py2) / io.StringIO (py3).
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

# execfile builtin (py2) / bundled reimplementation (py3).
try:
    execfile=execfile #Not in Py3k
except NameError:
    from _pydev_imps._pydev_execfile import execfile

# Queue (py2) / queue (py3), always exported as _queue.
try:
    if USE_LIB_COPY:
        from _pydev_imps._pydev_saved_modules import _queue
    else:
        import Queue as _queue
except:
    import queue as _queue #@UnresolvedImport

# Exec helper: two implementations because the py2 `exec` statement cannot
# appear in a py3-parsed module (and vice versa).
try:
    from _pydevd_bundle.pydevd_exec import Exec
except:
    from _pydevd_bundle.pydevd_exec2 import Exec

# urllib quoting helpers (py2) / urllib.parse (py3).
try:
    from urllib import quote, quote_plus, unquote_plus
except:
    from urllib.parse import quote, quote_plus, unquote_plus #@UnresolvedImport
|
||||
|
||||
92
ptvsd/pydevd/_pydev_bundle/pydev_ipython_console.py
Normal file
92
ptvsd/pydevd/_pydev_bundle/pydev_ipython_console.py
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
import sys
|
||||
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface
|
||||
|
||||
import os
|
||||
import traceback
|
||||
|
||||
# Uncomment to force PyDev standard shell.
|
||||
# raise ImportError()
|
||||
|
||||
from _pydev_bundle.pydev_ipython_console_011 import get_pydev_frontend
|
||||
from _pydevd_bundle.pydevd_constants import dict_iter_items
|
||||
|
||||
#=======================================================================================================================
|
||||
# InterpreterInterface
|
||||
#=======================================================================================================================
|
||||
class InterpreterInterface(BaseInterpreterInterface):
    '''
    The methods in this class should be registered in the xml-rpc server.
    '''

    def __init__(self, host, client_port, mainThread, show_banner=True):
        # host/client_port identify the IDE-side XML-RPC back channel.
        BaseInterpreterInterface.__init__(self, mainThread)
        self.client_port = client_port
        self.host = host
        self.interpreter = get_pydev_frontend(host, client_port, show_banner=show_banner)
        self._input_error_printed = False
        # Retry state for notify_about_magic: the IDE-side server may not be
        # up yet, so the notification is retried up to notification_max_tries.
        self.notification_succeeded = False
        self.notification_tries = 0
        self.notification_max_tries = 3

        self.notify_about_magic()

    def get_greeting_msg(self):
        # Banner text shown when the console starts (delegates to IPython frontend).
        return self.interpreter.get_greeting_msg()

    def do_add_exec(self, codeFragment):
        # Execute a fragment; returns True when more input is needed to
        # complete the statement, False when it was executed.
        self.notify_about_magic()
        # A trailing '??' triggers IPython's detailed help output; wrap it in
        # markers so the IDE can recognize that output.
        if (codeFragment.text.rstrip().endswith('??')):
            print('IPython-->')
        try:
            res = bool(self.interpreter.add_exec(codeFragment.text))
        finally:
            if (codeFragment.text.rstrip().endswith('??')):
                print('<--IPython')

        return res

    def get_namespace(self):
        # The live user namespace (dict) of the underlying IPython shell.
        return self.interpreter.get_namespace()

    def getCompletions(self, text, act_tok):
        # Merged PyDev + IPython completions (see the frontend for the format).
        return self.interpreter.getCompletions(text, act_tok)

    def close(self):
        sys.exit(0)

    def notify_about_magic(self):
        # Best-effort: tell the IDE which %magic commands exist so it can
        # highlight/complete them. Gives up after notification_max_tries.
        if not self.notification_succeeded:
            self.notification_tries+=1
            if self.notification_tries>self.notification_max_tries:
                return
            completions = self.getCompletions("%", "%")
            magic_commands = [x[0] for x in completions]

            server = self.get_server()

            if server is not None:
                try:
                    server.NotifyAboutMagic(magic_commands, self.interpreter.is_automagic())
                    self.notification_succeeded = True
                except :
                    self.notification_succeeded = False

    def get_ipython_hidden_vars_dict(self):
        # Returns {name: value} of IPython's internal/hidden variables so the
        # variables view can filter them out; returns None on failure.
        try:
            useful_ipython_vars = ['_', '__']
            if hasattr(self.interpreter, 'ipython') and hasattr(self.interpreter.ipython, 'user_ns_hidden'):
                user_ns_hidden = self.interpreter.ipython.user_ns_hidden
                if isinstance(user_ns_hidden, dict):
                    # Since IPython 2 dict `user_ns_hidden` contains hidden variables and values
                    user_hidden_dict = user_ns_hidden
                else:
                    # In IPython 1.x `user_ns_hidden` used to be a set with names of hidden variables
                    user_hidden_dict = dict([(key, val) for key, val in dict_iter_items(self.interpreter.ipython.user_ns)
                                             if key in user_ns_hidden])
                return dict([(key, val) for key, val in dict_iter_items(user_hidden_dict) if key not in useful_ipython_vars])
        except:
            # Getting IPython variables shouldn't break loading frame variables
            traceback.print_exc()
|
||||
|
||||
508
ptvsd/pydevd/_pydev_bundle/pydev_ipython_console_011.py
Normal file
508
ptvsd/pydevd/_pydev_bundle/pydev_ipython_console_011.py
Normal file
|
|
@ -0,0 +1,508 @@
|
|||
# TODO that would make IPython integration better
|
||||
# - show output other times then when enter was pressed
|
||||
# - support proper exit to allow IPython to cleanup (e.g. temp files created with %edit)
|
||||
# - support Ctrl-D (Ctrl-Z on Windows)
|
||||
# - use IPython (numbered) prompts in PyDev
|
||||
# - better integration of IPython and PyDev completions
|
||||
# - some of the semantics on handling the code completion are not correct:
|
||||
# eg: Start a line with % and then type c should give %cd as a completion by it doesn't
|
||||
# however type %c and request completions and %cd is given as an option
|
||||
# eg: Completing a magic when user typed it without the leading % causes the % to be inserted
|
||||
# to the left of what should be the first colon.
|
||||
"""Interface to TerminalInteractiveShell for PyDev Interactive Console frontend
|
||||
for IPython 0.11 to 1.0+.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import os
|
||||
import codeop
|
||||
|
||||
from IPython.core.error import UsageError
|
||||
from IPython.core.completer import IPCompleter
|
||||
from IPython.core.interactiveshell import InteractiveShell, InteractiveShellABC
|
||||
from IPython.core.usage import default_banner_parts
|
||||
from IPython.utils.strdispatch import StrDispatch
|
||||
import IPython.core.release as IPythonRelease
|
||||
try:
|
||||
from IPython.terminal.interactiveshell import TerminalInteractiveShell
|
||||
except ImportError:
|
||||
# Versions of IPython [0.11,1.0) had an extra hierarchy level
|
||||
from IPython.frontend.terminal.interactiveshell import TerminalInteractiveShell
|
||||
try:
|
||||
from traitlets import CBool, Unicode
|
||||
except ImportError:
|
||||
from IPython.utils.traitlets import CBool, Unicode
|
||||
from IPython.core import release
|
||||
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib
|
||||
|
||||
# The PyDev banner currently reuses IPython's default banner parts verbatim.
default_pydev_banner_parts = default_banner_parts

default_pydev_banner = ''.join(default_pydev_banner_parts)
|
||||
|
||||
def show_in_pager(self, strng, *args, **kwargs):
    """Run a string through the pager.

    In PyDev the string is simply printed: the console widget provides
    scrollbars, so no real paging is needed (same behaviour as when
    TERM==dump, see IPython's page.py). Extra args are accepted and
    ignored to stay hook-compatible.
    """
    print(strng)
|
||||
|
||||
def create_editor_hook(pydev_host, pydev_client_port):
    """Return an IPython 'editor' hook that opens files in the PyDev IDE."""

    def call_editor(filename, line=0, wait=True):
        """ Open an editor in PyDev """
        if line is None:
            line = 0

        # Make sure to send an absolute path because unlike most editor hooks
        # we don't launch a process (this mirrors what happens in the zmqshell).
        filename = os.path.abspath(filename)

        # Tell PyDev to open the editor via the XML-RPC back channel.
        editor_server = xmlrpclib.Server('http://%s:%s' % (pydev_host, pydev_client_port))
        editor_server.IPythonEditor(filename, str(line))

        if not wait:
            return
        # Block until the user confirms; raw_input only exists on Python 2.
        try:
            raw_input("Press Enter when done editing:")
        except NameError:
            input("Press Enter when done editing:")

    return call_editor
|
||||
|
||||
|
||||
|
||||
class PyDevIPCompleter(IPCompleter):

    def __init__(self, *args, **kwargs):
        """ Create a Completer that reuses the advanced completion support of PyDev
            in addition to the completion support provided by IPython """
        IPCompleter.__init__(self, *args, **kwargs)
        # Use PyDev for python matches, see getCompletions below.
        # Removing python_matches leaves IPython responsible only for its own
        # additions (magics, aliases, custom completers).
        self.matchers.remove(self.python_matches)
|
||||
|
||||
class PyDevTerminalInteractiveShell(TerminalInteractiveShell):
    # IPython shell subclass adapted to run embedded in the PyDev console:
    # terminal-only features (readline, pager, term title, autoindent) are
    # disabled, colors/banner are tuned for the IDE, and GUI event-loop
    # integration is routed through pydev_ipython.inputhook.
    banner1 = Unicode(default_pydev_banner, config=True,
        help="""The part of the banner to be printed before the profile"""
    )

    # TODO term_title: (can PyDev's title be changed???, see terminal.py for where to inject code, in particular set_term_title as used by %cd)
    # for now, just disable term_title
    term_title = CBool(False)

    # Note in version 0.11 there is no guard in the IPython code about displaying a
    # warning, so with 0.11 you get:
    #  WARNING: Readline services not available or not loaded.
    #  WARNING: The auto-indent feature requires the readline library
    # Disable readline, readline type code is all handled by PyDev (on Java side)
    readline_use = CBool(False)
    # autoindent has no meaning in PyDev (PyDev always handles that on the Java side),
    # and attempting to enable it will print a warning in the absence of readline.
    autoindent = CBool(False)
    # Force console to not give warning about color scheme choice and default to NoColor.
    # TODO It would be nice to enable colors in PyDev but:
    # - The PyDev Console (Eclipse Console) does not support the full range of colors, so the
    #   effect isn't as nice anyway at the command line
    # - If done, the color scheme should default to LightBG, but actually be dependent on
    #   any settings the user has (such as if a dark theme is in use, then Linux is probably
    #   a better theme).
    colors_force = CBool(True)
    colors = Unicode("NoColor")
    # Since IPython 5 the terminal interface is not compatible with Emacs `inferior-shell` and
    # the `simple_prompt` flag is needed
    simple_prompt = CBool(True)

    # In the PyDev Console, GUI control is done via hookable XML-RPC server
    @staticmethod
    def enable_gui(gui=None, app=None):
        """Switch amongst GUI input hooks by name.
        """
        # Deferred import so pydev_ipython is only required when %gui is used.
        from pydev_ipython.inputhook import enable_gui as real_enable_gui
        try:
            return real_enable_gui(gui, app)
        except ValueError as e:
            raise UsageError("%s" % e)

    #-------------------------------------------------------------------------
    # Things related to hooks
    #-------------------------------------------------------------------------

    def init_hooks(self):
        super(PyDevTerminalInteractiveShell, self).init_hooks()
        # Replace IPython's pager with plain printing (see show_in_pager above).
        self.set_hook('show_in_pager', show_in_pager)

    #-------------------------------------------------------------------------
    # Things related to exceptions
    #-------------------------------------------------------------------------

    def showtraceback(self, exc_tuple=None, filename=None, tb_offset=None,
                      exception_only=False):
        # IPython does a lot of clever stuff with Exceptions. However mostly
        # it is related to IPython running in a terminal instead of an IDE.
        # (e.g. it prints out snippets of code around the stack trace)
        # PyDev does a lot of clever stuff too, so leave exception handling
        # with default print_exc that PyDev can parse and do its clever stuff
        # with (e.g. it puts links back to the original source code)
        import traceback;traceback.print_exc()

    #-------------------------------------------------------------------------
    # Things related to text completion
    #-------------------------------------------------------------------------

    # The way to construct an IPCompleter changed in most versions,
    # so we have a custom, per version implementation of the construction

    def _new_completer_011(self):
        # IPython [0.11, 0.12): positional constructor.
        return PyDevIPCompleter(self,
                                self.user_ns,
                                self.user_global_ns,
                                self.readline_omit__names,
                                self.alias_manager.alias_table,
                                self.has_readline)

    def _new_completer_012(self):
        # IPython [0.12, 1.0): keyword constructor with config.
        completer = PyDevIPCompleter(shell=self,
                                     namespace=self.user_ns,
                                     global_namespace=self.user_global_ns,
                                     alias_table=self.alias_manager.alias_table,
                                     use_readline=self.has_readline,
                                     config=self.config,
                                     )
        return completer

    def _new_completer_100(self):
        # IPython [1.0, 2.0): `parent` replaces `config`.
        completer = PyDevIPCompleter(shell=self,
                                     namespace=self.user_ns,
                                     global_namespace=self.user_global_ns,
                                     alias_table=self.alias_manager.alias_table,
                                     use_readline=self.has_readline,
                                     parent=self,
                                     )
        return completer

    def _new_completer_234(self):
        # correct for IPython versions 2.x, 3.x, 4.x (alias_table removed).
        completer = PyDevIPCompleter(shell=self,
                                     namespace=self.user_ns,
                                     global_namespace=self.user_global_ns,
                                     use_readline=self.has_readline,
                                     parent=self,
                                     )
        return completer

    def _new_completer_500(self):
        # IPython 5+: readline support dropped entirely.
        completer = PyDevIPCompleter(shell=self,
                                     namespace=self.user_ns,
                                     global_namespace=self.user_global_ns,
                                     use_readline=False,
                                     parent=self
                                     )
        return completer

    def add_completer_hooks(self):
        # Register IPython's context-sensitive completers (import/from,
        # %run, %cd, %reset) on top of the base completer.
        from IPython.core.completerlib import module_completer, magic_run_completer, cd_completer
        try:
            from IPython.core.completerlib import reset_completer
        except ImportError:
            # reset_completer was added for rel-0.13
            reset_completer = None
        self.configurables.append(self.Completer)

        # Add custom completers to the basic ones built into IPCompleter
        sdisp = self.strdispatchers.get('complete_command', StrDispatch())
        self.strdispatchers['complete_command'] = sdisp
        self.Completer.custom_completers = sdisp

        self.set_hook('complete_command', module_completer, str_key = 'import')
        self.set_hook('complete_command', module_completer, str_key = 'from')
        self.set_hook('complete_command', magic_run_completer, str_key = '%run')
        self.set_hook('complete_command', cd_completer, str_key = '%cd')
        if reset_completer:
            self.set_hook('complete_command', reset_completer, str_key = '%reset')

    def init_completer(self):
        """Initialize the completion machinery.

        This creates a completer that provides the completions that are
        IPython specific. We use this to supplement PyDev's core code
        completions.
        """
        # PyDev uses its own completer and custom hooks so that it uses
        # most completions from PyDev's core completer which provides
        # extra information.
        # See getCompletions for where the two sets of results are merged

        # Pick the constructor matching the running IPython version.
        if IPythonRelease._version_major >= 5:
            self.Completer = self._new_completer_500()
        elif IPythonRelease._version_major >= 2:
            self.Completer = self._new_completer_234()
        elif IPythonRelease._version_major >= 1:
            self.Completer = self._new_completer_100()
        elif IPythonRelease._version_minor >= 12:
            self.Completer = self._new_completer_012()
        else:
            self.Completer = self._new_completer_011()

        self.add_completer_hooks()

        if IPythonRelease._version_major <= 3:
            # Only configure readline if we truly are using readline. IPython can
            # do tab-completion over the network, in GUIs, etc, where readline
            # itself may be absent
            if self.has_readline:
                self.set_readline_completer()

    #-------------------------------------------------------------------------
    # Things related to aliases
    #-------------------------------------------------------------------------

    def init_alias(self):
        # InteractiveShell defines alias's we want, but TerminalInteractiveShell defines
        # ones we don't. So don't use super and instead go right to InteractiveShell
        InteractiveShell.init_alias(self)

    #-------------------------------------------------------------------------
    # Things related to exiting
    #-------------------------------------------------------------------------
    def ask_exit(self):
        """ Ask the shell to exit. Can be overiden and used as a callback. """
        # TODO PyDev's console does not have support from the Python side to exit
        # the console. If user forces the exit (with sys.exit()) then the console
        # simply reports errors. e.g.:
        # >>> import sys
        # >>> sys.exit()
        # Failed to create input stream: Connection refused
        # >>>
        # Console already exited with value: 0 while waiting for an answer.
        # Error stream:
        # Output stream:
        # >>>
        #
        # Alternatively if you use the non-IPython shell this is what happens
        # >>> exit()
        # <type 'exceptions.SystemExit'>:None
        # >>>
        # <type 'exceptions.SystemExit'>:None
        # >>>
        #
        super(PyDevTerminalInteractiveShell, self).ask_exit()
        print('To exit the PyDev Console, terminate the console within IDE.')

    #-------------------------------------------------------------------------
    # Things related to magics
    #-------------------------------------------------------------------------

    def init_magics(self):
        super(PyDevTerminalInteractiveShell, self).init_magics()
        # TODO Any additional magics for PyDev?

# Register with the ABC so isinstance(shell, InteractiveShellABC) holds.
InteractiveShellABC.register(PyDevTerminalInteractiveShell)  # @UndefinedVariable
|
||||
|
||||
#=======================================================================================================================
|
||||
# _PyDevFrontEnd
|
||||
#=======================================================================================================================
|
||||
class _PyDevFrontEnd:
    # Thin frontend around the embedded IPython shell, exposing the
    # operations the PyDev console needs: execution, completion and
    # namespace access.

    version = release.__version__

    def __init__(self, show_banner=True):
        # Create and initialize our IPython instance (reuse a pre-existing
        # singleton if one was already created).
        if hasattr(PyDevTerminalInteractiveShell, '_instance') and PyDevTerminalInteractiveShell._instance is not None:
            self.ipython = PyDevTerminalInteractiveShell._instance
        else:
            self.ipython = PyDevTerminalInteractiveShell.instance()

        if show_banner:
            # Display the IPython banner, this has version info and
            # help info
            self.ipython.show_banner()

        # Multi-line statement accumulation state (see add_exec).
        self._curr_exec_line = 0
        self._curr_exec_lines = []

    def update(self, globals, locals):
        # Rebind the shell's namespaces to the given globals/locals (e.g. a
        # debugged frame), preserving IPython's own bookkeeping entries.
        ns = self.ipython.user_ns

        for ind in ['_oh', '_ih', '_dh', '_sh', 'In', 'Out', 'get_ipython', 'exit', 'quit']:
            try:
                locals[ind] = ns[ind]
            except KeyError:
                pass  # Ignore if it's not there -- #PyDev-817: Error on autocomplete with IPython on interactive console

        self.ipython.user_global_ns.clear()
        self.ipython.user_global_ns.update(globals)
        self.ipython.user_ns = locals

        if hasattr(self.ipython, 'history_manager') and hasattr(self.ipython.history_manager, 'save_thread'):
            self.ipython.history_manager.save_thread.pydev_do_not_trace = True  # don't trace ipython history saving thread

    def complete(self, string):
        # Delegates to IPython's completion; returns its (line, matches)
        # result, or None if completion raised.
        try:
            if string:
                return self.ipython.complete(None, line=string, cursor_pos=string.__len__())
            else:
                return self.ipython.complete(string, string, 0)
        except:
            # Silence completer exceptions
            pass

    def is_complete(self, string):
        # True when `string` parses as a complete statement (more input is
        # not required). Based on IPython 0.10.1.

        if string in ('', '\n'):
            # Prefiltering, eg through ipython0, may return an empty
            # string although some operations have been accomplished. We
            # thus want to consider an empty string as a complete
            # statement.
            return True
        else:
            try:
                # Add line returns here, to make sure that the statement is
                # complete (except if '\' was used).
                # This should probably be done in a different place (like
                # maybe 'prefilter_input' method? For now, this works.
                clean_string = string.rstrip('\n')
                if not clean_string.endswith('\\'):
                    clean_string += '\n\n'

                is_complete = codeop.compile_command(
                    clean_string,
                    "<string>",
                    "exec"
                )
            except Exception:
                # XXX: Hack: return True so that the
                # code gets executed and the error captured.
                is_complete = True
            return is_complete

    def getCompletions(self, text, act_tok):
        # Get completions from IPython and from PyDev and merge the results
        # IPython only gives context free list of completions, while PyDev
        # gives detailed information about completions.
        try:
            TYPE_IPYTHON = '11'
            TYPE_IPYTHON_MAGIC = '12'
            _line, ipython_completions = self.complete(text)

            from _pydev_bundle._pydev_completer import Completer
            completer = Completer(self.get_namespace(), None)
            ret = completer.complete(act_tok)
            append = ret.append
            ip = self.ipython
            pydev_completions = set([f[0] for f in ret])
            for ipython_completion in ipython_completions:

                # PyCharm was not expecting completions with '%'...
                # Could be fixed in the backend, but it's probably better
                # fixing it at PyCharm.
                # if ipython_completion.startswith('%'):
                #     ipython_completion = ipython_completion[1:]

                if ipython_completion not in pydev_completions:
                    pydev_completions.add(ipython_completion)
                    inf = ip.object_inspect(ipython_completion)
                    if inf['type_name'] == 'Magic function':
                        pydev_type = TYPE_IPYTHON_MAGIC
                    else:
                        pydev_type = TYPE_IPYTHON
                    pydev_doc = inf['docstring']
                    if pydev_doc is None:
                        pydev_doc = ''
                    append((ipython_completion, pydev_doc, '', pydev_type))
            return ret
        except:
            import traceback;traceback.print_exc()
            return []

    def get_namespace(self):
        return self.ipython.user_ns

    def clear_buffer(self):
        # Drop any partially accumulated multi-line statement.
        del self._curr_exec_lines[:]

    def add_exec(self, line):
        # Feed one line of input. Returns True when more input is needed to
        # complete the statement, False when the buffered code was executed.
        if self._curr_exec_lines:
            self._curr_exec_lines.append(line)

            buf = '\n'.join(self._curr_exec_lines)

            if self.is_complete(buf):
                self._curr_exec_line += 1
                self.ipython.run_cell(buf)
                del self._curr_exec_lines[:]
                return False  # execute complete (no more)

            return True  # needs more
        else:

            if not self.is_complete(line):
                # Did not execute
                self._curr_exec_lines.append(line)
                return True  # needs more
            else:
                self._curr_exec_line += 1
                self.ipython.run_cell(line, store_history=True)
                # hist = self.ipython.history_manager.output_hist_reprs
                # rep = hist.get(self._curr_exec_line, None)
                # if rep is not None:
                #     print(rep)
                return False  # execute complete (no more)

    def is_automagic(self):
        # Whether magics may be invoked without the leading '%'.
        return self.ipython.automagic

    def get_greeting_msg(self):
        return 'PyDev console: using IPython %s\n' % self.version
|
||||
|
||||
|
||||
# If we have succeeded in importing this module, then monkey patch inputhook
# in IPython to redirect to PyDev's version. This is essential to make
# %gui in 0.11 work (0.12+ fixes it by calling self.enable_gui, which is implemented
# above, instead of inputhook.enable_gui).
# See test_gui (test_pydev_ipython_011.TestRunningCode) which fails on 0.11 without
# this patch
import IPython.lib.inputhook
import pydev_ipython.inputhook
IPython.lib.inputhook.enable_gui = pydev_ipython.inputhook.enable_gui
# In addition to enable_gui, make all publics in pydev_ipython.inputhook replace
# the IPython versions. This enables the examples in IPython's examples/lib/gui-*
# to operate properly because those examples don't use %gui magic and instead
# rely on using the inputhooks directly.
for name in pydev_ipython.inputhook.__all__:
    setattr(IPython.lib.inputhook, name, getattr(pydev_ipython.inputhook, name))
|
||||
|
||||
|
||||
class _PyDevFrontEndContainer:
    # Singleton holder for the _PyDevFrontEnd instance and the last
    # (host, port) pair the editor hook was installed for.
    _instance = None
    _last_host_port = None
|
||||
|
||||
def get_pydev_frontend(pydev_host, pydev_client_port, show_banner=True):
    # Singleton accessor: create the frontend on first use and (re)install
    # the editor hook whenever the host/port pair changes.
    if _PyDevFrontEndContainer._instance is None:
        _PyDevFrontEndContainer._instance = _PyDevFrontEnd(show_banner=show_banner)

    if _PyDevFrontEndContainer._last_host_port != (pydev_host, pydev_client_port):
        _PyDevFrontEndContainer._last_host_port = pydev_host, pydev_client_port

        # Back channel to PyDev to open editors (in the future other
        # info may go back this way. This is the same channel that is
        # used to get stdin, see StdIn in pydev_console_utils)
        _PyDevFrontEndContainer._instance.ipython.hooks['editor'] = create_editor_hook(pydev_host, pydev_client_port)

        # Note: setting the callback directly because setting it with set_hook would actually create a chain instead
        # of ovewriting at each new call).
        # _PyDevFrontEndContainer._instance.ipython.set_hook('editor', create_editor_hook(pydev_host, pydev_client_port))

    return _PyDevFrontEndContainer._instance
|
||||
|
||||
|
||||
23
ptvsd/pydevd/_pydev_bundle/pydev_is_thread_alive.py
Normal file
23
ptvsd/pydevd/_pydev_bundle/pydev_is_thread_alive.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
from _pydev_imps._pydev_saved_modules import threading
|
||||
|
||||
# Hack for https://sw-brainwy.rhcloud.com/tracker/PyDev/363 (i.e.: calling isAlive() can throw AssertionError under some circumstances)
# It is required to debug threads started by start_new_thread in Python 3.4
_temp = threading.Thread()
if hasattr(_temp, '_is_stopped'):  # Python 3.x has this

    def is_thread_alive(t):
        """Return True while thread `t` has not been stopped (private-attr fast path)."""
        try:
            return not t._is_stopped
        except Exception:
            # Fall back to the public API. Note: uses is_alive() instead of
            # the old isAlive() alias, which was removed in Python 3.9
            # (is_alive() exists since Python 2.6, so this is backward-compatible).
            return t.is_alive()

elif hasattr(_temp, '_Thread__stopped'):  # Python 2.7 has this

    def is_thread_alive(t):
        """Return True while thread `t` has not been stopped (private-attr fast path)."""
        try:
            return not t._Thread__stopped
        except Exception:
            return t.is_alive()

else:  # Haven't checked all other versions, so, let's use the regular is_alive call in this case.

    def is_thread_alive(t):
        """Return True while thread `t` is alive (public API)."""
        return t.is_alive()

del _temp
|
||||
64
ptvsd/pydevd/_pydev_bundle/pydev_localhost.py
Normal file
64
ptvsd/pydevd/_pydev_bundle/pydev_localhost.py
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
from _pydevd_bundle import pydevd_constants
|
||||
from _pydev_imps._pydev_saved_modules import socket
|
||||
import sys
|
||||
|
||||
IS_JYTHON = sys.platform.find('java') != -1
|
||||
|
||||
_cache = None

def get_localhost():
    '''
    Should return 127.0.0.1 in ipv4 and ::1 in ipv6

    localhost is not used because on windows vista/windows 7, there can be issues where the resolving doesn't work
    properly and takes a lot of time (had this issue on the pyunit server).

    Using the IP directly solves the problem.
    '''
    #TODO: Needs better investigation!

    global _cache
    if _cache is not None:
        return _cache

    try:
        addr_infos = socket.getaddrinfo("localhost", 80, 0, 0, socket.SOL_TCP)
    except:
        # Ok, some versions of Python don't have getaddrinfo or SOL_TCP... Just consider it 127.0.0.1 in this case.
        _cache = '127.0.0.1'
    else:
        _cache = 'localhost'
        for addr_info in addr_infos:
            if addr_info[4][0] == '127.0.0.1':
                _cache = '127.0.0.1'
                break

    return _cache
|
||||
|
||||
|
||||
def get_socket_names(n_sockets, close=False):
    # Bind n_sockets server sockets on the localhost address and return their
    # (host, port) names. When close=True the sockets are closed before
    # returning (the caller only wants fresh port numbers).
    # NOTE(review): when close=False the bound sockets are not returned and
    # appear to be intentionally left open so the ports stay reserved —
    # confirm against callers.
    socket_names = []
    sockets = []
    for _ in range(n_sockets):
        if IS_JYTHON:
            # Although the option which would be pure java *should* work for Jython, the socket being returned is still 0
            # (i.e.: it doesn't give the local port bound, only the original port, which was 0).
            from java.net import ServerSocket
            sock = ServerSocket(0)
            socket_name = get_localhost(), sock.getLocalPort()
        else:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.bind((get_localhost(), 0))
            socket_name = sock.getsockname()

        sockets.append(sock)
        socket_names.append(socket_name)

    if close:
        for s in sockets:
            s.close()
    return socket_names
|
||||
|
||||
def get_socket_name(close=False):
    '''Convenience wrapper: return a single (host, port) name bound on localhost.'''
    names = get_socket_names(1, close)
    return names[0]
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: print one freshly bound (host, port) pair.
    print(get_socket_name())
|
||||
40
ptvsd/pydevd/_pydev_bundle/pydev_log.py
Normal file
40
ptvsd/pydevd/_pydev_bundle/pydev_log.py
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
import sys
from _pydevd_bundle.pydevd_constants import DebugInfoHolder
from _pydev_imps._pydev_saved_modules import threading
# NOTE(review): threading.currentThread is a deprecated alias of
# threading.current_thread (removed in Python 3.12); presumably kept here for
# compatibility with older interpreters — confirm supported Python range.
currentThread = threading.currentThread


import traceback

# Messages already reported through error_once() (message -> True).
WARN_ONCE_MAP = {}
|
||||
|
||||
def stderr_write(message):
    """Write *message* to stderr, followed by a newline."""
    sys.stderr.write(message + "\n")
|
||||
|
||||
|
||||
def debug(message):
    # Emitted only at the highest verbosity (trace level > 2).
    if DebugInfoHolder.DEBUG_TRACE_LEVEL>2:
        stderr_write(message)


def warn(message):
    # Emitted at trace level > 1.
    if DebugInfoHolder.DEBUG_TRACE_LEVEL>1:
        stderr_write(message)


def info(message):
    # Always emitted, regardless of trace level.
    stderr_write(message)


def error(message, tb=False):
    # tb: when True, also print the current exception traceback.
    stderr_write(message)
    if tb:
        traceback.print_exc()


def error_once(message):
    # Report each distinct message at most once per process.
    if message not in WARN_ONCE_MAP:
        WARN_ONCE_MAP[message] = True
        error(message)
|
||||
|
||||
692
ptvsd/pydevd/_pydev_bundle/pydev_monkey.py
Normal file
692
ptvsd/pydevd/_pydev_bundle/pydev_monkey.py
Normal file
|
|
@ -0,0 +1,692 @@
|
|||
# License: EPL
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
# Python 3 compatibility: xrange does not exist there; alias it to range.
try:
    xrange
except:
    xrange = range
|
||||
|
||||
#===============================================================================
|
||||
# Things that are dependent on having the pydevd debugger
|
||||
#===============================================================================
|
||||
def log_debug(msg):
    # Deferred import: pydev_log pulls in pydevd constants which may not be
    # importable yet when this module is first loaded.
    from _pydev_bundle import pydev_log
    pydev_log.debug(msg)

def log_error_once(msg):
    # Deferred import for the same reason as log_debug.
    from _pydev_bundle import pydev_log
    pydev_log.error_once(msg)
|
||||
|
||||
pydev_src_dir = os.path.dirname(os.path.dirname(__file__))
|
||||
|
||||
def _get_python_c_args(host, port, indC, args):
|
||||
return ("import sys; sys.path.append(r'%s'); import pydevd; "
|
||||
"pydevd.settrace(host='%s', port=%s, suspend=False, trace_only_current_thread=False, patch_multiprocessing=True); "
|
||||
"%s"
|
||||
) % (
|
||||
pydev_src_dir,
|
||||
host,
|
||||
port,
|
||||
args[indC + 1])
|
||||
|
||||
def _get_host_port():
    """Return the ``(host, port)`` pair the debugger backend dispatches on."""
    import pydevd
    return pydevd.dispatch()
|
||||
|
||||
def _is_managed_arg(arg):
|
||||
if arg.endswith('pydevd.py'):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _on_forked_process():
    """Reinitialize pydevd inside a freshly forked child process.

    Marks the current thread as the child's main thread and reconnects the
    tracing machinery via ``settrace_forked``.
    """
    import pydevd
    pydevd.threadingCurrentThread().__pydevd_main_thread = True
    pydevd.settrace_forked()
|
||||
|
||||
def _on_set_trace_for_new_thread(global_debugger):
|
||||
if global_debugger is not None:
|
||||
global_debugger.SetTrace(global_debugger.trace_dispatch, global_debugger.frame_eval_func, global_debugger.dummy_trace_dispatch)
|
||||
|
||||
#===============================================================================
|
||||
# Things related to monkey-patching
|
||||
#===============================================================================
|
||||
def is_python(path):
    """Heuristically decide whether *path* points at a Python interpreter.

    A single surrounding quote character is stripped first; then the lowered
    basename is searched for the well-known interpreter names.
    """
    if path.endswith(("'", '"')):
        path = path[1:-1]
    filename = os.path.basename(path).lower()
    return any(name in filename for name in ('python', 'jython', 'pypy'))
|
||||
|
||||
|
||||
def remove_quotes_from_args(args):
    """On Windows, strip one pair of surrounding double quotes from each arg.

    On other platforms the list is returned unchanged.
    """
    if sys.platform != "win32":
        return args
    stripped = []
    for arg in args:
        if len(arg) > 1 and arg[0] == '"' and arg[-1] == '"':
            arg = arg[1:-1]
        stripped.append(arg)
    return stripped
|
||||
|
||||
|
||||
def quote_args(args):
    """On Windows, double-quote args containing spaces (escaping embedded quotes).

    Args already wrapped in double quotes pass through untouched; on other
    platforms the list is returned unchanged.
    """
    if sys.platform != "win32":
        return args
    quoted = []
    for arg in args:
        already_quoted = arg.startswith('"') and arg.endswith('"')
        if not already_quoted and ' ' in arg:
            arg = '"%s"' % arg.replace('"', '\\"')
        quoted.append(arg)
    return quoted
|
||||
|
||||
|
||||
def patch_args(args):
    """Rewrite a child-process argument list so the child attaches to pydevd.

    Returns either the (possibly quoted) patched list or the original *args*
    when patching is not applicable (non-Python executable, PyZip app, child
    already managed, or any unexpected error).  Best-effort: never raises.
    """
    try:
        log_debug("Patching args: %s"% str(args))
        args = remove_quotes_from_args(args)

        import sys
        new_args = []
        i = 0
        if len(args) == 0:
            return args

        if is_python(args[0]):
            # `python -c <code>`: rewrite the code payload itself.
            try:
                indC = args.index('-c')
            except ValueError:
                indC = -1

            if indC != -1:
                host, port = _get_host_port()

                if port is not None:
                    new_args.extend(args)
                    new_args[indC + 1] = _get_python_c_args(host, port, indC, args)
                    return quote_args(new_args)
            else:
                # Check for Python ZIP Applications and don't patch the args for them.
                # Assumes the first non `-<flag>` argument is what we need to check.
                # There's probably a better way to determine this but it works for most cases.
                continue_next = False
                for i in xrange(1, len(args)):
                    if continue_next:
                        continue_next = False
                        continue

                    arg = args[i]
                    if arg.startswith('-'):
                        # Skip the next arg too if this flag expects a value.
                        continue_next = arg in ['-m', '-W', '-X']
                        continue

                    if arg.rsplit('.')[-1] in ['zip', 'pyz', 'pyzw']:
                        log_debug('Executing a PyZip, returning')
                        return args
                    break

            new_args.append(args[0])
        else:
            log_debug("Process is not python, returning.")
            return args

        i = 1

        # Original args should be something as:
        # ['X:\\pysrc\\pydevd.py', '--multiprocess', '--print-in-debugger-startup',
        # '--vm_type', 'python', '--client', '127.0.0.1', '--port', '56352', '--file', 'x:\\snippet1.py']
        from _pydevd_bundle.pydevd_command_line_handling import setup_to_argv
        from pydevd import SetupHolder
        original = setup_to_argv(SetupHolder.setup) + ['--file']
        # Copy interpreter flags over, noting '-m' so pydevd runs a module.
        while i < len(args):
            if args[i] == '-m':
                # Always insert at pos == 1 (i.e.: pydevd "--module" --multiprocess ...)
                original.insert(1, '--module')
            else:
                if args[i].startswith('-'):
                    new_args.append(args[i])
                else:
                    break
            i += 1

        # Note: undoing https://github.com/Elizaveta239/PyDev.Debugger/commit/053c9d6b1b455530bca267e7419a9f63bf51cddf
        # (i >= len(args) instead of i < len(args))
        # in practice it'd raise an exception here and would return original args, which is not what we want... providing
        # a proper fix for https://youtrack.jetbrains.com/issue/PY-9767 elsewhere.
        if i < len(args) and _is_managed_arg(args[i]): # no need to add pydevd twice
            return args

        # Splice in the pydevd launcher arguments up to (and including) '--file'.
        for x in original:
            new_args.append(x)
            if x == '--file':
                break

        # Then append the user's script and its own arguments untouched.
        while i < len(args):
            new_args.append(args[i])
            i += 1

        return quote_args(new_args)
    except:
        traceback.print_exc()
        return args
|
||||
|
||||
|
||||
def str_to_args_windows(args):
    """Split a Windows command-line string into an argument list.

    Implements the MSVCRT quoting rules (see
    http://msdn.microsoft.com/en-us/library/a1y7w461.aspx): backslashes are
    literal unless they precede a double quote, 2n backslashes + quote yield
    n backslashes and toggle quoting, 2n+1 backslashes + quote yield n
    backslashes and a literal quote, and two consecutive quotes inside a
    quoted argument are a single literal quote.

    Bug fix vs. the previous version: the doubled-quote case used
    ``i += 1`` inside a ``for i in xrange(...)`` loop, which never actually
    skipped the second quote (the loop variable is reassigned on the next
    iteration), so '"a"" b"' was mis-split into ['a"', 'b'] instead of
    ['a" b'].  A ``while`` loop with an explicit index fixes the skip (and
    removes the reliance on the module-level ``xrange`` shim).
    """
    result = []

    # Parser states.
    DEFAULT = 0           # between arguments
    ARG = 1               # inside an unquoted argument
    IN_DOUBLE_QUOTE = 2   # inside a double-quoted argument

    state = DEFAULT
    backslashes = 0       # pending run of backslashes not yet flushed to buf
    buf = ''

    args_len = len(args)
    i = 0
    while i < args_len:
        ch = args[i]
        i += 1
        if ch == '\\':
            backslashes += 1
            continue
        elif backslashes != 0:
            if ch == '"':
                # 2n backslashes + quote -> n backslashes, quote is special.
                while backslashes >= 2:
                    backslashes -= 2
                    buf += '\\'
                if backslashes == 1:
                    # Odd count: the quote is escaped (literal).
                    if state == DEFAULT:
                        state = ARG

                    buf += '"'
                    backslashes = 0
                    continue
                # else fall through to the state machine below
            else:
                # false alarm, treat passed backslashes literally...
                if state == DEFAULT:
                    state = ARG

                while backslashes > 0:
                    backslashes -= 1
                    buf += '\\'
                # fall through to the state machine below

        if ch in (' ', '\t'):
            if state == DEFAULT:
                # skip whitespace between args
                continue
            elif state == ARG:
                state = DEFAULT
                result.append(buf)
                buf = ''
                continue
            # inside quotes whitespace is literal; handled below

        if state in (DEFAULT, ARG):
            if ch == '"':
                state = IN_DOUBLE_QUOTE
            else:
                state = ARG
                buf += ch

        elif state == IN_DOUBLE_QUOTE:
            if ch == '"':
                if i < args_len and args[i] == '"':
                    # Undocumented feature in Windows:
                    # Two consecutive double quotes inside a double-quoted argument are interpreted as
                    # a single double quote.
                    buf += '"'
                    i += 1  # actually consume the second quote (was broken before)
                elif len(buf) == 0:
                    # empty string on Windows platform. Account for bug in constructor of
                    # JDK's java.lang.ProcessImpl.
                    result.append("\"\"")
                    state = DEFAULT
                else:
                    state = ARG
            else:
                buf += ch

        else:
            raise RuntimeError('Illegal condition')

    if len(buf) > 0 or state != DEFAULT:
        result.append(buf)

    return result
|
||||
|
||||
|
||||
def patch_arg_str_win(arg_str):
    """Patch a full Windows command-line *string* so the child attaches to pydevd.

    Returns *arg_str* unchanged when it is empty or not a Python invocation.
    """
    args = str_to_args_windows(arg_str)
    # Fix https://youtrack.jetbrains.com/issue/PY-9767 (args may be empty)
    if not args or not is_python(args[0]):
        return arg_str
    patched = ' '.join(patch_args(args))
    log_debug("New args: %s" % patched)
    return patched
|
||||
|
||||
def monkey_patch_module(module, funcname, create_func):
    """Replace ``module.<funcname>`` with ``create_func('original_<funcname>')``.

    The original callable is stashed on the module under
    ``original_<funcname>`` exactly once, so re-patching keeps delegating to
    the true original.  A module without *funcname* is left untouched.
    """
    if not hasattr(module, funcname):
        return
    original_name = 'original_' + funcname
    if not hasattr(module, original_name):
        setattr(module, original_name, getattr(module, funcname))
    setattr(module, funcname, create_func(original_name))
|
||||
|
||||
|
||||
def monkey_patch_os(funcname, create_func):
    """Shorthand for ``monkey_patch_module`` applied to the ``os`` module."""
    monkey_patch_module(os, funcname, create_func)
|
||||
|
||||
|
||||
def warn_multiproc():
    """Emit (once per process) the warning that child processes won't be debugged."""
    log_error_once(
        "pydev debugger: New process is launching (breakpoints won't work in the new process).\n"
        "pydev debugger: To debug that process please enable 'Attach to subprocess automatically while debugging?' option in the debugger settings.\n")
|
||||
|
||||
|
||||
def create_warn_multiproc(original_name):
    """Factory: wrap ``os.<original_name>`` to warn about subprocess debugging
    before delegating to the stashed original."""

    def new_warn_multiproc(*args):
        import os

        warn_multiproc()

        return getattr(os, original_name)(*args)
    return new_warn_multiproc
|
||||
|
||||
def create_execl(original_name):
    """Factory for a patched exec* variant with positional argument lists.

    The returned function rewrites the argument list via ``patch_args`` so
    the replacement process attaches to pydevd, announces the new process,
    then delegates to the stashed original.
    """
    def new_execl(path, *args):
        """
        os.execl(path, arg0, arg1, ...)
        os.execle(path, arg0, arg1, ..., env)
        os.execlp(file, arg0, arg1, ...)
        os.execlpe(file, arg0, arg1, ..., env)
        """
        import os
        rewritten = patch_args(args)
        send_process_created_message()
        return getattr(os, original_name)(path, *rewritten)
    return new_execl
|
||||
|
||||
|
||||
def create_execv(original_name):
    """Factory for a patched exec* variant taking an argument vector."""
    def new_execv(path, args):
        """
        os.execv(path, args)
        os.execvp(file, args)
        """
        import os
        send_process_created_message()
        original = getattr(os, original_name)
        return original(path, patch_args(args))
    return new_execv
|
||||
|
||||
|
||||
def create_execve(original_name):
    """Factory for a patched exec* variant taking a vector and an environment.

    os.execve(path, args, env)
    os.execvpe(file, args, env)
    """
    def new_execve(path, args, env):
        import os
        send_process_created_message()
        original = getattr(os, original_name)
        return original(path, patch_args(args), env)
    return new_execve
|
||||
|
||||
|
||||
def create_spawnl(original_name):
    """Factory for a patched spawn* variant with positional argument lists."""
    def new_spawnl(mode, path, *args):
        """
        os.spawnl(mode, path, arg0, arg1, ...)
        os.spawnlp(mode, file, arg0, arg1, ...)
        """
        import os
        rewritten = patch_args(args)
        send_process_created_message()
        return getattr(os, original_name)(mode, path, *rewritten)
    return new_spawnl
|
||||
|
||||
|
||||
def create_spawnv(original_name):
    """Factory for a patched spawn* variant taking an argument vector."""
    def new_spawnv(mode, path, args):
        """
        os.spawnv(mode, path, args)
        os.spawnvp(mode, file, args)
        """
        import os
        send_process_created_message()
        original = getattr(os, original_name)
        return original(mode, path, patch_args(args))
    return new_spawnv
|
||||
|
||||
|
||||
def create_spawnve(original_name):
    """Factory for a patched spawn* variant taking a vector and an environment.

    os.spawnve(mode, path, args, env)
    os.spawnvpe(mode, file, args, env)
    """
    def new_spawnve(mode, path, args, env):
        import os
        send_process_created_message()
        original = getattr(os, original_name)
        return original(mode, path, patch_args(args), env)
    return new_spawnve
|
||||
|
||||
|
||||
def create_fork_exec(original_name):
    """Factory for a patched ``_posixsubprocess.fork_exec``.

    _posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more))
    """
    def new_fork_exec(args, *other_args):
        import _posixsubprocess  # @UnresolvedImport
        rewritten = patch_args(args)
        send_process_created_message()
        return getattr(_posixsubprocess, original_name)(rewritten, *other_args)
    return new_fork_exec
|
||||
|
||||
|
||||
def create_warn_fork_exec(original_name):
    """Factory for a warn-only wrapper of ``_posixsubprocess.fork_exec``.

    _posixsubprocess.fork_exec(args, executable_list, close_fds, ... (13 more))
    """
    def new_warn_fork_exec(*args):
        # NOTE(review): the bare except also swallows failures of fork_exec
        # itself, in which case None is returned silently — preserved as-is
        # from the original best-effort behavior; confirm this is intended.
        try:
            import _posixsubprocess
            warn_multiproc()
            return getattr(_posixsubprocess, original_name)(*args)
        except:
            pass
    return new_warn_fork_exec
|
||||
|
||||
|
||||
def create_CreateProcess(original_name):
    """Factory for a patched Windows ``CreateProcess``.

    CreateProcess(*args, **kwargs)
    """
    def new_CreateProcess(app_name, cmd_line, *args):
        # _subprocess (Py2) was renamed _winapi on Py3.
        try:
            import _subprocess
        except ImportError:
            import _winapi as _subprocess
        send_process_created_message()
        patched_cmd_line = patch_arg_str_win(cmd_line)
        return getattr(_subprocess, original_name)(app_name, patched_cmd_line, *args)
    return new_CreateProcess
|
||||
|
||||
|
||||
def create_CreateProcessWarnMultiproc(original_name):
    """Factory for a warn-only wrapper of Windows ``CreateProcess``.

    CreateProcess(*args, **kwargs)
    """
    def new_CreateProcess(*args):
        # _subprocess (Py2) was renamed _winapi on Py3.
        try:
            import _subprocess
        except ImportError:
            import _winapi as _subprocess
        warn_multiproc()
        return getattr(_subprocess, original_name)(*args)
    return new_CreateProcess
|
||||
|
||||
|
||||
def create_fork(original_name):
    """Factory for a patched ``os.fork`` that reattaches pydevd in the child.

    The stack is inspected to detect a fork issued from ``subprocess.Popen``;
    in that case the child may exec a non-Python executable and should not be
    reattached.
    """
    def new_fork():
        import os

        # A simple fork will result in a new python process
        is_new_python_process = True
        frame = sys._getframe()

        while frame is not None:
            if frame.f_code.co_name == '_execute_child' and 'subprocess' in frame.f_code.co_filename:
                # If we're actually in subprocess.Popen creating a child, it may
                # result in something which is not a Python process, (so, we
                # don't want to connect with it in the forked version).
                executable = frame.f_locals.get('executable')
                if executable is not None:
                    is_new_python_process = False
                    if is_python(executable):
                        is_new_python_process = True
                break

            frame = frame.f_back
        frame = None  # Just make sure we don't hold on to it.

        child_process = getattr(os, original_name)()  # fork
        if not child_process:
            # In the child: reattach the debugger.
            if is_new_python_process:
                _on_forked_process()
        else:
            # In the parent: announce the new process to the frontend.
            if is_new_python_process:
                send_process_created_message()
        return child_process
    return new_fork
|
||||
|
||||
|
||||
def send_process_created_message():
    """Notify the attached frontend that a new process was created (no-op
    when no debugger is active)."""
    from _pydevd_bundle.pydevd_comm import get_global_debugger
    debugger = get_global_debugger()
    if debugger is None:
        return
    debugger.send_process_created_message()
|
||||
|
||||
|
||||
def patch_new_process_functions():
    """Patch the process-creation entry points so child Python processes are
    launched with pydevd attached (multiprocess debugging)."""
    # exec* family:
    # os.execl(path, arg0, arg1, ...) / execle / execlp / execlpe
    # os.execv(path, args) / execve / execvp / execvpe
    for name in ('execl', 'execle', 'execlp', 'execlpe'):
        monkey_patch_os(name, create_execl)
    monkey_patch_os('execv', create_execv)
    monkey_patch_os('execve', create_execve)
    monkey_patch_os('execvp', create_execv)
    monkey_patch_os('execvpe', create_execve)

    # spawn* family:
    # os.spawnl(mode, path, ...) / spawnle / spawnlp / spawnlpe
    # os.spawnv(mode, path, args) / spawnve / spawnvp / spawnvpe
    for name in ('spawnl', 'spawnle', 'spawnlp', 'spawnlpe'):
        monkey_patch_os(name, create_spawnl)
    monkey_patch_os('spawnv', create_spawnv)
    monkey_patch_os('spawnve', create_spawnve)
    monkey_patch_os('spawnvp', create_spawnv)
    monkey_patch_os('spawnvpe', create_spawnve)

    if sys.platform != 'win32':
        monkey_patch_os('fork', create_fork)
        try:
            import _posixsubprocess
            monkey_patch_module(_posixsubprocess, 'fork_exec', create_fork_exec)
        except ImportError:
            pass
    else:
        # Windows: subprocess creation goes through CreateProcess.
        try:
            import _subprocess
        except ImportError:
            import _winapi as _subprocess
        monkey_patch_module(_subprocess, 'CreateProcess', create_CreateProcess)
|
||||
|
||||
|
||||
def patch_new_process_functions_with_warning():
    """Patch the process-creation entry points to merely warn that the
    debugger will not follow into child processes."""
    for name in ('execl', 'execle', 'execlp', 'execlpe',
                 'execv', 'execve', 'execvp', 'execvpe',
                 'spawnl', 'spawnle', 'spawnlp', 'spawnlpe',
                 'spawnv', 'spawnve', 'spawnvp', 'spawnvpe'):
        monkey_patch_os(name, create_warn_multiproc)

    if sys.platform != 'win32':
        monkey_patch_os('fork', create_warn_multiproc)
        try:
            import _posixsubprocess
            monkey_patch_module(_posixsubprocess, 'fork_exec', create_warn_fork_exec)
        except ImportError:
            pass
    else:
        # Windows
        try:
            import _subprocess
        except ImportError:
            import _winapi as _subprocess
        monkey_patch_module(_subprocess, 'CreateProcess', create_CreateProcessWarnMultiproc)
|
||||
|
||||
|
||||
class _NewThreadStartupWithTrace:
    """Callable wrapper for a thread target that installs the debugger's
    trace function in the new thread before invoking the original target."""

    def __init__(self, original_func, args, kwargs):
        self.original_func = original_func
        self.args = args
        self.kwargs = kwargs
        self.global_debugger = self.get_debugger()

    def get_debugger(self):
        from _pydevd_bundle.pydevd_comm import get_global_debugger
        return get_global_debugger()

    def __call__(self):
        debugger = self.global_debugger
        _on_set_trace_for_new_thread(debugger)

        if debugger is not None and debugger.thread_analyser is not None:
            # we can detect start_new_thread only here
            try:
                from pydevd_concurrency_analyser.pydevd_concurrency_logger import log_new_thread
                log_new_thread(debugger)
            except:
                sys.stderr.write("Failed to detect new thread for visualization")

        return self.original_func(*self.args, **self.kwargs)
|
||||
|
||||
|
||||
class _NewThreadStartupWithoutTrace:
|
||||
|
||||
def __init__(self, original_func, args, kwargs):
|
||||
self.original_func = original_func
|
||||
self.args = args
|
||||
self.kwargs = kwargs
|
||||
|
||||
def __call__(self):
|
||||
return self.original_func(*self.args, **self.kwargs)
|
||||
|
||||
_UseNewThreadStartup = _NewThreadStartupWithTrace
|
||||
|
||||
|
||||
def _get_threading_modules_to_patch():
|
||||
threading_modules_to_patch = []
|
||||
|
||||
try:
|
||||
import thread as _thread
|
||||
except:
|
||||
import _thread
|
||||
threading_modules_to_patch.append(_thread)
|
||||
|
||||
return threading_modules_to_patch
|
||||
|
||||
threading_modules_to_patch = _get_threading_modules_to_patch()
|
||||
|
||||
|
||||
def patch_thread_module(thread):
    """Replace *thread*.start_new_thread (and .start_new) with a wrapper that
    runs new threads through ``_UseNewThreadStartup`` so they get traced.

    The true original is stashed once as ``_original_start_new_thread`` so
    repeated patching keeps delegating to it.
    """

    if getattr(thread, '_original_start_new_thread', None) is None:
        _original_start_new_thread = thread._original_start_new_thread = thread.start_new_thread
    else:
        _original_start_new_thread = thread._original_start_new_thread

    class ClassWithPydevStartNewThread:

        def pydev_start_new_thread(self, function, args=(), kwargs={}):
            '''
            We need to replace the original thread.start_new_thread with this function so that threads started
            through it and not through the threading module are properly traced.
            '''
            return _original_start_new_thread(_UseNewThreadStartup(function, args, kwargs), ())

    # This is a hack for the situation where the thread.start_new_thread is declared inside a class, such as the one below
    # class F(object):
    #    start_new_thread = thread.start_new_thread
    #
    #    def start_it(self):
    #        self.start_new_thread(self.function, args, kwargs)
    # So, if it's an already bound method, calling self.start_new_thread won't really receive a different 'self' -- it
    # does work in the default case because in builtins self isn't passed either.
    pydev_start_new_thread = ClassWithPydevStartNewThread().pydev_start_new_thread

    try:
        # We need to replace the original thread.start_new_thread with this function so that threads started through
        # it and not through the threading module are properly traced.
        thread.start_new_thread = pydev_start_new_thread
        thread.start_new = pydev_start_new_thread
    except:
        pass
|
||||
|
||||
|
||||
def patch_thread_modules():
    """Apply ``patch_thread_module`` to every known low-level thread module."""
    for mod in threading_modules_to_patch:
        patch_thread_module(mod)
|
||||
|
||||
|
||||
def undo_patch_thread_modules():
    """Restore the original ``start_new_thread``/``start_new`` on every patched
    thread module (best-effort; modules never patched are skipped)."""
    for mod in threading_modules_to_patch:
        for attr in ('start_new_thread', 'start_new'):
            try:
                setattr(mod, attr, mod._original_start_new_thread)
            except:
                pass
|
||||
|
||||
|
||||
def disable_trace_thread_modules():
    """Temporarily stop tracing threads created with thread.start_new_thread."""
    global _UseNewThreadStartup
    _UseNewThreadStartup = _NewThreadStartupWithoutTrace
|
||||
|
||||
|
||||
def enable_trace_thread_modules():
    """Resume tracing threads created with thread.start_new_thread."""
    global _UseNewThreadStartup
    _UseNewThreadStartup = _NewThreadStartupWithTrace
|
||||
|
||||
|
||||
def get_original_start_new_thread(threading_module):
    """Return the unpatched ``start_new_thread`` of *threading_module*,
    falling back to the current attribute when the module was never patched."""
    if hasattr(threading_module, '_original_start_new_thread'):
        return threading_module._original_start_new_thread
    return threading_module.start_new_thread
|
||||
190
ptvsd/pydevd/_pydev_bundle/pydev_monkey_qt.py
Normal file
190
ptvsd/pydevd/_pydev_bundle/pydev_monkey_qt.py
Normal file
|
|
@ -0,0 +1,190 @@
|
|||
from __future__ import nested_scopes
|
||||
import os
|
||||
|
||||
def set_trace_in_qt():
    """Install the debugger's trace function in the current Qt thread, if a
    debugger is active (no-op otherwise)."""
    import pydevd_tracing
    from _pydevd_bundle.pydevd_comm import get_global_debugger
    debugger = get_global_debugger()
    if debugger is None:
        return
    pydevd_tracing.SetTrace(debugger.trace_dispatch, debugger.frame_eval_func)
|
||||
|
||||
|
||||
_patched_qt = False  # module-level guard: patching must happen at most once
def patch_qt(qt_support_mode):
    '''
    This method patches qt (PySide, PyQt4, PyQt5) so that we have hooks to set the tracing for QThread.
    '''
    if not qt_support_mode:
        return

    if qt_support_mode is True or qt_support_mode == 'True':
        # do not break backward compatibility
        qt_support_mode = 'auto'

    if qt_support_mode == 'auto':
        # Environment override takes precedence over auto-detection.
        qt_support_mode = os.getenv('PYDEVD_PYQT_MODE', 'auto')

    # Avoid patching more than once
    global _patched_qt
    if _patched_qt:
        return

    _patched_qt = True

    if qt_support_mode == 'auto':

        patch_qt_on_import = None
        # Auto-detect: try PySide, then PyQt5, then PyQt4; give up silently.
        try:
            import PySide  # @UnresolvedImport @UnusedImport
            qt_support_mode = 'pyside'
        except:
            try:
                import PyQt5  # @UnresolvedImport @UnusedImport
                qt_support_mode = 'pyqt5'
            except:
                try:
                    import PyQt4  # @UnresolvedImport @UnusedImport
                    qt_support_mode = 'pyqt4'
                except:
                    return

    if qt_support_mode == 'pyside':
        import PySide.QtCore  # @UnresolvedImport
        _internal_patch_qt(PySide.QtCore, qt_support_mode)

    elif qt_support_mode == 'pyqt5':
        import PyQt5.QtCore  # @UnresolvedImport
        _internal_patch_qt(PyQt5.QtCore)

    elif qt_support_mode == 'pyqt4':
        # Ok, we have an issue here:
        # PyDev-452: Selecting PyQT API version using sip.setapi fails in debug mode
        # http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
        # Mostly, if the user uses a different API version (i.e.: v2 instead of v1),
        # that has to be done before importing PyQt4 modules (PySide/PyQt5 don't have this issue
        # as they only implements v2).
        patch_qt_on_import = 'PyQt4'
        def get_qt_core_module():
            import PyQt4.QtCore  # @UnresolvedImport
            return PyQt4.QtCore
        _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module)

    else:
        raise ValueError('Unexpected qt support mode: %s' % (qt_support_mode,))
|
||||
|
||||
|
||||
def _patch_import_to_patch_pyqt_on_import(patch_qt_on_import, get_qt_core_module):
    """Defer patching of PyQt4 until the user actually imports it, by hooking
    ``builtins.__import__`` (needed so sip.setapi can still run first)."""
    # I don't like this approach very much as we have to patch __import__, but I like even less
    # asking the user to configure something in the client side...
    # So, our approach is to patch PyQt4 right before the user tries to import it (at which
    # point he should've set the sip api version properly already anyways).

    dotted = patch_qt_on_import + '.'
    original_import = __import__

    from _pydev_imps._pydev_sys_patch import patch_sys_module, patch_reload, cancel_patches_in_sys_module

    patch_sys_module()
    patch_reload()

    def patched_import(name, *args, **kwargs):
        if patch_qt_on_import == name or name.startswith(dotted):
            # First touch of the target package: restore the import machinery
            # and patch Qt exactly once.
            builtins.__import__ = original_import
            cancel_patches_in_sys_module()
            _internal_patch_qt(get_qt_core_module())  # Patch it only when the user would import the qt module
        return original_import(name, *args, **kwargs)

    import sys
    if sys.version_info[0] >= 3:
        import builtins  # Py3
    else:
        import __builtin__ as builtins

    builtins.__import__ = patched_import
|
||||
|
||||
|
||||
def _internal_patch_qt(QtCore, qt_support_mode='auto'):
    """Wrap QtCore.QThread / QtCore.QRunnable so that code run in Qt threads
    has the debugger's trace function installed (via ``set_trace_in_qt``)."""
    _original_thread_init = QtCore.QThread.__init__
    _original_runnable_init = QtCore.QRunnable.__init__
    _original_QThread = QtCore.QThread

    class FuncWrapper:
        # Wraps a slot so the trace function is set before it runs.

        def __init__(self, original):
            self._original = original

        def __call__(self, *args, **kwargs):
            set_trace_in_qt()
            return self._original(*args, **kwargs)

    class StartedSignalWrapper(QtCore.QObject):  # Wrapper for the QThread.started signal

        try:
            _signal = QtCore.Signal()  # @UndefinedVariable
        except:
            _signal = QtCore.pyqtSignal()  # @UndefinedVariable

        def __init__(self, thread, original_started):
            QtCore.QObject.__init__(self)
            self.thread = thread
            self.original_started = original_started
            if qt_support_mode == 'pyside':
                self._signal = original_started
            else:
                self._signal.connect(self._on_call)
                self.original_started.connect(self._signal)

        def connect(self, func, *args, **kwargs):
            if qt_support_mode == 'pyside':
                return self._signal.connect(FuncWrapper(func), *args, **kwargs)
            else:
                return self._signal.connect(func, *args, **kwargs)

        def disconnect(self, *args, **kwargs):
            return self._signal.disconnect(*args, **kwargs)

        def emit(self, *args, **kwargs):
            return self._signal.emit(*args, **kwargs)

        def _on_call(self, *args, **kwargs):
            set_trace_in_qt()

    class ThreadWrapper(QtCore.QThread):  # Wrapper for QThread

        def __init__(self, *args, **kwargs):
            _original_thread_init(self, *args, **kwargs)

            # In PyQt5 the program hangs when we try to call original run method of QThread class.
            # So we need to distinguish instances of QThread class and instances of QThread inheritors.
            if self.__class__.run == _original_QThread.run:
                self.run = self._exec_run
            else:
                self._original_run = self.run
                self.run = self._new_run
            self._original_started = self.started
            self.started = StartedSignalWrapper(self, self.started)

        def _exec_run(self):
            # Plain QThread: enter the event loop after enabling tracing.
            set_trace_in_qt()
            self.exec_()
            return None

        def _new_run(self):
            # Subclassed QThread: delegate to the user's run after tracing.
            set_trace_in_qt()
            return self._original_run()

    class RunnableWrapper(QtCore.QRunnable):  # Wrapper for QRunnable

        def __init__(self, *args, **kwargs):
            _original_runnable_init(self, *args, **kwargs)

            self._original_run = self.run
            self.run = self._new_run

        def _new_run(self):
            set_trace_in_qt()
            return self._original_run()

    QtCore.QThread = ThreadWrapper
    QtCore.QRunnable = RunnableWrapper
|
||||
49
ptvsd/pydevd/_pydev_bundle/pydev_override.py
Normal file
49
ptvsd/pydevd/_pydev_bundle/pydev_override.py
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
def overrides(method):
    '''
    Marker used to document that a method overrides *method* of a parent class.

    Originally intended as a decorator:

        class B:
            @overrides(A.m1)
            def m1(self):
                pass

    but for compatibility with Jython 2.1 (which has no decorator syntax) it is
    used as a plain statement before the method, so no checking is performed --
    it only asserts that the parent name was properly loaded:

        class B:
            overrides(A.m1)
            def m1(self):
                pass
    '''
    return
|
||||
|
||||
def implements(method):
    '''
    Marker used to document that a method implements *method* of an interface.
    Performs no checking (see ``overrides`` for the rationale).
    '''
    return
|
||||
172
ptvsd/pydevd/_pydev_bundle/pydev_umd.py
Normal file
172
ptvsd/pydevd/_pydev_bundle/pydev_umd.py
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
"""
|
||||
The UserModuleDeleter and runfile methods are copied from
|
||||
Spyder and carry their own license agreement.
|
||||
http://code.google.com/p/spyderlib/source/browse/spyderlib/widgets/externalshell/sitecustomize.py
|
||||
|
||||
Spyder License Agreement (MIT License)
|
||||
--------------------------------------
|
||||
|
||||
Copyright (c) 2009-2012 Pierre Raybaut
|
||||
|
||||
Permission is hereby granted, free of charge, to any person
|
||||
obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without
|
||||
restriction, including without limitation the rights to use,
|
||||
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the
|
||||
Software is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
||||
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
||||
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
# The following classes and functions are mainly intended to be used from
|
||||
# an interactive Python session
|
||||
class UserModuleDeleter:
    """
    User Module Deleter (UMD) aims at deleting user modules
    to force Python to deeply reload them during import.

    pathlist [list]: blacklist in terms of module path
    namelist [list]: blacklist in terms of module name
    """

    def __init__(self, namelist=None, pathlist=None):
        if namelist is None:
            namelist = []
        self.namelist = namelist
        if pathlist is None:
            pathlist = []
        self.pathlist = pathlist
        try:
            # Blacklist all files in org.python.pydev/pysrc so the debugger's
            # own support modules are never deleted.
            import pydev_pysrc, inspect
            self.pathlist.append(os.path.dirname(pydev_pysrc.__file__))
        except:
            # pydev_pysrc is only importable inside a pydev environment;
            # this is best-effort.
            pass
        # Snapshot of the modules loaded before the UMD took over; only
        # modules imported after this point are candidates for deletion.
        self.previous_modules = list(sys.modules.keys())

    def is_module_blacklisted(self, modname, modpath):
        """Return a truthy value when *modname*/*modpath* must NOT be deleted.

        A module is blacklisted when it lives under the interpreter prefix
        (or any path in self.pathlist), or when any component of its dotted
        name appears in self.namelist.
        """
        for path in [sys.prefix] + self.pathlist:
            if modpath.startswith(path):
                return True
        else:
            return set(modname.split('.')) & set(self.namelist)

    def run(self, verbose=False):
        """
        Del user modules to force Python to deeply reload them

        Do not del modules which are considered as system modules, i.e.
        modules installed in subdirectories of Python interpreter's binary
        Do not del C modules
        """
        log = []
        modules_copy = dict(sys.modules)
        for modname, module in modules_copy.items():
            # (Removed leftover debug code that printed the module and the
            # whole previous_modules list when modname == 'aaaaa'.)
            if modname not in self.previous_modules:
                modpath = getattr(module, '__file__', None)
                if modpath is None:
                    # *module* is a C module that is statically linked into the
                    # interpreter. There is no way to know its path, so we
                    # choose to ignore it.
                    continue
                if not self.is_module_blacklisted(modname, modpath):
                    log.append(modname)
                    del sys.modules[modname]
        if verbose and log:
            print("\x1b[4;33m%s\x1b[24m%s\x1b[0m" % ("UMD has deleted",
                                                     ": " + ", ".join(log)))
|
||||
|
||||
# Singleton UserModuleDeleter instance; created lazily by runfile().
__umd__ = None

# Optional callback returning the interpreter's globals namespace,
# installed by the hosting environment via _set_globals_function().
_get_globals_callback = None


def _set_globals_function(get_globals):
    """Register *get_globals* as the provider of the globals namespace."""
    global _get_globals_callback
    _get_globals_callback = get_globals
|
||||
def _get_globals():
    """Return current Python interpreter globals namespace"""
    if _get_globals_callback is not None:
        # The hosting environment supplied its own namespace provider.
        return _get_globals_callback()
    else:
        try:
            from __main__ import __dict__ as namespace
        except ImportError:
            try:
                # The import fails on IronPython
                import __main__
                namespace = __main__.__dict__
            except:
                # Fall back to an empty namespace rather than leaving
                # *namespace* unbound.  (The original had a bare no-op
                # `namespace` expression here, which would have raised
                # NameError on the lookup below when both imports fail.)
                namespace = {}
        shell = namespace.get('__ipythonshell__')
        if shell is not None and hasattr(shell, 'user_ns'):
            # IPython 0.12+ kernel
            return shell.user_ns
        else:
            # Python interpreter
            return namespace
        # (Removed an unreachable trailing `return namespace`: both branches
        # above already return.)
|
||||
|
||||
|
||||
def runfile(filename, args=None, wdir=None, namespace=None):
    """
    Run filename
    args: command line arguments (string)
    wdir: working directory
    """
    # Normalize a byte-string filename to unicode.  On Python 2 a str has
    # .decode; an already-decoded unicode raises UnicodeError/TypeError and
    # is kept as-is.
    try:
        if hasattr(filename, 'decode'):
            filename = filename.decode('utf-8')
    except (UnicodeError, TypeError):
        pass
    global __umd__
    if os.environ.get("PYDEV_UMD_ENABLED", "").lower() == "true":
        if __umd__ is None:
            # First run: build the singleton UMD from the configured
            # comma-separated name blacklist.
            namelist = os.environ.get("PYDEV_UMD_NAMELIST", None)
            if namelist is not None:
                namelist = namelist.split(',')
            __umd__ = UserModuleDeleter(namelist=namelist)
        else:
            # Subsequent runs: drop user modules so they are re-imported.
            verbose = os.environ.get("PYDEV_UMD_VERBOSE", "").lower() == "true"
            __umd__.run(verbose=verbose)
    # NOTE: basestring and execfile (below) are Python 2-only builtins; this
    # module predates the Python 3 port.
    if args is not None and not isinstance(args, basestring):
        raise TypeError("expected a character buffer object")
    if namespace is None:
        namespace = _get_globals()
    # Remember the caller's __file__ so it can be restored after the run.
    if '__file__' in namespace:
        old_file = namespace['__file__']
    else:
        old_file = None
    namespace['__file__'] = filename
    # The script sees itself as sys.argv[0], plus the whitespace-split args.
    sys.argv = [filename]
    if args is not None:
        for arg in args.split():
            sys.argv.append(arg)
    if wdir is not None:
        try:
            if hasattr(wdir, 'decode'):
                wdir = wdir.decode('utf-8')
        except (UnicodeError, TypeError):
            pass
        # Side effect: changes the process working directory (not restored).
        os.chdir(wdir)
    execfile(filename, namespace)
    # Restore the global state mutated for the run.
    sys.argv = ['']
    if old_file is None:
        del namespace['__file__']
    else:
        namespace['__file__'] = old_file
|
||||
16
ptvsd/pydevd/_pydev_bundle/pydev_versioncheck.py
Normal file
16
ptvsd/pydevd/_pydev_bundle/pydev_versioncheck.py
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
import sys
|
||||
|
||||
def versionok_for_gui():
    ''' Return True if running Python is suitable for GUI Event Integration and deeper IPython integration '''
    # Jython and IronPython are not supported.
    if sys.platform.startswith("java") or sys.platform.startswith('cli'):
        return False
    # We require Python 2.6+ ...
    if sys.hexversion < 0x02060000:
        return False
    # ... or Python 3.2+ (3.0/3.1 are excluded).
    if 0x03000000 <= sys.hexversion < 0x03020000:
        return False

    return True
|
||||
|
||||
0
ptvsd/pydevd/_pydev_imps/__init__.py
Normal file
0
ptvsd/pydevd/_pydev_imps/__init__.py
Normal file
604
ptvsd/pydevd/_pydev_imps/_pydev_BaseHTTPServer.py
Normal file
604
ptvsd/pydevd/_pydev_imps/_pydev_BaseHTTPServer.py
Normal file
|
|
@ -0,0 +1,604 @@
|
|||
"""HTTP server base class.
|
||||
|
||||
Note: the class in this module doesn't implement any HTTP request; see
|
||||
SimpleHTTPServer for simple implementations of GET, HEAD and POST
|
||||
(including CGI scripts). It does, however, optionally implement HTTP/1.1
|
||||
persistent connections, as of version 0.3.
|
||||
|
||||
Contents:
|
||||
|
||||
- BaseHTTPRequestHandler: HTTP request handler base class
|
||||
- test: test function
|
||||
|
||||
XXX To do:
|
||||
|
||||
- log requests even later (to capture byte count)
|
||||
- log user-agent header and other interesting goodies
|
||||
- send error log to separate file
|
||||
"""
|
||||
|
||||
|
||||
# See also:
|
||||
#
|
||||
# HTTP Working Group T. Berners-Lee
|
||||
# INTERNET-DRAFT R. T. Fielding
|
||||
# <draft-ietf-http-v10-spec-00.txt> H. Frystyk Nielsen
|
||||
# Expires September 8, 1995 March 8, 1995
|
||||
#
|
||||
# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
|
||||
#
|
||||
# and
|
||||
#
|
||||
# Network Working Group R. Fielding
|
||||
# Request for Comments: 2616 et al
|
||||
# Obsoletes: 2068 June 1999
|
||||
# Category: Standards Track
|
||||
#
|
||||
# URL: http://www.faqs.org/rfcs/rfc2616.html
|
||||
|
||||
# Log files
|
||||
# ---------
|
||||
#
|
||||
# Here's a quote from the NCSA httpd docs about log file format.
|
||||
#
|
||||
# | The logfile format is as follows. Each line consists of:
|
||||
# |
|
||||
# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
|
||||
# |
|
||||
# | host: Either the DNS name or the IP number of the remote client
|
||||
# | rfc931: Any information returned by identd for this person,
|
||||
# | - otherwise.
|
||||
# | authuser: If user sent a userid for authentication, the user name,
|
||||
# | - otherwise.
|
||||
# | DD: Day
|
||||
# | Mon: Month (calendar name)
|
||||
# | YYYY: Year
|
||||
# | hh: hour (24-hour format, the machine's timezone)
|
||||
# | mm: minutes
|
||||
# | ss: seconds
|
||||
# | request: The first line of the HTTP request as sent by the client.
|
||||
# | ddd: the status code returned by the server, - if not available.
|
||||
# | bbbb: the total number of bytes sent,
|
||||
# | *not including the HTTP/1.0 header*, - if not available
|
||||
# |
|
||||
# | You can determine the name of the file accessed through request.
|
||||
#
|
||||
# (Actually, the latter is only true if you know the server configuration
|
||||
# at the time the request was made!)
|
||||
|
||||
__version__ = "0.3"
|
||||
|
||||
__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
|
||||
|
||||
import sys
|
||||
from _pydev_imps._pydev_saved_modules import time
|
||||
from _pydev_imps._pydev_saved_modules import socket
|
||||
from warnings import filterwarnings, catch_warnings
|
||||
with catch_warnings():
|
||||
if sys.py3kwarning:
|
||||
filterwarnings("ignore", ".*mimetools has been removed",
|
||||
DeprecationWarning)
|
||||
import mimetools
|
||||
|
||||
from _pydev_imps import _pydev_SocketServer as SocketServer
|
||||
|
||||
# Default error message template
|
||||
DEFAULT_ERROR_MESSAGE = """\
|
||||
<head>
|
||||
<title>Error response</title>
|
||||
</head>
|
||||
<body>
|
||||
<h1>Error response</h1>
|
||||
<p>Error code %(code)d.
|
||||
<p>Message: %(message)s.
|
||||
<p>Error code explanation: %(code)s = %(explain)s.
|
||||
</body>
|
||||
"""
|
||||
|
||||
DEFAULT_ERROR_CONTENT_TYPE = "text/html"
|
||||
|
||||
def _quote_html(html):
|
||||
return html.replace("&", "&").replace("<", "<").replace(">", ">")
|
||||
|
||||
class HTTPServer(SocketServer.TCPServer):

    # Allow immediate rebinding of the port -- sensible in a testing
    # environment where the server is restarted often.
    allow_reuse_address = 1

    def server_bind(self):
        """Override server_bind to store the server name."""
        SocketServer.TCPServer.server_bind(self)
        bound = self.socket.getsockname()
        self.server_name = socket.getfqdn(bound[0])
        self.server_port = bound[1]
|
||||
|
||||
|
||||
class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):

    """HTTP request handler base class.

    The following explanation of HTTP serves to guide you through the
    code as well as to expose any misunderstandings I may have about
    HTTP (so you don't need to read the code to figure out I'm wrong
    :-).

    HTTP (HyperText Transfer Protocol) is an extensible protocol on
    top of a reliable stream transport (e.g. TCP/IP).  The protocol
    recognizes three parts to a request:

    1. One line identifying the request type and path
    2. An optional set of RFC-822-style headers
    3. An optional data part

    The headers and data are separated by a blank line.

    The first line of the request has the form

    <command> <path> <version>

    where <command> is a (case-sensitive) keyword such as GET or POST,
    <path> is a string containing path information for the request,
    and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
    <path> is encoded using the URL encoding scheme (using %xx to signify
    the ASCII character with hex code xx).

    The specification specifies that lines are separated by CRLF but
    for compatibility with the widest range of clients recommends
    servers also handle LF.  Similarly, whitespace in the request line
    is treated sensibly (allowing multiple spaces between components
    and allowing trailing whitespace).

    Similarly, for output, lines ought to be separated by CRLF pairs
    but most clients grok LF characters just fine.

    If the first line of the request has the form

    <command> <path>

    (i.e. <version> is left out) then this is assumed to be an HTTP
    0.9 request; this form has no optional headers and data part and
    the reply consists of just the data.

    The reply form of the HTTP 1.x protocol again has three parts:

    1. One line giving the response code
    2. An optional set of RFC-822-style headers
    3. The data

    Again, the headers and data are separated by a blank line.

    The response code line has the form

    <version> <responsecode> <responsestring>

    where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
    <responsecode> is a 3-digit response code indicating success or
    failure of the request, and <responsestring> is an optional
    human-readable string explaining what the response code means.

    This server parses the request and the headers, and then calls a
    function specific to the request type (<command>).  Specifically,
    a request SPAM will be handled by a method do_SPAM().  If no
    such method exists the server sends an error response to the
    client.  If it exists, it is called with no arguments:

    do_SPAM()

    Note that the request name is case sensitive (i.e. SPAM and spam
    are different requests).

    The various request details are stored in instance variables:

    - client_address is the client IP address in the form (host,
    port);

    - command, path and version are the broken-down request line;

    - headers is an instance of mimetools.Message (or a derived
    class) containing the header information;

    - rfile is a file object open for reading positioned at the
    start of the optional input data part;

    - wfile is a file object open for writing.

    IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!

    The first thing to be written must be the response line.  Then
    follow 0 or more header lines, then a blank line, and then the
    actual data (if any).  The meaning of the header lines depends on
    the command executed by the server; in most cases, when data is
    returned, there should be at least one header line of the form

    Content-type: <type>/<subtype>

    where <type> and <subtype> should be registered MIME types,
    e.g. "text/html" or "text/plain".

    """

    # The Python system version, truncated to its first component.
    sys_version = "Python/" + sys.version.split()[0]

    # The server software version.  You may want to override this.
    # The format is multiple whitespace-separated strings,
    # where each string is of the form name[/version].
    server_version = "BaseHTTP/" + __version__

    # The default request version.  This only affects responses up until
    # the point where the request line is parsed, so it mainly decides what
    # the client gets back when sending a malformed request line.
    # Most web servers default to HTTP 0.9, i.e. don't send a status line.
    default_request_version = "HTTP/0.9"

    def parse_request(self):
        """Parse a request (internal).

        The request should be stored in self.raw_requestline; the results
        are in self.command, self.path, self.request_version and
        self.headers.

        Return True for success, False for failure; on failure, an
        error is sent back.

        """
        self.command = None  # set in case of error on the first line
        self.request_version = version = self.default_request_version
        # close_connection is a 0/1 flag read by handle(); 1 means do not
        # serve another request on this connection.
        self.close_connection = 1
        requestline = self.raw_requestline
        requestline = requestline.rstrip('\r\n')
        self.requestline = requestline
        words = requestline.split()
        if len(words) == 3:
            # Full request line: <command> <path> <version>
            command, path, version = words
            if version[:5] != 'HTTP/':
                self.send_error(400, "Bad request version (%r)" % version)
                return False
            try:
                base_version_number = version.split('/', 1)[1]
                version_number = base_version_number.split(".")
                # RFC 2145 section 3.1 says there can be only one "." and
                #   - major and minor numbers MUST be treated as
                #      separate integers;
                #   - HTTP/2.4 is a lower version than HTTP/2.13, which in
                #      turn is lower than HTTP/12.3;
                #   - Leading zeros MUST be ignored by recipients.
                if len(version_number) != 2:
                    raise ValueError
                version_number = int(version_number[0]), int(version_number[1])
            except (ValueError, IndexError):
                self.send_error(400, "Bad request version (%r)" % version)
                return False
            if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
                # Both sides speak 1.1+: keep the connection alive by default.
                self.close_connection = 0
            if version_number >= (2, 0):
                self.send_error(505,
                                "Invalid HTTP Version (%s)" % base_version_number)
                return False
        elif len(words) == 2:
            # HTTP/0.9 request line: <command> <path>, GET only, no keepalive.
            command, path = words
            self.close_connection = 1
            if command != 'GET':
                self.send_error(400,
                                "Bad HTTP/0.9 request type (%r)" % command)
                return False
        elif not words:
            return False
        else:
            self.send_error(400, "Bad request syntax (%r)" % requestline)
            return False
        self.command, self.path, self.request_version = command, path, version

        # Examine the headers and look for a Connection directive.
        # MessageClass is mimetools.Message (Python 2 RFC-822 parser).
        self.headers = self.MessageClass(self.rfile, 0)

        conntype = self.headers.get('Connection', "")
        if conntype.lower() == 'close':
            self.close_connection = 1
        elif (conntype.lower() == 'keep-alive' and
              self.protocol_version >= "HTTP/1.1"):
            self.close_connection = 0
        return True

    def handle_one_request(self):
        """Handle a single HTTP request.

        You normally don't need to override this method; see the class
        __doc__ string for information on how to handle specific HTTP
        commands such as GET and POST.

        """
        try:
            # 65537 = one byte more than the allowed limit so an over-long
            # request line can be detected just below.
            self.raw_requestline = self.rfile.readline(65537)
            if len(self.raw_requestline) > 65536:
                self.requestline = ''
                self.request_version = ''
                self.command = ''
                self.send_error(414)
                return
            if not self.raw_requestline:
                # EOF: peer closed the connection.
                self.close_connection = 1
                return
            if not self.parse_request():
                # An error code has been sent, just exit
                return
            # Dispatch to do_<COMMAND>() on self (e.g. do_GET).
            mname = 'do_' + self.command
            if not hasattr(self, mname):
                self.send_error(501, "Unsupported method (%r)" % self.command)
                return
            method = getattr(self, mname)
            method()
            self.wfile.flush()  # actually send the response if not already done.
        except socket.timeout:
            # a read or a write timed out.  Discard this connection
            self.log_error("Request timed out: %r", sys.exc_info()[1])
            self.close_connection = 1
            return

    def handle(self):
        """Handle multiple requests if necessary."""
        self.close_connection = 1

        # Always serve one request; keep going while keepalive is negotiated.
        self.handle_one_request()
        while not self.close_connection:
            self.handle_one_request()

    def send_error(self, code, message=None):
        """Send and log an error reply.

        Arguments are the error code, and a detailed message.
        The detailed message defaults to the short entry matching the
        response code.

        This sends an error response (so it must be called before any
        output has been generated), logs the error, and finally sends
        a piece of HTML explaining the error to the user.

        """

        try:
            # NOTE: `long` here shadows the Python 2 builtin; kept as-is.
            short, long = self.responses[code]
        except KeyError:
            short, long = '???', '???'
        if message is None:
            message = short
        explain = long
        self.log_error("code %d, message %s", code, message)
        # using _quote_html to prevent Cross Site Scripting attacks (see bug #1100201)
        content = (self.error_message_format %
                   {'code': code, 'message': _quote_html(message), 'explain': explain})
        self.send_response(code, message)
        self.send_header("Content-Type", self.error_content_type)
        self.send_header('Connection', 'close')
        self.end_headers()
        # RFC 2616: HEAD responses and 204/304 must not carry a body.
        if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
            self.wfile.write(content)

    error_message_format = DEFAULT_ERROR_MESSAGE
    error_content_type = DEFAULT_ERROR_CONTENT_TYPE

    def send_response(self, code, message=None):
        """Send the response header and log the response code.

        Also send two standard headers with the server software
        version and the current date.

        """
        self.log_request(code)
        if message is None:
            if code in self.responses:
                message = self.responses[code][0]
            else:
                message = ''
        if self.request_version != 'HTTP/0.9':
            # HTTP/0.9 has no status line at all.
            self.wfile.write("%s %d %s\r\n" %
                             (self.protocol_version, code, message))
            # print (self.protocol_version, code, message)
        self.send_header('Server', self.version_string())
        self.send_header('Date', self.date_time_string())

    def send_header(self, keyword, value):
        """Send a MIME header."""
        if self.request_version != 'HTTP/0.9':
            self.wfile.write("%s: %s\r\n" % (keyword, value))

        # Track the Connection header we are sending so handle() knows
        # whether to keep the connection open.
        if keyword.lower() == 'connection':
            if value.lower() == 'close':
                self.close_connection = 1
            elif value.lower() == 'keep-alive':
                self.close_connection = 0

    def end_headers(self):
        """Send the blank line ending the MIME headers."""
        if self.request_version != 'HTTP/0.9':
            self.wfile.write("\r\n")

    def log_request(self, code='-', size='-'):
        """Log an accepted request.

        This is called by send_response().

        """

        self.log_message('"%s" %s %s',
                         self.requestline, str(code), str(size))

    def log_error(self, format, *args):
        """Log an error.

        This is called when a request cannot be fulfilled.  By
        default it passes the message on to log_message().

        Arguments are the same as for log_message().

        XXX This should go to the separate error log.

        """

        self.log_message(format, *args)

    def log_message(self, format, *args):
        """Log an arbitrary message.

        This is used by all other logging functions.  Override
        it if you have specific logging wishes.

        The first argument, FORMAT, is a format string for the
        message to be logged.  If the format string contains
        any % escapes requiring parameters, they should be
        specified as subsequent arguments (it's just like
        printf!).

        The client host and current date/time are prefixed to
        every message.

        """

        sys.stderr.write("%s - - [%s] %s\n" %
                         (self.address_string(),
                          self.log_date_time_string(),
                          format % args))

    def version_string(self):
        """Return the server software version string."""
        return self.server_version + ' ' + self.sys_version

    def date_time_string(self, timestamp=None):
        """Return the current date and time formatted for a message header."""
        if timestamp is None:
            timestamp = time.time()
        year, month, day, hh, mm, ss, wd, y, z = time.gmtime(timestamp)
        s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            self.weekdayname[wd],
            day, self.monthname[month], year,
            hh, mm, ss)
        return s

    def log_date_time_string(self):
        """Return the current time formatted for logging."""
        now = time.time()
        year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
        s = "%02d/%3s/%04d %02d:%02d:%02d" % (
            day, self.monthname[month], year, hh, mm, ss)
        return s

    weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']

    # monthname[0] is None so calendar month numbers (1-12) index directly.
    monthname = [None,
                 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

    def address_string(self):
        """Return the client address formatted for logging.

        This version looks up the full hostname using gethostbyaddr(),
        and tries to find a name that contains at least one dot.

        """

        host, port = self.client_address[:2]
        return socket.getfqdn(host)

    # Essentially static class variables

    # The version of the HTTP protocol we support.
    # Set this to HTTP/1.1 to enable automatic keepalive
    protocol_version = "HTTP/1.0"

    # The Message-like class used to parse headers
    MessageClass = mimetools.Message

    # Table mapping response codes to messages; entries have the
    # form {code: (shortmessage, longmessage)}.
    # See RFC 2616.
    responses = {
        100: ('Continue', 'Request received, please continue'),
        101: ('Switching Protocols',
              'Switching to new protocol; obey Upgrade header'),

        200: ('OK', 'Request fulfilled, document follows'),
        201: ('Created', 'Document created, URL follows'),
        202: ('Accepted',
              'Request accepted, processing continues off-line'),
        203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
        204: ('No Content', 'Request fulfilled, nothing follows'),
        205: ('Reset Content', 'Clear input form for further input.'),
        206: ('Partial Content', 'Partial content follows.'),

        300: ('Multiple Choices',
              'Object has several resources -- see URI list'),
        301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
        302: ('Found', 'Object moved temporarily -- see URI list'),
        303: ('See Other', 'Object moved -- see Method and URL list'),
        304: ('Not Modified',
              'Document has not changed since given time'),
        305: ('Use Proxy',
              'You must use proxy specified in Location to access this '
              'resource.'),
        307: ('Temporary Redirect',
              'Object moved temporarily -- see URI list'),

        400: ('Bad Request',
              'Bad request syntax or unsupported method'),
        401: ('Unauthorized',
              'No permission -- see authorization schemes'),
        402: ('Payment Required',
              'No payment -- see charging schemes'),
        403: ('Forbidden',
              'Request forbidden -- authorization will not help'),
        404: ('Not Found', 'Nothing matches the given URI'),
        405: ('Method Not Allowed',
              'Specified method is invalid for this resource.'),
        406: ('Not Acceptable', 'URI not available in preferred format.'),
        407: ('Proxy Authentication Required', 'You must authenticate with '
              'this proxy before proceeding.'),
        408: ('Request Timeout', 'Request timed out; try again later.'),
        409: ('Conflict', 'Request conflict.'),
        410: ('Gone',
              'URI no longer exists and has been permanently removed.'),
        411: ('Length Required', 'Client must specify Content-Length.'),
        412: ('Precondition Failed', 'Precondition in headers is false.'),
        413: ('Request Entity Too Large', 'Entity is too large.'),
        414: ('Request-URI Too Long', 'URI is too long.'),
        415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
        416: ('Requested Range Not Satisfiable',
              'Cannot satisfy request range.'),
        417: ('Expectation Failed',
              'Expect condition could not be satisfied.'),

        500: ('Internal Server Error', 'Server got itself in trouble'),
        501: ('Not Implemented',
              'Server does not support this operation'),
        502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
        503: ('Service Unavailable',
              'The server cannot process the request due to a high load'),
        504: ('Gateway Timeout',
              'The gateway server did not receive a timely response'),
        505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
        }
|
||||
|
||||
|
||||
def test(HandlerClass = BaseHTTPRequestHandler,
         ServerClass = HTTPServer, protocol="HTTP/1.0"):
    """Test the HTTP request handler class.

    This runs an HTTP server on port 8000 (or the first command line
    argument).

    """

    # Port comes from the first CLI argument when present, otherwise 8000.
    port = int(sys.argv[1]) if sys.argv[1:] else 8000
    server_address = ('', port)

    HandlerClass.protocol_version = protocol
    httpd = ServerClass(server_address, HandlerClass)

    sa = httpd.socket.getsockname()
    print ("Serving HTTP on", sa[0], "port", sa[1], "...")
    # Blocks forever, serving requests.
    httpd.serve_forever()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test()
|
||||
601
ptvsd/pydevd/_pydev_imps/_pydev_SimpleXMLRPCServer.py
Normal file
601
ptvsd/pydevd/_pydev_imps/_pydev_SimpleXMLRPCServer.py
Normal file
|
|
@ -0,0 +1,601 @@
|
|||
#Just a copy of the version in python 2.5 to be used if it's not available in jython 2.1
|
||||
|
||||
"""Simple XML-RPC Server.
|
||||
|
||||
This module can be used to create simple XML-RPC servers
|
||||
by creating a server and either installing functions, a
|
||||
class instance, or by extending the SimpleXMLRPCServer
|
||||
class.
|
||||
|
||||
It can also be used to handle XML-RPC requests in a CGI
|
||||
environment using CGIXMLRPCRequestHandler.
|
||||
|
||||
A list of possible usage patterns follows:
|
||||
|
||||
1. Install functions:
|
||||
|
||||
server = SimpleXMLRPCServer(("localhost", 8000))
|
||||
server.register_function(pow)
|
||||
server.register_function(lambda x,y: x+y, 'add')
|
||||
server.serve_forever()
|
||||
|
||||
2. Install an instance:
|
||||
|
||||
class MyFuncs:
|
||||
def __init__(self):
|
||||
# make all of the string functions available through
|
||||
# string.func_name
|
||||
import string
|
||||
self.string = string
|
||||
def _listMethods(self):
|
||||
# implement this method so that system.listMethods
|
||||
# knows to advertise the strings methods
|
||||
return list_public_methods(self) + \
|
||||
['string.' + method for method in list_public_methods(self.string)]
|
||||
def pow(self, x, y): return pow(x, y)
|
||||
def add(self, x, y) : return x + y
|
||||
|
||||
server = SimpleXMLRPCServer(("localhost", 8000))
|
||||
server.register_introspection_functions()
|
||||
server.register_instance(MyFuncs())
|
||||
server.serve_forever()
|
||||
|
||||
3. Install an instance with custom dispatch method:
|
||||
|
||||
class Math:
|
||||
def _listMethods(self):
|
||||
# this method must be present for system.listMethods
|
||||
# to work
|
||||
return ['add', 'pow']
|
||||
def _methodHelp(self, method):
|
||||
# this method must be present for system.methodHelp
|
||||
# to work
|
||||
if method == 'add':
|
||||
return "add(2,3) => 5"
|
||||
elif method == 'pow':
|
||||
return "pow(x, y[, z]) => number"
|
||||
else:
|
||||
# By convention, return empty
|
||||
# string if no help is available
|
||||
return ""
|
||||
def _dispatch(self, method, params):
|
||||
if method == 'pow':
|
||||
return pow(*params)
|
||||
elif method == 'add':
|
||||
return params[0] + params[1]
|
||||
else:
|
||||
raise 'bad method'
|
||||
|
||||
server = SimpleXMLRPCServer(("localhost", 8000))
|
||||
server.register_introspection_functions()
|
||||
server.register_instance(Math())
|
||||
server.serve_forever()
|
||||
|
||||
4. Subclass SimpleXMLRPCServer:
|
||||
|
||||
class MathServer(SimpleXMLRPCServer):
|
||||
def _dispatch(self, method, params):
|
||||
try:
|
||||
# We are forcing the 'export_' prefix on methods that are
|
||||
# callable through XML-RPC to prevent potential security
|
||||
# problems
|
||||
func = getattr(self, 'export_' + method)
|
||||
except AttributeError:
|
||||
raise Exception('method "%s" is not supported' % method)
|
||||
else:
|
||||
return func(*params)
|
||||
|
||||
def export_add(self, x, y):
|
||||
return x + y
|
||||
|
||||
server = MathServer(("localhost", 8000))
|
||||
server.serve_forever()
|
||||
|
||||
5. CGI script:
|
||||
|
||||
server = CGIXMLRPCRequestHandler()
|
||||
server.register_function(pow)
|
||||
server.handle_request()
|
||||
"""
|
||||
|
||||
# Written by Brian Quinlan (brian@sweetapp.com).
|
||||
# Based on code written by Fredrik Lundh.
|
||||
|
||||
from _pydev_imps import _pydev_xmlrpclib as xmlrpclib
|
||||
from _pydev_imps._pydev_xmlrpclib import Fault
|
||||
from _pydev_imps import _pydev_SocketServer as SocketServer
|
||||
from _pydev_imps import _pydev_BaseHTTPServer as BaseHTTPServer
|
||||
import sys
|
||||
import os
|
||||
try:
|
||||
import fcntl
|
||||
except ImportError:
|
||||
fcntl = None
|
||||
|
||||
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d

    Walk *attr* (a possibly dotted name) starting from *obj* and return
    the attribute it denotes.  Any segment beginning with an underscore
    is treated as private and raises AttributeError, which makes this
    safe to drive from externally supplied XML-RPC method names.

    If the optional allow_dotted_names argument is false, the name is
    taken literally (no splitting on '.'), so the call behaves like a
    guarded getattr(obj, attr).
    """
    segments = attr.split('.') if allow_dotted_names else [attr]

    target = obj
    for segment in segments:
        # Refuse to traverse into anything that looks private.
        if segment.startswith('_'):
            raise AttributeError(
                'attempt to access private attribute "%s"' % segment
                )
        target = getattr(target, segment)
    return target
|
||||
|
||||
def list_public_methods(obj):
    """Return the names of *obj*'s public callable attributes.

    "Public" means the name does not start with an underscore.  The
    result follows dir()'s (sorted) order.
    """
    names = []
    for name in dir(obj):
        if name.startswith('_'):
            continue
        if callable(getattr(obj, name)):
            names.append(name)
    return names
|
||||
|
||||
def remove_duplicates(lst):
    """remove_duplicates([2,2,2,1,3,3]) => [3,1,2]

    Return the distinct items of *lst*.  Every item must be hashable
    and the order of the items in the result is not defined; callers
    that care about order are expected to sort it themselves.
    """
    # dict.fromkeys keeps exactly one slot per distinct item; returning
    # .keys() preserves the original function's return type.
    seen = dict.fromkeys(lst)
    return seen.keys()
|
||||
|
||||
class SimpleXMLRPCDispatcher:
    """Mix-in class that dispatches XML-RPC requests.

    This class is used to register XML-RPC method handlers
    and then to dispatch them. There should never be any
    reason to instantiate this class directly.

    NOTE(review): this is Python 2-only code ("except X, e" syntax,
    dict.has_key, sys.exc_type) vendored from the stdlib's
    SimpleXMLRPCServer module for use by pydevd.
    """

    def __init__(self, allow_none, encoding):
        # funcs: explicitly registered name -> callable mapping.
        # instance: optional fallback object consulted when a name is
        # not in funcs (see _dispatch).
        self.funcs = {}
        self.instance = None
        self.allow_none = allow_none
        self.encoding = encoding

    def register_instance(self, instance, allow_dotted_names=False):
        """Registers an instance to respond to XML-RPC requests.

        Only one instance can be installed at a time.

        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))

        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called. Methods beginning with an '_'
        are considered private and will not be called by
        SimpleXMLRPCServer.

        If a registered function matches a XML-RPC request, then it
        will be called instead of the registered instance.

        If the optional allow_dotted_names argument is true and the
        instance does not have a _dispatch method, method names
        containing dots are supported and resolved, as long as none of
        the name segments start with an '_'.

            *** SECURITY WARNING: ***

            Enabling the allow_dotted_names options allows intruders
            to access your module's global variables and may allow
            intruders to execute arbitrary code on your machine. Only
            use this option on a secure, closed network.

        """

        self.instance = instance
        self.allow_dotted_names = allow_dotted_names

    def register_function(self, function, name=None):
        """Registers a function to respond to XML-RPC requests.

        The optional name argument can be used to set a Unicode name
        for the function.
        """

        if name is None:
            name = function.__name__
        self.funcs[name] = function

    def register_introspection_functions(self):
        """Registers the XML-RPC introspection methods in the system
        namespace.

        see http://xmlrpc.usefulinc.com/doc/reserved.html
        """

        self.funcs.update({'system.listMethods' : self.system_listMethods,
                           'system.methodSignature' : self.system_methodSignature,
                           'system.methodHelp' : self.system_methodHelp})

    def register_multicall_functions(self):
        """Registers the XML-RPC multicall method in the system
        namespace.

        see http://www.xmlrpc.com/discuss/msgReader$1208"""

        self.funcs.update({'system.multicall' : self.system_multicall})

    def _marshaled_dispatch(self, data, dispatch_method=None):
        """Dispatches an XML-RPC method from marshalled (XML) data.

        XML-RPC methods are dispatched from the marshalled (XML) data
        using the _dispatch method and the result is returned as
        marshalled data. For backwards compatibility, a dispatch
        function can be provided as an argument (see comment in
        SimpleXMLRPCRequestHandler.do_POST) but overriding the
        existing method through subclassing is the preferred means
        of changing method dispatch behavior.
        """
        try:
            params, method = xmlrpclib.loads(data)

            # generate response
            if dispatch_method is not None:
                response = dispatch_method(method, params)
            else:
                response = self._dispatch(method, params)
            # wrap response in a singleton tuple
            response = (response,)
            response = xmlrpclib.dumps(response, methodresponse=1,
                                       allow_none=self.allow_none, encoding=self.encoding)
        except Fault, fault:
            # A Fault raised by the handler is marshalled back verbatim.
            response = xmlrpclib.dumps(fault, allow_none=self.allow_none,
                                       encoding=self.encoding)
        except:
            # report exception back to server: any other error becomes a
            # generic faultCode-1 Fault so the client still gets a reply
            response = xmlrpclib.dumps(
                xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value)), #@UndefinedVariable exc_value only available when we actually have an exception
                encoding=self.encoding, allow_none=self.allow_none,
                )

        return response

    def system_listMethods(self):
        """system.listMethods() => ['add', 'subtract', 'multiple']

        Returns a list of the methods supported by the server."""

        methods = self.funcs.keys()
        if self.instance is not None:
            # Instance can implement _listMethods to return a list of
            # methods
            if hasattr(self.instance, '_listMethods'):
                methods = remove_duplicates(
                        methods + self.instance._listMethods()
                    )
            # if the instance has a _dispatch method then we
            # don't have enough information to provide a list
            # of methods
            elif not hasattr(self.instance, '_dispatch'):
                methods = remove_duplicates(
                        methods + list_public_methods(self.instance)
                    )
        methods.sort()
        return methods

    def system_methodSignature(self, method_name):
        """system.methodSignature('add') => [double, int, int]

        Returns a list describing the signature of the method. In the
        above example, the add method takes two integers as arguments
        and returns a double result.

        This server does NOT support system.methodSignature."""

        # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html

        return 'signatures not supported'

    def system_methodHelp(self, method_name):
        """system.methodHelp('add') => "Adds two integers together"

        Returns a string containing documentation for the specified method."""

        method = None
        if self.funcs.has_key(method_name):
            method = self.funcs[method_name]
        elif self.instance is not None:
            # Instance can implement _methodHelp to return help for a method
            if hasattr(self.instance, '_methodHelp'):
                return self.instance._methodHelp(method_name)
            # if the instance has a _dispatch method then we
            # don't have enough information to provide help
            elif not hasattr(self.instance, '_dispatch'):
                try:
                    method = resolve_dotted_attribute(
                        self.instance,
                        method_name,
                        self.allow_dotted_names
                        )
                except AttributeError:
                    pass

        # Note that we aren't checking that the method actually
        # be a callable object of some kind
        if method is None:
            return ""
        else:
            try:
                import pydoc
            except ImportError:
                return "" #not there for jython
            else:
                return pydoc.getdoc(method)

    def system_multicall(self, call_list):
        """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
[[4], ...]

        Allows the caller to package multiple XML-RPC calls into a single
        request.

        See http://www.xmlrpc.com/discuss/msgReader$1208
        """

        results = []
        for call in call_list:
            method_name = call['methodName']
            params = call['params']

            try:
                # XXX A marshalling error in any response will fail the entire
                # multicall. If someone cares they should fix this.
                results.append([self._dispatch(method_name, params)])
            except Fault, fault:
                results.append(
                    {'faultCode' : fault.faultCode,
                     'faultString' : fault.faultString}
                    )
            except:
                results.append(
                    {'faultCode' : 1,
                     'faultString' : "%s:%s" % (sys.exc_type, sys.exc_value)} #@UndefinedVariable exc_value only available when we actually have an exception
                    )
        return results

    def _dispatch(self, method, params):
        """Dispatches the XML-RPC method.

        XML-RPC calls are forwarded to a registered function that
        matches the called XML-RPC method name. If no such function
        exists then the call is forwarded to the registered instance,
        if available.

        If the registered instance has a _dispatch method then that
        method will be called with the name of the XML-RPC method and
        its parameters as a tuple
        e.g. instance._dispatch('add',(2,3))

        If the registered instance does not have a _dispatch method
        then the instance will be searched to find a matching method
        and, if found, will be called.

        Methods beginning with an '_' are considered private and will
        not be called.
        """

        func = None
        try:
            # check to see if a matching function has been registered
            func = self.funcs[method]
        except KeyError:
            if self.instance is not None:
                # check for a _dispatch method
                if hasattr(self.instance, '_dispatch'):
                    return self.instance._dispatch(method, params)
                else:
                    # call instance method directly
                    try:
                        func = resolve_dotted_attribute(
                            self.instance,
                            method,
                            self.allow_dotted_names
                            )
                    except AttributeError:
                        pass

        if func is not None:
            return func(*params)
        else:
            raise Exception('method "%s" is not supported' % method)
|
||||
|
||||
class SimpleXMLRPCRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Simple XML-RPC request handler class.

    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.
    """

    # Class attribute listing the accessible path components;
    # paths not on this list will result in a 404 error.
    rpc_paths = ('/', '/RPC2')

    def is_rpc_path_valid(self):
        # True if the request path is one of the configured RPC paths.
        if self.rpc_paths:
            return self.path in self.rpc_paths
        else:
            # If .rpc_paths is empty, just assume all paths are legal
            return True

    def do_POST(self):
        """Handles the HTTP POST request.

        Attempts to interpret all HTTP POST requests as XML-RPC calls,
        which are forwarded to the server's _dispatch method for handling.
        """

        # Check that the path is legal
        if not self.is_rpc_path_valid():
            self.report_404()
            return

        try:
            # Get arguments by reading body of request.
            # We read this in chunks to avoid straining
            # socket.read(); around the 10 or 15Mb mark, some platforms
            # begin to have problems (bug #792570).
            max_chunk_size = 10 * 1024 * 1024
            size_remaining = int(self.headers["content-length"])
            L = []
            while size_remaining:
                chunk_size = min(size_remaining, max_chunk_size)
                chunk = self.rfile.read(chunk_size)
                if not chunk:
                    # FIX: the client closed the connection before
                    # sending the advertised content-length bytes.
                    # Without this break the loop would spin forever on
                    # empty reads (same fix as later CPython versions).
                    break
                L.append(chunk)
                size_remaining -= len(chunk)
            data = ''.join(L)

            # In previous versions of SimpleXMLRPCServer, _dispatch
            # could be overridden in this class, instead of in
            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
            # check to see if a subclass implements _dispatch and dispatch
            # using that method if present.
            response = self.server._marshaled_dispatch(
                    data, getattr(self, '_dispatch', None)
                )
        except: # This should only happen if the module is buggy
            # internal error, report as HTTP server error
            self.send_response(500)
            self.end_headers()
        else:
            # got a valid XML RPC response
            self.send_response(200)
            self.send_header("Content-type", "text/xml")
            self.send_header("Content-length", str(len(response)))
            self.end_headers()
            self.wfile.write(response)

            # shut down the connection
            self.wfile.flush()
            self.connection.shutdown(1)

    def report_404 (self):
        # Send a minimal plain-text 404 reply and close the connection.
        self.send_response(404)
        response = 'No such page'
        self.send_header("Content-type", "text/plain")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)
        # shut down the connection
        self.wfile.flush()
        self.connection.shutdown(1)

    def log_request(self, code='-', size='-'):
        """Selectively log an accepted request."""

        # Only log when the owning server was created with
        # logRequests=True (see SimpleXMLRPCServer.__init__).
        if self.server.logRequests:
            BaseHTTPServer.BaseHTTPRequestHandler.log_request(self, code, size)
|
||||
|
||||
class SimpleXMLRPCServer(SocketServer.TCPServer,
                         SimpleXMLRPCDispatcher):
    """Simple XML-RPC server.

    Simple XML-RPC server that allows functions and a single instance
    to be installed to handle requests. The default implementation
    attempts to dispatch XML-RPC calls to the functions or instance
    installed in the server. Override the _dispatch method inherited
    from SimpleXMLRPCDispatcher to change this behavior.
    """

    # Allow fast restarts on the same address (SO_REUSEADDR).
    allow_reuse_address = True

    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
                 logRequests=True, allow_none=False, encoding=None):
        # logRequests must be set before the TCP server starts accepting,
        # because the request handler reads it in log_request().
        self.logRequests = logRequests

        # Initialize the dispatcher first, then bind/listen the socket.
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)
        SocketServer.TCPServer.__init__(self, addr, requestHandler)

        # [Bug #1222790] If possible, set close-on-exec flag; if a
        # method spawns a subprocess, the subprocess shouldn't have
        # the listening socket open.
        if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
            flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
            flags |= fcntl.FD_CLOEXEC
            fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
|
||||
|
||||
class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
    """Simple handler for XML-RPC data passed through CGI."""

    def __init__(self, allow_none=False, encoding=None):
        SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)

    def handle_xmlrpc(self, request_text):
        """Dispatch one marshalled XML-RPC request and print the CGI
        response (headers followed by the XML payload) to stdout."""

        response = self._marshaled_dispatch(request_text)

        write = sys.stdout.write
        write('Content-Type: text/xml\n')
        write('Content-Length: %d\n' % len(response))
        write('\n')
        write(response)

    def handle_get(self):
        """Handle a single HTTP GET request.

        Default implementation indicates an error because
        XML-RPC uses the POST method.
        """

        code = 400
        message, explain = \
            BaseHTTPServer.BaseHTTPRequestHandler.responses[code]

        body = BaseHTTPServer.DEFAULT_ERROR_MESSAGE % { #@UndefinedVariable
            'code' : code,
            'message' : message,
            'explain' : explain
            }
        write = sys.stdout.write
        write('Status: %d %s\n' % (code, message))
        write('Content-Type: text/html\n')
        write('Content-Length: %d\n' % len(body))
        write('\n')
        write(body)

    def handle_request(self, request_text=None):
        """Handle a single XML-RPC request passed through a CGI post method.

        If no XML data is given then it is read from stdin. The resulting
        XML-RPC response is printed to stdout along with the correct HTTP
        headers.
        """

        is_get = (request_text is None and
                  os.environ.get('REQUEST_METHOD', None) == 'GET')
        if is_get:
            self.handle_get()
            return

        # POST data is normally available through stdin
        if request_text is None:
            request_text = sys.stdin.read()
        self.handle_xmlrpc(request_text)
|
||||
|
||||
# Manual smoke test: serve pow() and an 'add' lambda on localhost:8000
# until interrupted.
if __name__ == '__main__':
    sys.stdout.write('Running XML-RPC server on port 8000\n')
    server = SimpleXMLRPCServer(("localhost", 8000))
    server.register_function(pow)
    server.register_function(lambda x, y: x + y, 'add')
    server.serve_forever()
|
||||
715
ptvsd/pydevd/_pydev_imps/_pydev_SocketServer.py
Normal file
715
ptvsd/pydevd/_pydev_imps/_pydev_SocketServer.py
Normal file
|
|
@ -0,0 +1,715 @@
|
|||
"""Generic socket server classes.
|
||||
|
||||
This module tries to capture the various aspects of defining a server:
|
||||
|
||||
For socket-based servers:
|
||||
|
||||
- address family:
|
||||
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
|
||||
- AF_UNIX: Unix domain sockets
|
||||
- others, e.g. AF_DECNET are conceivable (see <socket.h>
|
||||
- socket type:
|
||||
- SOCK_STREAM (reliable stream, e.g. TCP)
|
||||
- SOCK_DGRAM (datagrams, e.g. UDP)
|
||||
|
||||
For request-based servers (including socket-based):
|
||||
|
||||
- client address verification before further looking at the request
|
||||
(This is actually a hook for any processing that needs to look
|
||||
at the request before anything else, e.g. logging)
|
||||
- how to handle multiple requests:
|
||||
- synchronous (one request is handled at a time)
|
||||
- forking (each request is handled by a new process)
|
||||
- threading (each request is handled by a new thread)
|
||||
|
||||
The classes in this module favor the server type that is simplest to
|
||||
write: a synchronous TCP/IP server. This is bad class design, but
|
||||
saves some typing. (There's also the issue that a deep class hierarchy
|
||||
slows down method lookups.)
|
||||
|
||||
There are five classes in an inheritance diagram, four of which represent
|
||||
synchronous servers of four types:
|
||||
|
||||
+------------+
|
||||
| BaseServer |
|
||||
+------------+
|
||||
|
|
||||
v
|
||||
+-----------+ +------------------+
|
||||
| TCPServer |------->| UnixStreamServer |
|
||||
+-----------+ +------------------+
|
||||
|
|
||||
v
|
||||
+-----------+ +--------------------+
|
||||
| UDPServer |------->| UnixDatagramServer |
|
||||
+-----------+ +--------------------+
|
||||
|
||||
Note that UnixDatagramServer derives from UDPServer, not from
|
||||
UnixStreamServer -- the only difference between an IP and a Unix
|
||||
stream server is the address family, which is simply repeated in both
|
||||
unix server classes.
|
||||
|
||||
Forking and threading versions of each type of server can be created
|
||||
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
|
||||
instance, a threading UDP server class is created as follows:
|
||||
|
||||
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
|
||||
|
||||
The Mix-in class must come first, since it overrides a method defined
|
||||
in UDPServer! Setting the various member variables also changes
|
||||
the behavior of the underlying server mechanism.
|
||||
|
||||
To implement a service, you must derive a class from
|
||||
BaseRequestHandler and redefine its handle() method. You can then run
|
||||
various versions of the service by combining one of the server classes
|
||||
with your request handler class.
|
||||
|
||||
The request handler class must be different for datagram or stream
|
||||
services. This can be hidden by using the request handler
|
||||
subclasses StreamRequestHandler or DatagramRequestHandler.
|
||||
|
||||
Of course, you still have to use your head!
|
||||
|
||||
For instance, it makes no sense to use a forking server if the service
|
||||
contains state in memory that can be modified by requests (since the
|
||||
modifications in the child process would never reach the initial state
|
||||
kept in the parent process and passed to each child). In this case,
|
||||
you can use a threading server, but you will probably have to use
|
||||
locks to avoid two requests that come in nearly simultaneously applying
|
||||
conflicting changes to the server state.
|
||||
|
||||
On the other hand, if you are building e.g. an HTTP server, where all
|
||||
data is stored externally (e.g. in the file system), a synchronous
|
||||
class will essentially render the service "deaf" while one request is
|
||||
being handled -- which may be for a very long time if a client is slow
|
||||
to read all the data it has requested. Here a threading or forking
|
||||
server is appropriate.
|
||||
|
||||
In some cases, it may be appropriate to process part of a request
|
||||
synchronously, but to finish processing in a forked child depending on
|
||||
the request data. This can be implemented by using a synchronous
|
||||
server and doing an explicit fork in the request handler class
|
||||
handle() method.
|
||||
|
||||
Another approach to handling multiple simultaneous requests in an
|
||||
environment that supports neither threads nor fork (or where these are
|
||||
too expensive or inappropriate for the service) is to maintain an
|
||||
explicit table of partially finished requests and to use select() to
|
||||
decide which request to work on next (or whether to handle a new
|
||||
incoming request). This is particularly important for stream services
|
||||
where each client can potentially be connected for a long time (if
|
||||
threads or subprocesses cannot be used).
|
||||
|
||||
Future work:
|
||||
- Standard classes for Sun RPC (which uses either UDP or TCP)
|
||||
- Standard mix-in classes to implement various authentication
|
||||
and encryption schemes
|
||||
- Standard framework for select-based multiplexing
|
||||
|
||||
XXX Open problems:
|
||||
- What to do with out-of-band data?
|
||||
|
||||
BaseServer:
|
||||
- split generic "request" functionality out into BaseServer class.
|
||||
Copyright (C) 2000 Luke Kenneth Casson Leighton <lkcl@samba.org>
|
||||
|
||||
example: read entries from a SQL database (requires overriding
|
||||
get_request() to return a table entry from the database).
|
||||
entry is processed by a RequestHandlerClass.
|
||||
|
||||
"""
|
||||
|
||||
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
|
||||
|
||||
# XXX Warning!
|
||||
# There is a test suite for this module, but it cannot be run by the
|
||||
# standard regression test.
|
||||
# To run it manually, run Lib/test/test_socketserver.py.
|
||||
|
||||
__version__ = "0.4"


# socket/select/threading come from pydevd's saved-modules shim so the
# debugger keeps working even if user code monkey-patches them.
from _pydev_imps._pydev_saved_modules import socket
from _pydev_imps._pydev_saved_modules import select
import sys
import os
try:
    from _pydev_imps._pydev_saved_modules import threading
except ImportError:
    # No real thread support (e.g. restricted builds): fall back to the
    # single-threaded stand-in API.
    import dummy_threading as threading

__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
           "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
           "StreamRequestHandler","DatagramRequestHandler",
           "ThreadingMixIn", "ForkingMixIn"]
# Unix-domain variants are only exported where AF_UNIX exists.
if hasattr(socket, "AF_UNIX"):
    __all__.extend(["UnixStreamServer","UnixDatagramServer",
                    "ThreadingUnixStreamServer",
                    "ThreadingUnixDatagramServer"])
|
||||
|
||||
class BaseServer:

    """Base class for server classes.

    NOTE(review): Python 2-only code ("print" statements in
    handle_error) vendored from the stdlib's SocketServer module.

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request()  # if you do not use serve_forever()
    - fileno() -> int   # for select()

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - server_close()
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - timeout
    - address_family
    - socket_type
    - allow_reuse_address

    Instance variables:

    - RequestHandlerClass
    - socket

    """

    timeout = None

    def __init__(self, server_address, RequestHandlerClass):
        """Constructor. May be extended, do not override."""
        self.server_address = server_address
        self.RequestHandlerClass = RequestHandlerClass
        # Event/flag pair coordinating serve_forever() and shutdown().
        self.__is_shut_down = threading.Event()  # @UndefinedVariable
        self.__shutdown_request = False

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.

        """
        pass

    def serve_forever(self, poll_interval=0.5):
        """Handle one request at a time until shutdown.

        Polls for shutdown every poll_interval seconds. Ignores
        self.timeout. If you need to do periodic tasks, do them in
        another thread.
        """
        self.__is_shut_down.clear()
        try:
            while not self.__shutdown_request:
                # XXX: Consider using another file descriptor or
                # connecting to the socket to wake this up instead of
                # polling. Polling reduces our responsiveness to a
                # shutdown request and wastes cpu at all other times.
                r, w, e = select.select([self], [], [], poll_interval)
                if self in r:
                    self._handle_request_noblock()
        finally:
            # Always reset the flag and release shutdown() waiters,
            # even if the loop body raised.
            self.__shutdown_request = False
            self.__is_shut_down.set()

    def shutdown(self):
        """Stops the serve_forever loop.

        Blocks until the loop has finished. This must be called while
        serve_forever() is running in another thread, or it will
        deadlock.
        """
        self.__shutdown_request = True
        self.__is_shut_down.wait()

    # The distinction between handling, getting, processing and
    # finishing a request is fairly arbitrary. Remember:
    #
    # - handle_request() is the top-level call. It calls
    #   select, get_request(), verify_request() and process_request()
    # - get_request() is different for stream or datagram sockets
    # - process_request() is the place that may fork a new process
    #   or create a new thread to finish the request
    # - finish_request() instantiates the request handler class;
    #   this constructor will handle the request all by itself

    def handle_request(self):
        """Handle one request, possibly blocking.

        Respects self.timeout.
        """
        # Support people who used socket.settimeout() to escape
        # handle_request before self.timeout was available.
        timeout = self.socket.gettimeout()
        if timeout is None:
            timeout = self.timeout
        elif self.timeout is not None:
            # Honor the tighter of the two timeouts.
            timeout = min(timeout, self.timeout)
        fd_sets = select.select([self], [], [], timeout)
        if not fd_sets[0]:
            # select() timed out without a pending connection.
            self.handle_timeout()
            return
        self._handle_request_noblock()

    def _handle_request_noblock(self):
        """Handle one request, without blocking.

        I assume that select.select has returned that the socket is
        readable before this function was called, so there should be
        no risk of blocking in get_request().
        """
        try:
            request, client_address = self.get_request()
        except socket.error:
            # accept() can still fail (e.g. the peer reset); just drop it.
            return
        if self.verify_request(request, client_address):
            try:
                self.process_request(request, client_address)
            except:
                self.handle_error(request, client_address)
                self.shutdown_request(request)

    def handle_timeout(self):
        """Called if no new request arrives within self.timeout.

        Overridden by ForkingMixIn.
        """
        pass

    def verify_request(self, request, client_address):
        """Verify the request. May be overridden.

        Return True if we should proceed with this request.

        """
        return True

    def process_request(self, request, client_address):
        """Call finish_request.

        Overridden by ForkingMixIn and ThreadingMixIn.

        """
        self.finish_request(request, client_address)
        self.shutdown_request(request)

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.

        """
        pass

    def finish_request(self, request, client_address):
        """Finish one request by instantiating RequestHandlerClass."""
        self.RequestHandlerClass(request, client_address, self)

    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        self.close_request(request)

    def close_request(self, request):
        """Called to clean up an individual request."""
        pass

    def handle_error(self, request, client_address):
        """Handle an error gracefully. May be overridden.

        The default is to print a traceback and continue.

        """
        print '-'*40
        print 'Exception happened during processing of request from',
        print client_address
        import traceback
        traceback.print_exc() # XXX But this goes to stderr!
        print '-'*40
|
||||
|
||||
|
||||
class TCPServer(BaseServer):

    """Base class for various socket-based server classes.

    Defaults to synchronous IP stream (i.e., TCP).

    Methods for the caller:

    - __init__(server_address, RequestHandlerClass, bind_and_activate=True)
    - serve_forever(poll_interval=0.5)
    - shutdown()
    - handle_request()  # if you don't use serve_forever()
    - fileno() -> int   # for select()

    Methods that may be overridden:

    - server_bind()
    - server_activate()
    - get_request() -> request, client_address
    - handle_timeout()
    - verify_request(request, client_address)
    - process_request(request, client_address)
    - shutdown_request(request)
    - close_request(request)
    - handle_error()

    Methods for derived classes:

    - finish_request(request, client_address)

    Class variables that may be overridden by derived classes or
    instances:

    - timeout
    - address_family
    - socket_type
    - request_queue_size (only for stream sockets)
    - allow_reuse_address

    Instance variables:

    - server_address
    - RequestHandlerClass
    - socket

    """

    address_family = socket.AF_INET

    socket_type = socket.SOCK_STREAM

    # listen() backlog.
    request_queue_size = 5

    allow_reuse_address = False

    def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True):
        """Constructor. May be extended, do not override."""
        BaseServer.__init__(self, server_address, RequestHandlerClass)
        self.socket = socket.socket(self.address_family,
                                    self.socket_type)
        # bind_and_activate=False lets subclasses adjust socket options
        # before binding/listening.
        if bind_and_activate:
            self.server_bind()
            self.server_activate()

    def server_bind(self):
        """Called by constructor to bind the socket.

        May be overridden.

        """
        if self.allow_reuse_address:
            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.socket.bind(self.server_address)
        # Re-read the address: binding to port 0 assigns an ephemeral port.
        self.server_address = self.socket.getsockname()

    def server_activate(self):
        """Called by constructor to activate the server.

        May be overridden.

        """
        self.socket.listen(self.request_queue_size)

    def server_close(self):
        """Called to clean-up the server.

        May be overridden.

        """
        self.socket.close()

    def fileno(self):
        """Return socket file number.

        Interface required by select().

        """
        return self.socket.fileno()

    def get_request(self):
        """Get the request and client address from the socket.

        May be overridden.

        """
        return self.socket.accept()

    def shutdown_request(self, request):
        """Called to shutdown and close an individual request."""
        try:
            #explicitly shutdown. socket.close() merely releases
            #the socket and waits for GC to perform the actual close.
            request.shutdown(socket.SHUT_WR)
        except socket.error:
            pass #some platforms may raise ENOTCONN here
        self.close_request(request)

    def close_request(self, request):
        """Called to clean up an individual request."""
        request.close()
|
||||
|
||||
|
||||
class UDPServer(TCPServer):

    """UDP server class."""

    allow_reuse_address = False

    socket_type = socket.SOCK_DGRAM

    # Largest datagram accepted by recvfrom() in get_request().
    max_packet_size = 8192

    def get_request(self):
        # The "request" is the (payload, socket) pair: handlers need the
        # socket itself to send the reply datagram.
        data, client_addr = self.socket.recvfrom(self.max_packet_size)
        return (data, self.socket), client_addr

    def server_activate(self):
        # No need to call listen() for UDP.
        pass

    def shutdown_request(self, request):
        # No need to shutdown anything.
        self.close_request(request)

    def close_request(self, request):
        # No need to close anything.
        pass
|
||||
class ForkingMixIn:

    """Mix-in class to handle each request in a new process."""

    timeout = 300
    # List of pids of forked children still believed to be running;
    # created lazily on the first fork.
    active_children = None
    max_children = 40

    def collect_children(self):
        """Internal routine to wait for children that have exited."""
        if self.active_children is None: return
        # Block until we are below max_children before forking again.
        while len(self.active_children) >= self.max_children:
            # XXX: This will wait for any child process, not just ones
            # spawned by this library. This could confuse other
            # libraries that expect to be able to wait for their own
            # children.
            try:
                pid, status = os.waitpid(0, 0)
            except os.error:
                pid = None
            if pid not in self.active_children: continue
            self.active_children.remove(pid)

        # XXX: This loop runs more system calls than it ought
        # to. There should be a way to put the active_children into a
        # process group and then use os.waitpid(-pgid) to wait for any
        # of that set, but I couldn't find a way to allocate pgids
        # that couldn't collide.
        for child in self.active_children:
            try:
                pid, status = os.waitpid(child, os.WNOHANG) # @UndefinedVariable
            except os.error:
                pid = None
            if not pid: continue
            try:
                self.active_children.remove(pid)
            except ValueError, e:
                raise ValueError('%s. x=%d and list=%r' % (e.message, pid,
                                                           self.active_children))

    def handle_timeout(self):
        """Wait for zombies after self.timeout seconds of inactivity.

        May be extended, do not override.
        """
        self.collect_children()

    def process_request(self, request, client_address):
        """Fork a new subprocess to process the request."""
        self.collect_children()
        pid = os.fork() # @UndefinedVariable
        if pid:
            # Parent process
            if self.active_children is None:
                self.active_children = []
            self.active_children.append(pid)
            self.close_request(request) #close handle in parent process
            return
        else:
            # Child process.
            # This must never return, hence os._exit()!
            try:
                self.finish_request(request, client_address)
                self.shutdown_request(request)
                os._exit(0)
            except:
                try:
                    self.handle_error(request, client_address)
                    self.shutdown_request(request)
                finally:
                    os._exit(1)
|
||||
|
||||
class ThreadingMixIn:
    """Mix-in class to handle each request in a new thread."""

    # Decides how threads will act upon termination of the
    # main process
    daemon_threads = False

    def process_request_thread(self, request, client_address):
        """Same as in BaseServer but as a thread.

        In addition, exception handling is done here.

        """
        try:
            self.finish_request(request, client_address)
            self.shutdown_request(request)
        except:
            self.handle_error(request, client_address)
            self.shutdown_request(request)

    def process_request(self, request, client_address):
        """Start a new thread to process the request."""
        t = threading.Thread(target = self.process_request_thread, # @UndefinedVariable
                             args = (request, client_address))
        # Thread daemon-ness is controlled by the class attribute above.
        t.daemon = self.daemon_threads
        t.start()
|
||||
|
||||
# Ready-made server classes combining a concurrency mix-in with a
# transport. The mix-in must come first so its process_request()
# overrides the synchronous one from BaseServer.
class ForkingUDPServer(ForkingMixIn, UDPServer): pass
class ForkingTCPServer(ForkingMixIn, TCPServer): pass

class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
|
||||
if hasattr(socket, 'AF_UNIX'):

    # Unix-domain variants, defined only where the platform provides
    # AF_UNIX (i.e. not on Windows).
    class UnixStreamServer(TCPServer):
        address_family = socket.AF_UNIX # @UndefinedVariable

    class UnixDatagramServer(UDPServer):
        address_family = socket.AF_UNIX # @UndefinedVariable

    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass

    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
|
||||
class BaseRequestHandler:

    """Base class for request handler classes.

    One instance is created per request to be handled. The constructor
    stores the request, the client address and the owning server on the
    instance, then runs the setup() / handle() / finish() sequence. To
    implement a specific service, derive a class that overrides
    handle() (and, if needed, setup() and finish()).

    Inside handle(), the request is available as self.request, the
    client address as self.client_address, and the server instance (for
    per-server information) as self.server. A fresh instance serves
    each request, so handle() may set arbitrary instance variables.
    """

    def __init__(self, request, client_address, server):
        self.request = request
        self.client_address = client_address
        self.server = server
        self.setup()
        try:
            self.handle()
        finally:
            # finish() always runs, even when handle() raises.
            self.finish()

    def setup(self):
        """Hook called before handle(); default does nothing."""
        pass

    def handle(self):
        """Service the request; default does nothing."""
        pass

    def finish(self):
        """Hook called after handle(); default does nothing."""
        pass
|
||||
|
||||
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
||||
|
||||
class StreamRequestHandler(BaseRequestHandler):

    """Define self.rfile and self.wfile for stream sockets."""

    # Default buffer sizes for rfile, wfile.
    # We default rfile to buffered because otherwise it could be
    # really slow for large data (a getc() call per byte); we make
    # wfile unbuffered because (a) often after a write() we want to
    # read and we need to flush the line; (b) big writes to unbuffered
    # files are typically optimized by stdio even when big reads
    # aren't.
    rbufsize = -1
    wbufsize = 0

    # A timeout to apply to the request socket, if not None.
    timeout = None

    # Disable nagle algorithm for this socket, if True.
    # Use only when wbufsize != 0, to avoid small packets.
    disable_nagle_algorithm = False

    def setup(self):
        """Wrap the connection socket in buffered rfile/wfile objects."""
        self.connection = self.request
        if self.timeout is not None:
            self.connection.settimeout(self.timeout)
        if self.disable_nagle_algorithm:
            self.connection.setsockopt(socket.IPPROTO_TCP,
                                       socket.TCP_NODELAY, True)
        self.rfile = self.connection.makefile('rb', self.rbufsize)
        self.wfile = self.connection.makefile('wb', self.wbufsize)

    def finish(self):
        """Flush and close the file objects (not the socket itself)."""
        if not self.wfile.closed:
            self.wfile.flush()
        self.wfile.close()
        self.rfile.close()
|
||||
|
||||
class DatagramRequestHandler(BaseRequestHandler):

    # XXX Regrettably, I cannot get this working on Linux;
    # s.recvfrom() doesn't return a meaningful client address.

    """Define self.rfile and self.wfile for datagram sockets."""

    def setup(self):
        # cStringIO is the fast C implementation on Python 2; fall back
        # to the pure-Python StringIO where it is unavailable.
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        # The "request" delivered by UDPServer.get_request() is the
        # (payload, socket) pair.
        self.packet, self.socket = self.request
        self.rfile = StringIO(self.packet)
        self.wfile = StringIO()

    def finish(self):
        # Send everything handle() wrote back to the client as a single
        # datagram.
        self.socket.sendto(self.wfile.getvalue(), self.client_address)
25
ptvsd/pydevd/_pydev_imps/_pydev_execfile.py
Normal file
25
ptvsd/pydevd/_pydev_imps/_pydev_execfile.py
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
#We must redefine it in Py3k if it's not already there
def execfile(file, glob=None, loc=None):
    """Execute the Python source in *file* (Python 3 execfile replacement).

    Reads the script honoring its source-encoding declaration, compiles
    it (so the filename is attached, which matters in debug mode), and
    executes it.

    :param file: path of the script to run.
    :param glob: globals dict; defaults to the caller's globals.
    :param loc: locals dict; defaults to *glob*.
    """
    if glob is None:
        import sys
        # Frame of execfile's caller.
        glob = sys._getframe().f_back.f_globals
    if loc is None:
        loc = glob

    # It seems that the best way is using tokenize.open(): http://code.activestate.com/lists/python-dev/131251/
    # (but tokenize.open() is only available for python 3.2)
    import tokenize
    if hasattr(tokenize, 'open'):
        # version 3.2+
        stream = tokenize.open(file) # @UndefinedVariable
    else:
        # version 3.0 or 3.1: detect the encoding ourselves.
        # Bug fix: close the binary probe handle instead of leaking it.
        probe = open(file, mode="rb")
        try:
            encoding = tokenize.detect_encoding(probe.readline)[0]
        finally:
            probe.close()
        stream = open(file, encoding=encoding)
    try:
        contents = stream.read()
    finally:
        stream.close()

    #execute the script (note: it's important to compile first to have the filename set in debug mode)
    exec(compile(contents + "\n", file, 'exec'), glob, loc)
||||
788
ptvsd/pydevd/_pydev_imps/_pydev_inspect.py
Normal file
788
ptvsd/pydevd/_pydev_imps/_pydev_inspect.py
Normal file
|
|
@ -0,0 +1,788 @@
|
|||
"""Get useful information from live Python objects.
|
||||
|
||||
This module encapsulates the interface provided by the internal special
|
||||
attributes (func_*, co_*, im_*, tb_*, etc.) in a friendlier fashion.
|
||||
It also provides some help for examining source code and class layout.
|
||||
|
||||
Here are some of the useful functions provided by this module:
|
||||
|
||||
ismodule(), isclass(), ismethod(), isfunction(), istraceback(),
|
||||
isframe(), iscode(), isbuiltin(), isroutine() - check object types
|
||||
getmembers() - get members of an object that satisfy a given condition
|
||||
|
||||
getfile(), getsourcefile(), getsource() - find an object's source code
|
||||
getdoc(), getcomments() - get documentation on an object
|
||||
getmodule() - determine the module that an object came from
|
||||
getclasstree() - arrange classes so as to represent their hierarchy
|
||||
|
||||
getargspec(), getargvalues() - get info about function arguments
|
||||
formatargspec(), formatargvalues() - format an argument spec
|
||||
getouterframes(), getinnerframes() - get info about frames
|
||||
currentframe() - get the current stack frame
|
||||
stack(), trace() - get info about frames on the stack or in a traceback
|
||||
"""
|
||||
|
||||
# This module is in the public domain. No warranties.
|
||||
|
||||
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
|
||||
__date__ = '1 Jan 2001'
|
||||
|
||||
import sys, os, types, string, re, imp, tokenize
|
||||
|
||||
# ----------------------------------------------------------- type-checking
|
||||
def ismodule(object):
    """Return true if the object is a module.

    Module objects provide these attributes:
        __doc__         documentation string
        __file__        filename (missing for built-in modules)"""
    result = isinstance(object, types.ModuleType)
    return result
||||
|
||||
def isclass(object):
    """Return true if the object is a class.

    Class objects provide these attributes:
        __doc__         documentation string
        __module__      name of module in which this class was defined"""
    # types.ClassType covers Python 2 classic classes; the __bases__
    # check also catches new-style classes and extension types.
    return isinstance(object, types.ClassType) or hasattr(object, '__bases__')
||||
|
||||
def ismethod(object):
    """Return true if the object is an instance method.

    Instance method objects provide these attributes:
        __doc__         documentation string
        __name__        name with which this method was defined
        im_class        class object in which this method belongs
        im_func         function object containing implementation of method
        im_self         instance to which this method is bound, or None"""
    result = isinstance(object, types.MethodType)
    return result
||||
|
||||
def ismethoddescriptor(object):
    """Return true if the object is a method descriptor.

    But not if ismethod() or isclass() or isfunction() are true.

    This is new in Python 2.2, and, for example, is true of int.__add__.
    An object passing this test has a __get__ attribute but not a __set__
    attribute, but beyond that the set of attributes varies.  __name__ is
    usually sensible, and __doc__ often is.

    Methods implemented via descriptors that also pass one of the other
    tests return false from the ismethoddescriptor() test, simply because
    the other tests promise more -- you can, e.g., count on having the
    im_func attribute (etc) when an object passes ismethod()."""
    has_get = hasattr(object, "__get__")
    # An object with __set__ is a data descriptor, not a method descriptor.
    has_set = hasattr(object, "__set__")
    return (has_get and not has_set
            and not ismethod(object)       # mutual exclusion
            and not isfunction(object)
            and not isclass(object))
||||
|
||||
def isfunction(object):
    """Return true if the object is a user-defined function.

    Function objects provide these attributes:
        __doc__         documentation string
        __name__        name with which this function was defined
        func_code       code object containing compiled function bytecode
        func_defaults   tuple of any default values for arguments
        func_doc        (same as __doc__)
        func_globals    global namespace in which this function was defined
        func_name       (same as __name__)"""
    result = isinstance(object, types.FunctionType)
    return result
||||
|
||||
def istraceback(object):
    """Return true if the object is a traceback.

    Traceback objects provide these attributes:
        tb_frame        frame object at this level
        tb_lasti        index of last attempted instruction in bytecode
        tb_lineno       current line number in Python source code
        tb_next         next inner traceback object (called by this level)"""
    result = isinstance(object, types.TracebackType)
    return result
||||
|
||||
def isframe(object):
    """Return true if the object is a frame object.

    Frame objects carry interpreter state for one call and provide,
    among others: f_back, f_builtins, f_code, f_globals, f_locals,
    f_lasti, f_lineno, f_trace and the per-frame exception attributes."""
    frame_type = types.FrameType
    return isinstance(object, frame_type)
||||
|
||||
def iscode(object):
    """Return true if the object is a code object.

    Code objects provide attributes such as co_argcount, co_code,
    co_consts, co_filename, co_firstlineno, co_flags, co_lnotab,
    co_name, co_names, co_nlocals, co_stacksize and co_varnames."""
    code_type = types.CodeType
    return isinstance(object, code_type)
||||
|
||||
def isbuiltin(object):
    """Return true if the object is a built-in function or method.

    Built-in functions and methods provide these attributes:
        __doc__         documentation string
        __name__        original name of this function or method
        __self__        instance to which a method is bound, or None"""
    result = isinstance(object, types.BuiltinFunctionType)
    return result
||||
|
||||
def isroutine(object):
    """Return true if the object is any kind of function or method."""
    # Same checks, same order, as the original or-chain.
    for check in (isbuiltin, isfunction, ismethod, ismethoddescriptor):
        if check(object):
            return True
    return False
||||
|
||||
def getmembers(object, predicate=None):
    """Return all members of an object as (name, value) pairs sorted by name.
    Optionally, only return members that satisfy a given predicate."""
    pairs = [(name, getattr(object, name)) for name in dir(object)]
    if predicate:
        pairs = [(name, value) for name, value in pairs if predicate(value)]
    pairs.sort()
    return pairs
|
||||
|
||||
def classify_class_attrs(cls):
    """Return list of attribute-descriptor tuples.

    For each name in dir(cls), the return list contains a 4-tuple
    with these elements:

    0. The name (a string).

    1. The kind of attribute this is, one of these strings:
           'class method'    created via classmethod()
           'static method'   created via staticmethod()
           'property'        created via property()
           'method'          any other flavor of method
           'data'            not a method

    2. The class which defined this attribute (a class).

    3. The object as obtained directly from the defining class's
       __dict__, not via getattr.  This is especially important for
       data attributes:  C.data is just a data object, but
       C.__dict__['data'] may be a data descriptor with additional
       info, like a __doc__ string.
    """

    mro = getmro(cls)
    names = dir(cls)
    result = []
    for name in names:
        # Get the object associated with the name.
        # Getting an obj from the __dict__ sometimes reveals more than
        # using getattr.  Static and class methods are dramatic examples.
        if name in cls.__dict__:
            obj = cls.__dict__[name]
        else:
            obj = getattr(cls, name)

        # Figure out where it was defined.
        homecls = getattr(obj, "__objclass__", None)
        if homecls is None:
            # search the dicts.
            for base in mro:
                if name in base.__dict__:
                    homecls = base
                    break

        # Get the object again, in order to get it from the defining
        # __dict__ instead of via getattr (if possible).
        if homecls is not None and name in homecls.__dict__:
            obj = homecls.__dict__[name]

        # Also get the object via getattr.
        obj_via_getattr = getattr(cls, name)

        # Classify the object. Raw staticmethod/classmethod/property
        # wrappers are recognized first; otherwise the getattr view
        # decides between method and plain data.
        if isinstance(obj, staticmethod):
            kind = "static method"
        elif isinstance(obj, classmethod):
            kind = "class method"
        elif isinstance(obj, property):
            kind = "property"
        elif (ismethod(obj_via_getattr) or
              ismethoddescriptor(obj_via_getattr)):
            kind = "method"
        else:
            kind = "data"

        result.append((name, kind, homecls, obj))

    return result
||||
|
||||
# ----------------------------------------------------------- class helpers
|
||||
def _searchbases(cls, accum):
|
||||
# Simulate the "classic class" search order.
|
||||
if cls in accum:
|
||||
return
|
||||
accum.append(cls)
|
||||
for base in cls.__bases__:
|
||||
_searchbases(base, accum)
|
||||
|
||||
def getmro(cls):
    "Return tuple of base classes (including cls) in method resolution order."
    mro = getattr(cls, "__mro__", None)
    if mro is not None:
        # New-style classes carry their MRO directly.
        return mro
    # Classic classes: simulate depth-first, left-to-right search.
    collected = []
    _searchbases(cls, collected)
    return tuple(collected)
|
||||
|
||||
# -------------------------------------------------- source code extraction
|
||||
def indentsize(line):
    """Return the indent size, in spaces, at the start of a line of text."""
    # Python 2 string-module functions; expand tabs before measuring.
    expline = string.expandtabs(line)
    return len(expline) - len(string.lstrip(expline))
|
||||
|
||||
def getdoc(object):
    """Get the documentation string for an object.

    All tabs are expanded to spaces.  To clean up docstrings that are
    indented to line up with blocks of code, any whitespace that can be
    uniformly removed from the second line onwards is removed."""
    try:
        doc = object.__doc__
    except AttributeError:
        return None
    if not isinstance(doc, (str, unicode)):
        return None
    try:
        lines = string.split(string.expandtabs(doc), '\n')
    except UnicodeError:
        return None
    else:
        # Find the smallest indentation of the non-blank continuation
        # lines, then strip that margin from every line after the first.
        margin = None
        for line in lines[1:]:
            content = len(string.lstrip(line))
            if not content: continue
            indent = len(line) - content
            if margin is None: margin = indent
            else: margin = min(margin, indent)
        if margin is not None:
            for i in range(1, len(lines)): lines[i] = lines[i][margin:]
        return string.join(lines, '\n')
|
||||
|
||||
def getfile(object):
    """Work out which source or compiled file an object was defined in.

    Accepts a module, class, method, function, traceback, frame or code
    object; raises TypeError for anything else or for built-ins with no
    file."""
    if ismodule(object):
        if hasattr(object, '__file__'):
            return object.__file__
        raise TypeError, 'arg is a built-in module'
    if isclass(object):
        object = sys.modules.get(object.__module__)
        if hasattr(object, '__file__'):
            return object.__file__
        raise TypeError, 'arg is a built-in class'
    # Unwrap successively: method -> function -> code and
    # traceback -> frame -> code; the code object carries co_filename.
    if ismethod(object):
        object = object.im_func
    if isfunction(object):
        object = object.func_code
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        return object.co_filename
    raise TypeError, 'arg is not a module, class, method, ' \
                     'function, traceback, frame, or code object'
|
||||
|
||||
def getmoduleinfo(path):
    """Get the module name, suffix, mode, and module type for a given file.

    Returns None (implicitly) when the filename matches no known import
    suffix."""
    filename = os.path.basename(path)
    # Python 2 tuple-parameter lambda; negated lengths make sort()
    # order the longest suffixes first.
    suffixes = map(lambda (suffix, mode, mtype):
                   (-len(suffix), suffix, mode, mtype), imp.get_suffixes())
    suffixes.sort() # try longest suffixes first, in case they overlap
    for neglen, suffix, mode, mtype in suffixes:
        if filename[neglen:] == suffix:
            return filename[:neglen], suffix, mode, mtype
|
||||
|
||||
def getmodulename(path):
    """Return the module name for a given file, or None."""
    info = getmoduleinfo(path)
    return info[0] if info else None
|
||||
|
||||
def getsourcefile(object):
    """Return the Python source file an object was defined in, if it exists.

    Returns None (implicitly) for binary-only files or when the mapped
    source file does not exist on disk."""
    filename = getfile(object)
    # Map compiled files back to their .py source.
    if string.lower(filename[-4:]) in ['.pyc', '.pyo']:
        filename = filename[:-4] + '.py'
    for suffix, mode, kind in imp.get_suffixes():
        if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
            # Looks like a binary file.  We want to only return a text file.
            return None
    if os.path.exists(filename):
        return filename
|
||||
|
||||
def getabsfile(object):
    """Return an absolute path to the source or compiled file for an object.

    The idea is for each object to have a unique origin, so this routine
    normalizes the result as much as possible."""
    path = getsourcefile(object) or getfile(object)
    return os.path.normcase(os.path.abspath(path))
|
||||
|
||||
# Cache mapping absolute file name -> module name, filled lazily by
# getmodule().
modulesbyfile = {}

def getmodule(object):
    """Return the module an object was defined in, or None if not found."""
    if ismodule(object):
        return object
    if isclass(object):
        return sys.modules.get(object.__module__)
    try:
        file = getabsfile(object)
    except TypeError:
        return None
    if modulesbyfile.has_key(file):
        return sys.modules[modulesbyfile[file]]
    # Rebuild the file -> module cache from the currently loaded modules.
    for module in sys.modules.values():
        if hasattr(module, '__file__'):
            modulesbyfile[getabsfile(module)] = module.__name__
    if modulesbyfile.has_key(file):
        return sys.modules[modulesbyfile[file]]
    # Fall back to looking the object up by name in __main__ and
    # __builtin__.
    main = sys.modules['__main__']
    if hasattr(main, object.__name__):
        mainobject = getattr(main, object.__name__)
        if mainobject is object:
            return main
    builtin = sys.modules['__builtin__']
    if hasattr(builtin, object.__name__):
        builtinobject = getattr(builtin, object.__name__)
        if builtinobject is object:
            return builtin
|
||||
|
||||
def findsource(object):
    """Return the entire source file and starting line number for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a list of all the lines
    in the file and the line number indexes a line in that list.  An IOError
    is raised if the source code cannot be retrieved."""
    try:
        file = open(getsourcefile(object))
    except (TypeError, IOError):
        raise IOError, 'could not get source code'
    lines = file.readlines()
    file.close()

    if ismodule(object):
        return lines, 0

    if isclass(object):
        # Scan the file for the "class <name>" statement.
        name = object.__name__
        pat = re.compile(r'^\s*class\s*' + name + r'\b')
        for i in range(len(lines)):
            if pat.match(lines[i]): return lines, i
        else: raise IOError, 'could not find class definition'

    # Unwrap to a code object, then search backwards from
    # co_firstlineno for the def/lambda line.
    if ismethod(object):
        object = object.im_func
    if isfunction(object):
        object = object.func_code
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        if not hasattr(object, 'co_firstlineno'):
            raise IOError, 'could not find function definition'
        lnum = object.co_firstlineno - 1
        pat = re.compile(r'^(\s*def\s)|(.*\slambda(:|\s))')
        while lnum > 0:
            if pat.match(lines[lnum]): break
            lnum = lnum - 1
        return lines, lnum
    raise IOError, 'could not find code object'
|
||||
|
||||
def getcomments(object):
    """Get lines of comments immediately preceding an object's source code.

    Returns None when the source is unavailable or no comment block is
    found."""
    try: lines, lnum = findsource(object)
    except IOError: return None

    if ismodule(object):
        # Look for a comment block at the top of the file.
        start = 0
        if lines and lines[0][:2] == '#!': start = 1
        while start < len(lines) and string.strip(lines[start]) in ['', '#']:
            start = start + 1
        if start < len(lines) and lines[start][:1] == '#':
            comments = []
            end = start
            while end < len(lines) and lines[end][:1] == '#':
                comments.append(string.expandtabs(lines[end]))
                end = end + 1
            return string.join(comments, '')

    # Look for a preceding block of comments at the same indentation.
    elif lnum > 0:
        indent = indentsize(lines[lnum])
        end = lnum - 1
        if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \
            indentsize(lines[end]) == indent:
            comments = [string.lstrip(string.expandtabs(lines[end]))]
            if end > 0:
                # Walk upward while the comment block continues at the
                # same indentation.
                end = end - 1
                comment = string.lstrip(string.expandtabs(lines[end]))
                while comment[:1] == '#' and indentsize(lines[end]) == indent:
                    comments[:0] = [comment]
                    end = end - 1
                    if end < 0: break
                    comment = string.lstrip(string.expandtabs(lines[end]))
            # Trim decorative '#'-only lines from both ends.
            while comments and string.strip(comments[0]) == '#':
                comments[:1] = []
            while comments and string.strip(comments[-1]) == '#':
                comments[-1:] = []
            return string.join(comments, '')
|
||||
|
||||
class ListReader:
    """Provide a readline() method to return lines from a list of strings."""

    def __init__(self, lines):
        self.lines = lines
        self.index = 0

    def readline(self):
        """Return the next stored line, or '' once the list is exhausted."""
        if self.index >= len(self.lines):
            return ''
        line = self.lines[self.index]
        self.index += 1
        return line
|
||||
|
||||
# Raised by BlockFinder.tokeneater to signal that the code block has
# ended; args[0] carries the block's last line number.
class EndOfBlock(Exception): pass
|
||||
|
||||
class BlockFinder:
    """Provide a tokeneater() method to detect the end of a code block."""
    def __init__(self):
        # Current indentation depth, whether the def/class NAME token has
        # been seen yet, and the line number of the last logical NEWLINE.
        self.indent = 0
        self.started = 0
        self.last = 0

    # Python 2 tuple-parameter syntax: (srow, scol) / (erow, ecol)
    # unpack the token's start and end positions.
    def tokeneater(self, type, token, (srow, scol), (erow, ecol), line):
        if not self.started:
            if type == tokenize.NAME: self.started = 1
        elif type == tokenize.NEWLINE:
            self.last = srow
        elif type == tokenize.INDENT:
            self.indent = self.indent + 1
        elif type == tokenize.DEDENT:
            self.indent = self.indent - 1
            # Dedenting back to the top level ends the block; signal the
            # last complete line number via the exception payload.
            if self.indent == 0: raise EndOfBlock, self.last
        elif type == tokenize.NAME and scol == 0:
            raise EndOfBlock, self.last
|
||||
|
||||
def getblock(lines):
    """Extract the block of code at the top of the given list of lines."""
    try:
        # BlockFinder raises EndOfBlock (carrying the last line number)
        # once the block's final DEDENT or a new top-level NAME is seen.
        tokenize.tokenize(ListReader(lines).readline, BlockFinder().tokeneater)
    except EndOfBlock, eob:
        return lines[:eob.args[0]]
    # Fooling the indent/dedent logic implies a one-line definition
    return lines[:1]
|
||||
|
||||
def getsourcelines(object):
    """Return a list of source lines and starting line number for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object. The source code is returned as a list of the lines
    corresponding to the object and the line number indicates where in the
    original source file the first line of code was found. An IOError is
    raised if the source code cannot be retrieved."""
    lines, lnum = findsource(object)

    # Whole modules keep every line; anything narrower is trimmed to its
    # own block, and the line number is reported 1-based.
    if ismodule(object): return lines, 0
    else: return getblock(lines[lnum:]), lnum + 1
|
||||
|
||||
def getsource(object):
    """Return the text of the source code for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object. The source code is returned as a single string. An
    IOError is raised if the source code cannot be retrieved."""
    lines, lnum = getsourcelines(object)
    # string.join(lines, '') is the Python 2 spelling of ''.join(lines).
    return string.join(lines, '')
|
||||
|
||||
# --------------------------------------------------- class tree extraction
|
||||
def walktree(classes, children, parent):
    """Recursive helper function for getclasstree()."""
    results = []
    # Sort classes by name (Python 2 cmp()-based sort in vendored code).
    classes.sort(lambda a, b: cmp(a.__name__, b.__name__))
    for c in classes:
        results.append((c, c.__bases__))
        # dict.has_key() is the Python 2 membership test for dicts.
        if children.has_key(c):
            # Nest the subtree for this class's known subclasses.
            results.append(walktree(children[c], children, c))
    return results
|
||||
|
||||
def getclasstree(classes, unique=0):
    """Arrange the given list of classes into a hierarchy of nested lists.

    Where a nested list appears, it contains classes derived from the class
    whose entry immediately precedes the list. Each entry is a 2-tuple
    containing a class and a tuple of its base classes. If the 'unique'
    argument is true, exactly one entry appears in the returned structure
    for each class in the given list. Otherwise, classes using multiple
    inheritance and their descendants will appear multiple times."""
    children = {}
    roots = []
    for c in classes:
        if c.__bases__:
            # Record c under each of its bases; with 'unique' set, stop at
            # the first base that is itself in the input list.
            for parent in c.__bases__:
                if not children.has_key(parent):
                    children[parent] = []
                children[parent].append(c)
                if unique and parent in classes: break
        elif c not in roots:
            roots.append(c)
    # Parents that are not themselves in 'classes' become extra roots so
    # every listed class is reachable from the returned tree.
    for parent in children.keys():
        if parent not in classes:
            roots.append(parent)
    return walktree(roots, children, None)
|
||||
|
||||
# ------------------------------------------------ argument list extraction
|
||||
# These constants are from Python's compile.h.
# Bit flags tested against code.co_flags (see getargs() below).
CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 1, 2, 4, 8
|
||||
|
||||
def getargs(co):
    """Get information about the arguments accepted by a code object.

    Three things are returned: (args, varargs, varkw), where 'args' is
    a list of argument names (possibly containing nested lists), and
    'varargs' and 'varkw' are the names of the * and ** arguments or None."""
    if not iscode(co): raise TypeError, 'arg is not a code object'

    nargs = co.co_argcount
    names = co.co_varnames
    args = list(names[:nargs])
    step = 0

    # The following acrobatics are for anonymous (tuple) arguments.
    if not sys.platform.startswith('java'):#Jython doesn't have co_code
        code = co.co_code
        import dis
        for i in range(nargs):
            # Anonymous (tuple) arguments show up with '' or '.N' names.
            if args[i][:1] in ['', '.']:
                stack, remain, count = [], [], []
                # Scan the bytecode for the UNPACK_*/STORE_FAST sequence
                # the compiler emits to unpack the tuple argument.
                # (Python 2 only: co_code items are 1-char strings, hence
                # the ord() calls; opargs are 2-byte little-endian.)
                while step < len(code):
                    op = ord(code[step])
                    step = step + 1
                    if op >= dis.HAVE_ARGUMENT:
                        opname = dis.opname[op]
                        value = ord(code[step]) + ord(code[step + 1]) * 256
                        step = step + 2
                        if opname in ['UNPACK_TUPLE', 'UNPACK_SEQUENCE']:
                            # Open a new nesting level with 'value' slots.
                            remain.append(value)
                            count.append(value)
                        elif opname == 'STORE_FAST':
                            stack.append(names[value])
                            remain[-1] = remain[-1] - 1
                            # Fold each completed nesting level back into a
                            # nested list on the stack.
                            while remain[-1] == 0:
                                remain.pop()
                                size = count.pop()
                                stack[-size:] = [stack[-size:]]
                                if not remain: break
                                remain[-1] = remain[-1] - 1
                            if not remain: break
                args[i] = stack[0]

    varargs = None
    if co.co_flags & CO_VARARGS:
        # The * argument's name follows the positional arguments.
        varargs = co.co_varnames[nargs]
        nargs = nargs + 1
    varkw = None
    if co.co_flags & CO_VARKEYWORDS:
        # The ** argument's name comes after the * argument, if any.
        varkw = co.co_varnames[nargs]
    return args, varargs, varkw
|
||||
|
||||
def getargspec(func):
    """Get the names and default values of a function's arguments.

    A tuple of four things is returned: (args, varargs, varkw, defaults).
    'args' is a list of the argument names (it may contain nested lists).
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'defaults' is an n-tuple of the default values of the last n arguments."""
    if ismethod(func):
        # Unwrap the underlying function object from a method
        # (im_func / func_code / func_defaults are Python 2 names).
        func = func.im_func
    if not isfunction(func): raise TypeError, 'arg is not a Python function'
    args, varargs, varkw = getargs(func.func_code)
    return args, varargs, varkw, func.func_defaults
|
||||
|
||||
def getargvalues(frame):
    """Get information about arguments passed into a particular frame.

    A tuple of four things is returned: (args, varargs, varkw, locals).
    'args' is a list of the argument names (it may contain nested lists).
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'locals' is the locals dictionary of the given frame."""
    # Delegate the signature analysis to getargs() and append the frame's
    # locals so callers can look the argument values up by name.
    arg_info = getargs(frame.f_code)
    return arg_info + (frame.f_locals,)
|
||||
|
||||
def joinseq(seq):
    # Format a sequence of name strings as a Python tuple display; a
    # one-element sequence keeps the trailing comma, e.g. "('a',)".
    # string.join(seq, ', ') is the Python 2 spelling of ', '.join(seq).
    if len(seq) == 1:
        return '(' + seq[0] + ',)'
    else:
        return '(' + string.join(seq, ', ') + ')'
|
||||
|
||||
def strseq(object, convert, join=joinseq):
    """Recursively walk a sequence, stringifying each element."""
    # types.ListType / types.TupleType are the Python 2 names for
    # list and tuple.
    if type(object) in [types.ListType, types.TupleType]:
        # Bind convert/join as lambda defaults (pre-nested-scopes style)
        # so the recursion carries the same formatting callbacks down.
        return join(map(lambda o, c=convert, j=join: strseq(o, c, j), object))
    else:
        return convert(object)
|
||||
|
||||
def formatargspec(args, varargs=None, varkw=None, defaults=None,
                  formatarg=str,
                  formatvarargs=lambda name: '*' + name,
                  formatvarkw=lambda name: '**' + name,
                  formatvalue=lambda value: '=' + repr(value),
                  join=joinseq):
    """Format an argument spec from the 4 values returned by getargspec.

    The first four arguments are (args, varargs, varkw, defaults). The
    other four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings. The ninth
    argument is an optional function to format the sequence of arguments."""
    specs = []
    if defaults:
        # Index of the first argument that has a default value; defaults
        # align with the *last* len(defaults) arguments.
        firstdefault = len(args) - len(defaults)
    for i in range(len(args)):
        spec = strseq(args[i], formatarg, join)
        if defaults and i >= firstdefault:
            spec = spec + formatvalue(defaults[i - firstdefault])
        specs.append(spec)
    if varargs:
        specs.append(formatvarargs(varargs))
    if varkw:
        specs.append(formatvarkw(varkw))
    # string.join is the Python 2 spelling of ', '.join(specs).
    return '(' + string.join(specs, ', ') + ')'
|
||||
|
||||
def formatargvalues(args, varargs, varkw, locals,
                    formatarg=str,
                    formatvarargs=lambda name: '*' + name,
                    formatvarkw=lambda name: '**' + name,
                    formatvalue=lambda value: '=' + repr(value),
                    join=joinseq):
    """Format an argument spec from the 4 values returned by getargvalues.

    The first four arguments are (args, varargs, varkw, locals). The
    next four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings. The ninth
    argument is an optional function to format the sequence of arguments."""
    # Render "name=value" by looking the name up in the frame's locals;
    # callbacks bound as defaults (pre-nested-scopes style).
    def convert(name, locals=locals,
                formatarg=formatarg, formatvalue=formatvalue):
        return formatarg(name) + formatvalue(locals[name])
    specs = []
    for i in range(len(args)):
        specs.append(strseq(args[i], convert, join))
    if varargs:
        specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
    if varkw:
        specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
    # string.join is the Python 2 spelling of ', '.join(specs).
    return '(' + string.join(specs, ', ') + ')'
|
||||
|
||||
# -------------------------------------------------- stack frame extraction
|
||||
def getframeinfo(frame, context=1):
    """Get information about a frame or traceback object.

    A tuple of five things is returned: the filename, the line number of
    the current line, the function name, a list of lines of context from
    the source code, and the index of the current line within that list.
    The optional second argument specifies the number of lines of context
    to return, which are centered around the current line."""
    # Deliberately disabled in this vendored copy; the upstream
    # implementation is preserved below for reference only.
    raise NotImplementedError
    # if istraceback(frame):
    #     frame = frame.tb_frame
    # if not isframe(frame):
    #     raise TypeError, 'arg is not a frame or traceback object'
    #
    # filename = getsourcefile(frame)
    # lineno = getlineno(frame)
    # if context > 0:
    #     start = lineno - 1 - context//2
    #     try:
    #         lines, lnum = findsource(frame)
    #     except IOError:
    #         lines = index = None
    #     else:
    #         start = max(start, 1)
    #         start = min(start, len(lines) - context)
    #         lines = lines[start:start+context]
    #         index = lineno - 1 - start
    # else:
    #     lines = index = None
    #
    # return (filename, lineno, frame.f_code.co_name, lines, index)
|
||||
|
||||
def getlineno(frame):
    """Get the line number from a frame object, allowing for optimization."""
    # Written by Marc-André Lemburg; revised by Jim Hugunin and Fredrik Lundh.
    lineno = frame.f_lineno
    code = frame.f_code
    if hasattr(code, 'co_lnotab'):
        # Decode co_lnotab's (address increment, line increment) byte
        # pairs, accumulating until we pass the current instruction
        # (frame.f_lasti).  Python 2 only: indexing a str yields 1-char
        # strings, hence the ord() calls.
        table = code.co_lnotab
        lineno = code.co_firstlineno
        addr = 0
        for i in range(0, len(table), 2):
            addr = addr + ord(table[i])
            if addr > frame.f_lasti: break
            lineno = lineno + ord(table[i + 1])
    return lineno
|
||||
|
||||
def getouterframes(frame, context=1):
    """Get a list of records for a frame and all higher (calling) frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context."""
    # NOTE(review): relies on getframeinfo(), which raises
    # NotImplementedError in this vendored copy — so this currently
    # raises as well if ever called.
    framelist = []
    while frame:
        framelist.append((frame,) + getframeinfo(frame, context))
        frame = frame.f_back
    return framelist
|
||||
|
||||
def getinnerframes(tb, context=1):
    """Get a list of records for a traceback's frame and all lower frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context."""
    # NOTE(review): relies on getframeinfo(), which raises
    # NotImplementedError in this vendored copy — so this currently
    # raises as well if ever called.
    framelist = []
    while tb:
        framelist.append((tb.tb_frame,) + getframeinfo(tb, context))
        tb = tb.tb_next
    return framelist
|
||||
|
||||
def currentframe():
    """Return the frame object for the caller's stack frame."""
    try:
        # Python 2-only string exception, raised purely to materialize a
        # traceback from which the caller's frame can be reached.
        raise 'catch me'
    except:
        # sys.exc_traceback is Python 2 only (hence the IDE hint below).
        return sys.exc_traceback.tb_frame.f_back #@UndefinedVariable

# Prefer the C-level implementation when the interpreter provides one
# (CPython); the fallback above is for interpreters without _getframe.
if hasattr(sys, '_getframe'): currentframe = sys._getframe
|
||||
|
||||
def stack(context=1):
    """Return a list of records for the stack above the caller's frame."""
    # f_back skips this function's own frame.
    return getouterframes(currentframe().f_back, context)
|
||||
|
||||
def trace(context=1):
    """Return a list of records for the stack below the current exception."""
    # sys.exc_traceback is Python 2 only (hence the IDE hint).
    return getinnerframes(sys.exc_traceback, context) #@UndefinedVariable
|
||||
591
ptvsd/pydevd/_pydev_imps/_pydev_pkgutil_old.py
Normal file
591
ptvsd/pydevd/_pydev_imps/_pydev_pkgutil_old.py
Normal file
|
|
@ -0,0 +1,591 @@
|
|||
"""Utilities to support packages."""
|
||||
|
||||
# NOTE: This module must remain compatible with Python 2.3, as it is shared
|
||||
# by setuptools for distribution with Python 2.3 and up.
|
||||
|
||||
import os
|
||||
import sys
|
||||
import imp
|
||||
import os.path
|
||||
from types import ModuleType
|
||||
|
||||
__all__ = [
|
||||
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
|
||||
'walk_packages', 'iter_modules', 'get_data',
|
||||
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
|
||||
]
|
||||
|
||||
def read_code(stream):
    # This helper is needed in order for the PEP 302 emulation to
    # correctly handle compiled files
    import marshal

    # A .pyc file starts with a 4-byte magic number; reject files
    # produced by a different interpreter version.
    magic = stream.read(4)
    if magic != imp.get_magic():
        return None

    stream.read(4) # Skip timestamp
    # The remainder of the stream is the marshalled code object.
    return marshal.load(stream)
|
||||
|
||||
|
||||
def simplegeneric(func):
    """Make a trivial single-dispatch generic function.

    The returned wrapper dispatches on the type of its first positional
    argument: if a handler was registered (via ``wrapper.register``) for
    that type or one of its bases, the handler is invoked; otherwise the
    original *func* serves as the default implementation.
    """
    registry = {}

    def wrapper(*args, **kw):
        ob = args[0]
        try:
            cls = ob.__class__
        except AttributeError:
            cls = type(ob)
        try:
            mro = cls.__mro__
        except AttributeError:
            # Old-style class (Python 2): derive a temporary new-style
            # subclass just to borrow a method resolution order.
            try:
                class cls(cls, object):
                    pass
                mro = cls.__mro__[1:]
            except TypeError:
                mro = object,  # must be an ExtensionClass or some such :(
        for candidate in mro:
            handler = registry.get(candidate)
            if handler is not None:
                return handler(*args, **kw)
        return func(*args, **kw)

    try:
        wrapper.__name__ = func.__name__
    except (TypeError, AttributeError):
        pass  # Python 2.3 doesn't allow functions to be renamed

    def register(typ, func=None):
        # Support both register(typ, fn) and @register(typ) decorator use.
        if func is None:
            return lambda f: register(typ, f)
        registry[typ] = func
        return func

    wrapper.__dict__ = func.__dict__
    wrapper.__doc__ = func.__doc__
    wrapper.register = register
    return wrapper
|
||||
|
||||
|
||||
def walk_packages(path=None, prefix='', onerror=None):
    """Yields (module_loader, name, ispkg) for all modules recursively
    on path, or, if path is None, all accessible modules.

    'path' should be either None or a list of paths to look for
    modules in.

    'prefix' is a string to output on the front of every module name
    on output.

    Note that this function must import all *packages* (NOT all
    modules!) on the given path, in order to access the __path__
    attribute to find submodules.

    'onerror' is a function which gets called with one argument (the
    name of the package which was being imported) if any exception
    occurs while trying to import a package. If no onerror function is
    supplied, ImportErrors are caught and ignored, while all other
    exceptions are propagated, terminating the search.

    Examples:

    # list all modules python can access
    walk_packages()

    # list all submodules of ctypes
    walk_packages(ctypes.__path__, ctypes.__name__+'.')
    """

    def seen(p, m={}):
        # Deliberate mutable default: 'm' memoizes path items already
        # traversed during this walk_packages() call (the default is
        # re-evaluated each time walk_packages itself is called).
        if p in m:
            return True
        m[p] = True

    for importer, name, ispkg in iter_modules(path, prefix):
        yield importer, name, ispkg

        if ispkg:
            # Packages must be imported to expose their __path__.
            try:
                __import__(name)
            except ImportError:
                if onerror is not None:
                    onerror(name)
            except Exception:
                # Non-ImportError failures propagate unless the caller
                # asked to be notified instead.
                if onerror is not None:
                    onerror(name)
                else:
                    raise
            else:
                path = getattr(sys.modules[name], '__path__', None) or []

                # don't traverse path items we've seen before
                path = [p for p in path if not seen(p)]

                for item in walk_packages(path, name+'.', onerror):
                    yield item
|
||||
|
||||
|
||||
def iter_modules(path=None, prefix=''):
    """Yields (module_loader, name, ispkg) for all submodules on path,
    or, if path is None, all top-level modules on sys.path.

    'path' should be either None or a list of paths to look for
    modules in.

    'prefix' is a string to output on the front of every module name
    on output.
    """

    if path is None:
        importers = iter_importers()
    else:
        importers = map(get_importer, path)

    # Deduplicate across importers: the first one providing a name wins.
    yielded = {}
    for i in importers:
        for name, ispkg in iter_importer_modules(i, prefix):
            if name not in yielded:
                yielded[name] = 1
                yield i, name, ispkg
|
||||
|
||||
|
||||
#@simplegeneric
def iter_importer_modules(importer, prefix=''):
    # Default implementation: delegate to the importer's own
    # iter_modules() when it provides one, otherwise yield nothing.
    if not hasattr(importer, 'iter_modules'):
        return []
    return importer.iter_modules(prefix)

# Applied manually rather than with decorator syntax so the module stays
# compatible with Python 2.3 (which lacks decorators).
iter_importer_modules = simplegeneric(iter_importer_modules)
|
||||
|
||||
|
||||
class ImpImporter:
    """PEP 302 Importer that wraps Python's "classic" import algorithm

    ImpImporter(dirname) produces a PEP 302 importer that searches that
    directory. ImpImporter(None) produces a PEP 302 importer that searches
    the current sys.path, plus any modules that are frozen or built-in.

    Note that ImpImporter does not currently support being used by placement
    on sys.meta_path.
    """

    def __init__(self, path=None):
        # path: directory to search, or None for the default search
        # (sys.path plus frozen/built-in modules).
        self.path = path

    def find_module(self, fullname, path=None):
        # Note: we ignore 'path' argument since it is only used via meta_path
        subname = fullname.split(".")[-1]
        if subname != fullname and self.path is None:
            # Dotted names need a concrete package directory; the default
            # importer only handles top-level modules.
            return None
        if self.path is None:
            path = None
        else:
            path = [os.path.realpath(self.path)]
        try:
            file, filename, etc = imp.find_module(subname, path)
        except ImportError:
            return None
        return ImpLoader(fullname, file, filename, etc)

    def iter_modules(self, prefix=''):
        # Yield (prefix + name, ispkg) for every module and package found
        # directly inside self.path.
        if self.path is None or not os.path.isdir(self.path):
            return

        yielded = {}
        import inspect
        try:
            filenames = os.listdir(self.path)
        except OSError:
            # ignore unreadable directories like import does
            filenames = []
        filenames.sort()  # handle packages before same-named modules

        for fn in filenames:
            modname = inspect.getmodulename(fn)
            if modname=='__init__' or modname in yielded:
                continue

            path = os.path.join(self.path, fn)
            ispkg = False

            if not modname and os.path.isdir(path) and '.' not in fn:
                # Candidate package directory: accept it only if it
                # contains an __init__ module.
                modname = fn
                try:
                    dircontents = os.listdir(path)
                except OSError:
                    # ignore unreadable directories like import does
                    dircontents = []
                for fn in dircontents:
                    subname = inspect.getmodulename(fn)
                    if subname=='__init__':
                        ispkg = True
                        break
                else:
                    continue # not a package

            if modname and '.' not in modname:
                yielded[modname] = 1
                yield prefix + modname, ispkg
|
||||
|
||||
|
||||
class ImpLoader:
    """PEP 302 Loader that wraps Python's "classic" import algorithm
    """
    # Lazily-filled caches for the compiled code object and source text.
    code = source = None

    def __init__(self, fullname, file, filename, etc):
        # 'etc' is the (suffix, mode, type) triple from imp.find_module().
        self.file = file
        self.filename = filename
        self.fullname = fullname
        self.etc = etc

    def load_module(self, fullname):
        self._reopen()
        try:
            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        finally:
            if self.file:
                self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
        return mod

    def get_data(self, pathname):
        # PEP 302 extension: return the raw bytes at the given path.
        return open(pathname, "rb").read()

    def _reopen(self):
        # Reopen self.file (closed by load_module and friends) in the mode
        # appropriate for the module type.
        if self.file and self.file.closed:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                self.file = open(self.filename, 'rU')
            elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
                self.file = open(self.filename, 'rb')

    def _fix_name(self, fullname):
        # This loader is bound to a single module; reject other names.
        if fullname is None:
            fullname = self.fullname
        elif fullname != self.fullname:
            raise ImportError("Loader for module %s cannot handle "
                              "module %s" % (self.fullname, fullname))
        return fullname

    def is_package(self, fullname):
        fullname = self._fix_name(fullname)
        return self.etc[2]==imp.PKG_DIRECTORY

    def get_code(self, fullname=None):
        fullname = self._fix_name(fullname)
        if self.code is None:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                source = self.get_source(fullname)
                self.code = compile(source, self.filename, 'exec')
            elif mod_type==imp.PY_COMPILED:
                self._reopen()
                try:
                    self.code = read_code(self.file)
                finally:
                    self.file.close()
            elif mod_type==imp.PKG_DIRECTORY:
                # Delegate to the loader of the package's __init__ module.
                self.code = self._get_delegate().get_code()
        return self.code

    def get_source(self, fullname=None):
        fullname = self._fix_name(fullname)
        if self.source is None:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                self._reopen()
                try:
                    self.source = self.file.read()
                finally:
                    self.file.close()
            elif mod_type==imp.PY_COMPILED:
                # Fall back to the matching .py file next to the .pyc
                # (drop the trailing 'c'/'o' from the filename).
                if os.path.exists(self.filename[:-1]):
                    f = open(self.filename[:-1], 'rU')
                    self.source = f.read()
                    f.close()
            elif mod_type==imp.PKG_DIRECTORY:
                self.source = self._get_delegate().get_source()
        return self.source


    def _get_delegate(self):
        # Loader for the package's __init__ module.
        return ImpImporter(self.filename).find_module('__init__')

    def get_filename(self, fullname=None):
        fullname = self._fix_name(fullname)
        mod_type = self.etc[2]
        if self.etc[2]==imp.PKG_DIRECTORY:
            return self._get_delegate().get_filename()
        elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
            return self.filename
        return None
|
||||
|
||||
|
||||
try:
    import zipimport
    from zipimport import zipimporter

    def iter_zipimport_modules(importer, prefix=''):
        # Enumerate modules by walking the zip archive's directory cache
        # rather than extracting anything.
        dirlist = zipimport._zip_directory_cache[importer.archive].keys()
        dirlist.sort()
        _prefix = importer.prefix
        plen = len(_prefix)
        yielded = {}
        import inspect
        for fn in dirlist:
            if not fn.startswith(_prefix):
                continue

            fn = fn[plen:].split(os.sep)

            # A two-component path ending in __init__.py marks a package.
            if len(fn)==2 and fn[1].startswith('__init__.py'):
                if fn[0] not in yielded:
                    yielded[fn[0]] = 1
                    yield fn[0], True

            if len(fn)!=1:
                continue

            modname = inspect.getmodulename(fn[0])
            if modname=='__init__':
                continue

            if modname and '.' not in modname and modname not in yielded:
                yielded[modname] = 1
                yield prefix + modname, False

    # Teach the generic iter_importer_modules() about zipimporter objects.
    iter_importer_modules.register(zipimporter, iter_zipimport_modules)

except ImportError:
    # zipimport is optional; without it, zip archives simply aren't
    # enumerable by iter_modules().
    pass
|
||||
|
||||
|
||||
def get_importer(path_item):
    """Retrieve a PEP 302 importer for the given path item

    The returned importer is cached in sys.path_importer_cache
    if it was newly created by a path hook.

    If there is no importer, a wrapper around the basic import
    machinery is returned. This wrapper is never inserted into
    the importer cache (None is inserted instead).

    The cache (or part of it) can be cleared manually if a
    rescan of sys.path_hooks is necessary.
    """
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        # Cache miss: ask each registered path hook in turn; a hook
        # signals "not mine" by raising ImportError.
        for path_hook in sys.path_hooks:
            try:
                importer = path_hook(path_item)
                break
            except ImportError:
                pass
        else:
            importer = None
        # setdefault avoids clobbering an entry added concurrently.
        sys.path_importer_cache.setdefault(path_item, importer)

    if importer is None:
        # No hook claimed the item: fall back to a wrapper around the
        # classic import machinery (never cached, per the docstring).
        try:
            importer = ImpImporter(path_item)
        except ImportError:
            importer = None
    return importer
|
||||
|
||||
|
||||
def iter_importers(fullname=""):
    """Yield PEP 302 importers for the given module name

    If fullname contains a '.', the importers will be for the package
    containing fullname, otherwise they will be importers for sys.meta_path,
    sys.path, and Python's "classic" import machinery, in that order. If
    the named module is in a package, that package is imported as a side
    effect of invoking this function.

    Non PEP 302 mechanisms (e.g. the Windows registry) used by the
    standard import machinery to find files in alternative locations
    are partially supported, but are searched AFTER sys.path. Normally,
    these locations are searched BEFORE sys.path, preventing sys.path
    entries from shadowing them.

    For this to cause a visible difference in behaviour, there must
    be a module or package name that is accessible via both sys.path
    and one of the non PEP 302 file system mechanisms. In this case,
    the emulation will find the former version, while the builtin
    import mechanism will find the latter.

    Items of the following types can be affected by this discrepancy:
    imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
    """
    if fullname.startswith('.'):
        raise ImportError("Relative module names not supported")
    if '.' in fullname:
        # Get the containing package's __path__
        pkg = '.'.join(fullname.split('.')[:-1])
        if pkg not in sys.modules:
            __import__(pkg)
        path = getattr(sys.modules[pkg], '__path__', None) or []
    else:
        # Top-level lookup: consult the meta path first, then sys.path.
        for importer in sys.meta_path:
            yield importer
        path = sys.path
    for item in path:
        yield get_importer(item)
    if '.' not in fullname:
        # Finally, the classic built-in/frozen module machinery.
        yield ImpImporter()
|
||||
|
||||
def get_loader(module_or_name):
    """Get a PEP 302 "loader" object for module_or_name

    If the module or package is accessible via the normal import
    mechanism, a wrapper around the relevant part of that machinery
    is returned. Returns None if the module cannot be found or imported.
    If the named module is not already imported, its containing package
    (if any) is imported, in order to establish the package __path__.

    This function uses iter_importers(), and is thus subject to the same
    limitations regarding platform-specific special import locations such
    as the Windows registry.
    """
    # A name that is already imported is resolved to its module object.
    if module_or_name in sys.modules:
        module_or_name = sys.modules[module_or_name]
    if isinstance(module_or_name, ModuleType):
        module = module_or_name
        # An already-imported module may carry its loader directly.
        loader = getattr(module, '__loader__', None)
        if loader is not None:
            return loader
        fullname = module.__name__
    else:
        fullname = module_or_name
    return find_loader(fullname)
|
||||
|
||||
def find_loader(fullname):
    """Find a PEP 302 "loader" object for fullname

    If fullname contains dots, path must be the containing package's __path__.
    Returns None if the module cannot be found or imported. This function uses
    iter_importers(), and is thus subject to the same limitations regarding
    platform-specific special import locations such as the Windows registry.
    """
    # First importer that recognizes the name wins.
    for importer in iter_importers(fullname):
        loader = importer.find_module(fullname)
        if loader is not None:
            return loader

    return None
|
||||
|
||||
|
||||
def extend_path(path, name):
    """Extend a package's path.

    Intended use is to place the following code in a package's __init__.py:

    from pkgutil import extend_path
    __path__ = extend_path(__path__, __name__)

    This will add to the package's __path__ all subdirectories of
    directories on sys.path named after the package. This is useful
    if one wants to distribute different parts of a single logical
    package as multiple directories.

    It also looks for *.pkg files beginning where * matches the name
    argument. This feature is similar to *.pth files (see site.py),
    except that it doesn't special-case lines starting with 'import'.
    A *.pkg file is trusted at face value: apart from checking for
    duplicates, all entries found in a *.pkg file are added to the
    path, regardless of whether they are exist the filesystem. (This
    is a feature.)

    If the input path is not a list (as is the case for frozen
    packages) it is returned unchanged. The input path is not
    modified; an extended copy is returned. Items are only appended
    to the copy at the end.

    It is assumed that sys.path is a sequence. Items of sys.path that
    are not (unicode or 8-bit) strings referring to existing
    directories are ignored. Unicode items of sys.path that cause
    errors when used as filenames may cause this function to raise an
    exception (in line with os.path.isdir() behavior).
    """

    if not isinstance(path, list):
        # This could happen e.g. when this is called from inside a
        # frozen package. Return the path unchanged in that case.
        return path

    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
    # Just in case os.extsep != '.'
    sname = os.extsep.join(name.split('.'))
    sname_pkg = sname + os.extsep + "pkg"
    init_py = "__init__" + os.extsep + "py"

    path = path[:] # Start with a copy of the existing path

    for dir in sys.path:
        # basestring and the 'except IOError, msg' below are Python 2
        # only (vendored module).
        if not isinstance(dir, basestring) or not os.path.isdir(dir):
            continue
        subdir = os.path.join(dir, pname)
        # XXX This may still add duplicate entries to path on
        # case-insensitive filesystems
        initfile = os.path.join(subdir, init_py)
        if subdir not in path and os.path.isfile(initfile):
            path.append(subdir)
        # XXX Is this the right thing for subpackages like zope.app?
        # It looks for a file named "zope.app.pkg"
        pkgfile = os.path.join(dir, sname_pkg)
        if os.path.isfile(pkgfile):
            try:
                f = open(pkgfile)
            except IOError, msg:
                sys.stderr.write("Can't open %s: %s\n" %
                                 (pkgfile, msg))
            else:
                # Each non-blank, non-comment line is a path entry.
                for line in f:
                    line = line.rstrip('\n')
                    if not line or line.startswith('#'):
                        continue
                    path.append(line) # Don't check for existence!
                f.close()

    return path
|
||||
|
||||
def get_data(package, resource):
    """Get a resource from a package.

    This is a wrapper round the PEP 302 loader get_data API. The package
    argument should be the name of a package, in standard module format
    (foo.bar). The resource argument should be in the form of a relative
    filename, using '/' as the path separator. The parent directory name '..'
    is not allowed, and nor is a rooted name (starting with a '/').

    The function returns a binary string, which is the contents of the
    specified resource.

    For packages located in the filesystem, which have already been imported,
    this is the rough equivalent of

    d = os.path.dirname(sys.modules[package].__file__)
    data = open(os.path.join(d, resource), 'rb').read()

    If the package cannot be located or loaded, or it uses a PEP 302 loader
    which does not support get_data(), then None is returned.
    """

    loader = get_loader(package)
    if loader is None or not hasattr(loader, 'get_data'):
        return None
    # Make sure the package is imported so its __file__ is available.
    mod = sys.modules.get(package) or loader.load_module(package)
    if mod is None or not hasattr(mod, '__file__'):
        return None

    # Modify the resource name to be compatible with the loader.get_data
    # signature - an os.path format "filename" starting with the dirname of
    # the package's __file__
    parts = resource.split('/')
    parts.insert(0, os.path.dirname(mod.__file__))
    resource_name = os.path.join(*parts)
    return loader.get_data(resource_name)
|
||||
23
ptvsd/pydevd/_pydev_imps/_pydev_saved_modules.py
Normal file
23
ptvsd/pydevd/_pydev_imps/_pydev_saved_modules.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
import sys
|
||||
IS_PY2 = sys.version_info < (3,)
|
||||
|
||||
import threading
|
||||
|
||||
import time
|
||||
|
||||
import socket
|
||||
|
||||
import select
|
||||
|
||||
if IS_PY2:
|
||||
import thread
|
||||
import Queue as _queue
|
||||
import xmlrpclib
|
||||
import SimpleXMLRPCServer as _pydev_SimpleXMLRPCServer
|
||||
import BaseHTTPServer
|
||||
else:
|
||||
import _thread as thread
|
||||
import queue as _queue
|
||||
import xmlrpc.client as xmlrpclib
|
||||
import xmlrpc.server as _pydev_SimpleXMLRPCServer
|
||||
import http.server as BaseHTTPServer
|
||||
75
ptvsd/pydevd/_pydev_imps/_pydev_sys_patch.py
Normal file
75
ptvsd/pydevd/_pydev_imps/_pydev_sys_patch.py
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
|
||||
import sys
|
||||
|
||||
|
||||
def patch_sys_module():
    """Replace sys.exc_info with a wrapper that, for ImportError, hides the
    extra traceback frame added by the debugger's plugin_import call."""

    def _wrap_exc_info(original_exc_info):
        def pydev_debugger_exc_info():
            exc_type, exc_value, exc_tb = original_exc_info()
            # We should not show the frame added by the plugin_import call:
            # drop the top traceback frame for ImportError only.
            if exc_type == ImportError and exc_tb and hasattr(exc_tb, "tb_next"):
                return exc_type, exc_value, exc_tb.tb_next
            return exc_type, exc_value, exc_tb
        return pydev_debugger_exc_info

    original = sys.exc_info
    sys.exc_info = _wrap_exc_info(original)
    # Keep a handle on the pristine exc_info (only the first time) so the
    # patch can be undone later by cancel_patches_in_sys_module().
    if not hasattr(sys, "system_exc_info"):
        sys.system_exc_info = original
|
||||
|
||||
|
||||
def patched_reload(orig_reload):
    """Wrap *orig_reload* so that reloading the ``sys`` module re-applies the
    debugger patches (a plain reload would wipe them out).

    Returns the wrapper function.  The wrapper propagates the value returned
    by *orig_reload* (the reloaded module), matching the contract of the
    builtin ``reload`` / ``importlib.reload`` — the previous version silently
    returned None, breaking the common ``mod = reload(mod)`` idiom.
    """
    def pydev_debugger_reload(module):
        reloaded = orig_reload(module)
        if module.__name__ == "sys":
            # If the sys module was reloaded we should patch it again.
            patch_sys_module()
        return reloaded
    return pydev_debugger_reload
|
||||
|
||||
|
||||
def patch_reload():
    """Install reload wrappers (see patched_reload) on every reload entry
    point this interpreter exposes: builtins.reload/imp.reload on Python 2,
    importlib.reload on Python 3.  The original callables are stashed on
    ``sys`` so cancel_patches_in_sys_module() can restore them.
    """
    if sys.version_info[0] >= 3:
        import builtins  # Py3
    else:
        import __builtin__ as builtins

    if hasattr(builtins, "reload"):
        # Python 2: patch the builtin reload and the imp module's alias.
        sys.builtin_orig_reload = builtins.reload
        builtins.reload = patched_reload(sys.builtin_orig_reload)  # @UndefinedVariable
        try:
            import imp
            sys.imp_orig_reload = imp.reload
            imp.reload = patched_reload(sys.imp_orig_reload)  # @UndefinedVariable
        except:
            # Best-effort: the imp module may not expose reload.
            pass
    else:
        # Python 3: reload lives in importlib.
        try:
            import importlib
            sys.importlib_orig_reload = importlib.reload  # @UndefinedVariable
            importlib.reload = patched_reload(sys.importlib_orig_reload)  # @UndefinedVariable
        except:
            # Best-effort: leave importlib untouched on failure.
            pass

    del builtins
|
||||
|
||||
|
||||
def cancel_patches_in_sys_module():
    """Undo everything installed by patch_sys_module()/patch_reload(),
    restoring the interpreter's original exc_info and reload hooks."""
    sys.exc_info = sys.system_exc_info  # @UndefinedVariable

    if sys.version_info[0] < 3:
        import __builtin__ as builtins
    else:
        import builtins  # Py3

    # Restore each reload hook only if patch_reload() actually saved it.
    if hasattr(sys, "builtin_orig_reload"):
        builtins.reload = sys.builtin_orig_reload
    if hasattr(sys, "imp_orig_reload"):
        import imp
        imp.reload = sys.imp_orig_reload
    if hasattr(sys, "importlib_orig_reload"):
        import importlib
        importlib.reload = sys.importlib_orig_reload

    del builtins
|
||||
541
ptvsd/pydevd/_pydev_imps/_pydev_uuid_old.py
Normal file
541
ptvsd/pydevd/_pydev_imps/_pydev_uuid_old.py
Normal file
|
|
@ -0,0 +1,541 @@
|
|||
r"""UUID objects (universally unique identifiers) according to RFC 4122.
|
||||
|
||||
This module provides immutable UUID objects (class UUID) and the functions
|
||||
uuid1(), uuid3(), uuid4(), uuid5() for generating version 1, 3, 4, and 5
|
||||
UUIDs as specified in RFC 4122.
|
||||
|
||||
If all you want is a unique ID, you should probably call uuid1() or uuid4().
|
||||
Note that uuid1() may compromise privacy since it creates a UUID containing
|
||||
the computer's network address. uuid4() creates a random UUID.
|
||||
|
||||
Typical usage:
|
||||
|
||||
>>> import uuid
|
||||
|
||||
# make a UUID based on the host ID and current time
|
||||
>>> uuid.uuid1()
|
||||
UUID('a8098c1a-f86e-11da-bd1a-00112444be1e')
|
||||
|
||||
# make a UUID using an MD5 hash of a namespace UUID and a name
|
||||
>>> uuid.uuid3(uuid.NAMESPACE_DNS, 'python.org')
|
||||
UUID('6fa459ea-ee8a-3ca4-894e-db77e160355e')
|
||||
|
||||
# make a random UUID
|
||||
>>> uuid.uuid4()
|
||||
UUID('16fd2706-8baf-433b-82eb-8c7fada847da')
|
||||
|
||||
# make a UUID using a SHA-1 hash of a namespace UUID and a name
|
||||
>>> uuid.uuid5(uuid.NAMESPACE_DNS, 'python.org')
|
||||
UUID('886313e1-3b8a-5372-9b90-0c9aee199e5d')
|
||||
|
||||
# make a UUID from a string of hex digits (braces and hyphens ignored)
|
||||
>>> x = uuid.UUID('{00010203-0405-0607-0809-0a0b0c0d0e0f}')
|
||||
|
||||
# convert a UUID to a string of hex digits in standard form
|
||||
>>> str(x)
|
||||
'00010203-0405-0607-0809-0a0b0c0d0e0f'
|
||||
|
||||
# get the raw 16 bytes of the UUID
|
||||
>>> x.bytes
|
||||
'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
|
||||
|
||||
# make a UUID from a 16-byte string
|
||||
>>> uuid.UUID(bytes=x.bytes)
|
||||
UUID('00010203-0405-0607-0809-0a0b0c0d0e0f')
|
||||
"""
|
||||
|
||||
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'

# Human-readable names for the four UUID variants (RFC 4122, section 4.1.1);
# UUID.variant returns one of these strings.
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
    'reserved for NCS compatibility', 'specified in RFC 4122',
    'reserved for Microsoft compatibility', 'reserved for future definition']
|
||||
|
||||
class UUID(object):
    """Instances of the UUID class represent UUIDs as specified in RFC 4122.
    UUID objects are immutable, hashable, and usable as dictionary keys.
    Converting a UUID to a string with str() yields something in the form
    '12345678-1234-1234-1234-123456789abc'.  The UUID constructor accepts
    five possible forms: a similar string of hexadecimal digits, or a tuple
    of six integer fields (with 32-bit, 16-bit, 16-bit, 8-bit, 8-bit, and
    48-bit values respectively) as an argument named 'fields', or a string
    of 16 bytes (with all the integer fields in big-endian order) as an
    argument named 'bytes', or a string of 16 bytes (with the first three
    fields in little-endian order) as an argument named 'bytes_le', or a
    single 128-bit integer as an argument named 'int'.

    UUIDs have these read-only attributes:

        bytes       the UUID as a 16-byte string (containing the six
                    integer fields in big-endian byte order)

        bytes_le    the UUID as a 16-byte string (with time_low, time_mid,
                    and time_hi_version in little-endian byte order)

        fields      a tuple of the six integer fields of the UUID,
                    which are also available as six individual attributes
                    and two derived attributes:

            time_low                the first 32 bits of the UUID
            time_mid                the next 16 bits of the UUID
            time_hi_version         the next 16 bits of the UUID
            clock_seq_hi_variant    the next 8 bits of the UUID
            clock_seq_low           the next 8 bits of the UUID
            node                    the last 48 bits of the UUID

            time                    the 60-bit timestamp
            clock_seq               the 14-bit sequence number

        hex         the UUID as a 32-character hexadecimal string

        int         the UUID as a 128-bit integer

        urn         the UUID as a URN as specified in RFC 4122

        variant     the UUID variant (one of the constants RESERVED_NCS,
                    RFC_4122, RESERVED_MICROSOFT, or RESERVED_FUTURE)

        version     the UUID version number (1 through 5, meaningful only
                    when the variant is RFC_4122)
    """

    # NOTE(review): this is the Python 2 flavour of the stdlib uuid module
    # ('long' / 'L' integer literals, 'cmp', string-based byte handling);
    # it is not valid Python 3 source.

    def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None,
                 int=None, version=None):
        r"""Create a UUID from either a string of 32 hexadecimal digits,
        a string of 16 bytes as the 'bytes' argument, a string of 16 bytes
        in little-endian order as the 'bytes_le' argument, a tuple of six
        integers (32-bit time_low, 16-bit time_mid, 16-bit time_hi_version,
        8-bit clock_seq_hi_variant, 8-bit clock_seq_low, 48-bit node) as
        the 'fields' argument, or a single 128-bit integer as the 'int'
        argument.  When a string of hex digits is given, curly braces,
        hyphens, and a URN prefix are all optional.  For example, these
        expressions all yield the same UUID:

        UUID('{12345678-1234-5678-1234-567812345678}')
        UUID('12345678123456781234567812345678')
        UUID('urn:uuid:12345678-1234-5678-1234-567812345678')
        UUID(bytes='\x12\x34\x56\x78'*4)
        UUID(bytes_le='\x78\x56\x34\x12\x34\x12\x78\x56' +
                      '\x12\x34\x56\x78\x12\x34\x56\x78')
        UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34, 0x567812345678))
        UUID(int=0x12345678123456781234567812345678)

        Exactly one of 'hex', 'bytes', 'bytes_le', 'fields', or 'int' must
        be given.  The 'version' argument is optional; if given, the resulting
        UUID will have its variant and version set according to RFC 4122,
        overriding the given 'hex', 'bytes', 'bytes_le', 'fields', or 'int'.
        """

        # Exactly one of the five content arguments must be provided.
        if [hex, bytes, bytes_le, fields, int].count(None) != 4:
            raise TypeError('need one of hex, bytes, bytes_le, fields, or int')
        if hex is not None:
            # Strip the optional URN prefix, braces and hyphens.
            hex = hex.replace('urn:', '').replace('uuid:', '')
            hex = hex.strip('{}').replace('-', '')
            if len(hex) != 32:
                raise ValueError('badly formed hexadecimal UUID string')
            int = long(hex, 16)
        if bytes_le is not None:
            if len(bytes_le) != 16:
                raise ValueError('bytes_le is not a 16-char string')
            # Byte-swap the first three fields into big-endian order.
            bytes = (bytes_le[3] + bytes_le[2] + bytes_le[1] + bytes_le[0] +
                     bytes_le[5] + bytes_le[4] + bytes_le[7] + bytes_le[6] +
                     bytes_le[8:])
        if bytes is not None:
            if len(bytes) != 16:
                raise ValueError('bytes is not a 16-char string')
            int = long(('%02x'*16) % tuple(map(ord, bytes)), 16)
        if fields is not None:
            if len(fields) != 6:
                raise ValueError('fields is not a 6-tuple')
            (time_low, time_mid, time_hi_version,
             clock_seq_hi_variant, clock_seq_low, node) = fields
            # Validate each field's bit width before packing them together.
            if not 0 <= time_low < 1<<32L:
                raise ValueError('field 1 out of range (need a 32-bit value)')
            if not 0 <= time_mid < 1<<16L:
                raise ValueError('field 2 out of range (need a 16-bit value)')
            if not 0 <= time_hi_version < 1<<16L:
                raise ValueError('field 3 out of range (need a 16-bit value)')
            if not 0 <= clock_seq_hi_variant < 1<<8L:
                raise ValueError('field 4 out of range (need an 8-bit value)')
            if not 0 <= clock_seq_low < 1<<8L:
                raise ValueError('field 5 out of range (need an 8-bit value)')
            if not 0 <= node < 1<<48L:
                raise ValueError('field 6 out of range (need a 48-bit value)')
            clock_seq = (clock_seq_hi_variant << 8L) | clock_seq_low
            int = ((time_low << 96L) | (time_mid << 80L) |
                   (time_hi_version << 64L) | (clock_seq << 48L) | node)
        if int is not None:
            if not 0 <= int < 1<<128L:
                raise ValueError('int is out of range (need a 128-bit value)')
        if version is not None:
            if not 1 <= version <= 5:
                raise ValueError('illegal version number')
            # Set the variant to RFC 4122.
            int &= ~(0xc000 << 48L)
            int |= 0x8000 << 48L
            # Set the version number.
            int &= ~(0xf000 << 64L)
            int |= version << 76L
        # Write through __dict__ to bypass the immutability enforced by
        # __setattr__ below.
        self.__dict__['int'] = int

    def __cmp__(self, other):
        # Py2-style ordering: UUIDs compare by their 128-bit integer value.
        if isinstance(other, UUID):
            return cmp(self.int, other.int)
        return NotImplemented

    def __hash__(self):
        return hash(self.int)

    def __int__(self):
        return self.int

    def __repr__(self):
        return 'UUID(%r)' % str(self)

    def __setattr__(self, name, value):
        # UUIDs are read-only once constructed.
        raise TypeError('UUID objects are immutable')

    def __str__(self):
        hex = '%032x' % self.int
        return '%s-%s-%s-%s-%s' % (
            hex[:8], hex[8:12], hex[12:16], hex[16:20], hex[20:])

    def get_bytes(self):
        # Big-endian 16-byte string: most significant byte first.
        bytes = ''
        for shift in range(0, 128, 8):
            bytes = chr((self.int >> shift) & 0xff) + bytes
        return bytes

    bytes = property(get_bytes)

    def get_bytes_le(self):
        # Little-endian layout: swap the bytes of the first three fields.
        bytes = self.bytes
        return (bytes[3] + bytes[2] + bytes[1] + bytes[0] +
                bytes[5] + bytes[4] + bytes[7] + bytes[6] + bytes[8:])

    bytes_le = property(get_bytes_le)

    def get_fields(self):
        return (self.time_low, self.time_mid, self.time_hi_version,
                self.clock_seq_hi_variant, self.clock_seq_low, self.node)

    fields = property(get_fields)

    def get_time_low(self):
        return self.int >> 96L

    time_low = property(get_time_low)

    def get_time_mid(self):
        return (self.int >> 80L) & 0xffff

    time_mid = property(get_time_mid)

    def get_time_hi_version(self):
        return (self.int >> 64L) & 0xffff

    time_hi_version = property(get_time_hi_version)

    def get_clock_seq_hi_variant(self):
        return (self.int >> 56L) & 0xff

    clock_seq_hi_variant = property(get_clock_seq_hi_variant)

    def get_clock_seq_low(self):
        return (self.int >> 48L) & 0xff

    clock_seq_low = property(get_clock_seq_low)

    def get_time(self):
        # 60-bit timestamp reassembled from the three time fields.
        return (((self.time_hi_version & 0x0fffL) << 48L) |
                (self.time_mid << 32L) | self.time_low)

    time = property(get_time)

    def get_clock_seq(self):
        # 14-bit clock sequence (the two variant bits are masked out).
        return (((self.clock_seq_hi_variant & 0x3fL) << 8L) |
                self.clock_seq_low)

    clock_seq = property(get_clock_seq)

    def get_node(self):
        return self.int & 0xffffffffffff

    node = property(get_node)

    def get_hex(self):
        return '%032x' % self.int

    hex = property(get_hex)

    def get_urn(self):
        return 'urn:uuid:' + str(self)

    urn = property(get_urn)

    def get_variant(self):
        # Decode the variant from the high bits of clock_seq_hi_variant.
        if not self.int & (0x8000 << 48L):
            return RESERVED_NCS
        elif not self.int & (0x4000 << 48L):
            return RFC_4122
        elif not self.int & (0x2000 << 48L):
            return RESERVED_MICROSOFT
        else:
            return RESERVED_FUTURE

    variant = property(get_variant)

    def get_version(self):
        # The version bits are only meaningful for RFC 4122 UUIDs.
        if self.variant == RFC_4122:
            return int((self.int >> 76L) & 0xf)

    version = property(get_version)
|
||||
|
||||
def _find_mac(command, args, hw_identifiers, get_index):
|
||||
import os
|
||||
for dir in ['', '/sbin/', '/usr/sbin']:
|
||||
executable = os.path.join(dir, command)
|
||||
if not os.path.exists(executable):
|
||||
continue
|
||||
|
||||
try:
|
||||
# LC_ALL to get English output, 2>/dev/null to
|
||||
# prevent output on stderr
|
||||
cmd = 'LC_ALL=C %s %s 2>/dev/null' % (executable, args)
|
||||
pipe = os.popen(cmd)
|
||||
except IOError:
|
||||
continue
|
||||
|
||||
for line in pipe:
|
||||
words = line.lower().split()
|
||||
for i in range(len(words)):
|
||||
if words[i] in hw_identifiers:
|
||||
return int(words[get_index(i)].replace(':', ''), 16)
|
||||
return None
|
||||
|
||||
def _ifconfig_getnode():
    """Get the hardware address on Unix by running ifconfig.

    Returns the MAC as an int, or None if no method succeeded.
    """

    # This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes.
    for args in ('', '-a', '-av'):
        mac = _find_mac('ifconfig', args, ['hwaddr', 'ether'], lambda i: i+1)
        if mac:
            return mac

    import socket
    ip_addr = socket.gethostbyname(socket.gethostname())

    # Try getting the MAC addr from arp based on our IP address (Solaris).
    mac = _find_mac('arp', '-an', [ip_addr], lambda i: -1)
    if mac:
        return mac

    # This might work on HP-UX.
    mac = _find_mac('lanscan', '-ai', ['lan0'], lambda i: 0)
    if mac:
        return mac

    return None
|
||||
|
||||
def _ipconfig_getnode():
    """Get the hardware address on Windows by running ipconfig.exe.

    Returns the MAC as an int, or None (implicitly) if nothing matched.
    """
    import os, re
    dirs = ['', r'c:\windows\system32', r'c:\winnt\system32']
    try:
        # Prefer the real system directory reported by the Win32 API.
        import ctypes
        buffer = ctypes.create_string_buffer(300)
        ctypes.windll.kernel32.GetSystemDirectoryA(buffer, 300) # @UndefinedVariable
        dirs.insert(0, buffer.value.decode('mbcs'))
    except:
        pass
    for dir in dirs:
        try:
            pipe = os.popen(os.path.join(dir, 'ipconfig') + ' /all')
        except IOError:
            continue
        for line in pipe:
            value = line.split(':')[-1].strip().lower()
            # Match a MAC in 'xx-xx-xx-xx-xx-xx' form and return it as an int.
            if re.match('([0-9a-f][0-9a-f]-){5}[0-9a-f][0-9a-f]', value):
                return int(value.replace('-', ''), 16)
|
||||
|
||||
def _netbios_getnode():
    """Get the hardware address on Windows using NetBIOS calls.
    See http://support.microsoft.com/kb/118623 for details.

    Requires the pywin32 win32wnet/netbios modules.  Returns the MAC as an
    int (Py2 long), or None (implicitly) if no adapter answered.
    """
    import win32wnet, netbios
    # Enumerate the available LAN adapters.
    ncb = netbios.NCB()
    ncb.Command = netbios.NCBENUM
    ncb.Buffer = adapters = netbios.LANA_ENUM()
    adapters._pack()
    if win32wnet.Netbios(ncb) != 0:
        return
    adapters._unpack()
    # Query each adapter for its status, which includes the MAC address.
    for i in range(adapters.length):
        ncb.Reset()
        ncb.Command = netbios.NCBRESET
        ncb.Lana_num = ord(adapters.lana[i])
        if win32wnet.Netbios(ncb) != 0:
            continue
        ncb.Reset()
        ncb.Command = netbios.NCBASTAT
        ncb.Lana_num = ord(adapters.lana[i])
        ncb.Callname = '*'.ljust(16)
        ncb.Buffer = status = netbios.ADAPTER_STATUS()
        if win32wnet.Netbios(ncb) != 0:
            continue
        status._unpack()
        # Pack the six address bytes into one 48-bit integer.
        bytes = map(ord, status.adapter_address)
        return ((bytes[0]<<40L) + (bytes[1]<<32L) + (bytes[2]<<24L) +
                (bytes[3]<<16L) + (bytes[4]<<8L) + bytes[5])
|
||||
|
||||
# Thanks to Thomas Heller for ctypes and for his help with its use here.

# If ctypes is available, use it to find system routines for UUID generation.
# On failure all three hooks stay None and the pure-Python paths are used.
_uuid_generate_random = _uuid_generate_time = _UuidCreate = None
try:
    import ctypes, ctypes.util
    # Shared 16-byte output buffer reused by all the native generators.
    _buffer = ctypes.create_string_buffer(16)

    # The uuid_generate_* routines are provided by libuuid on at least
    # Linux and FreeBSD, and provided by libc on Mac OS X.
    for libname in ['uuid', 'c']:
        try:
            lib = ctypes.CDLL(ctypes.util.find_library(libname))
        except:
            continue
        if hasattr(lib, 'uuid_generate_random'):
            _uuid_generate_random = lib.uuid_generate_random
        if hasattr(lib, 'uuid_generate_time'):
            _uuid_generate_time = lib.uuid_generate_time

    # On Windows prior to 2000, UuidCreate gives a UUID containing the
    # hardware address.  On Windows 2000 and later, UuidCreate makes a
    # random UUID and UuidCreateSequential gives a UUID containing the
    # hardware address.  These routines are provided by the RPC runtime.
    # NOTE:  at least on Tim's WinXP Pro SP2 desktop box, while the last
    # 6 bytes returned by UuidCreateSequential are fixed, they don't appear
    # to bear any relationship to the MAC address of any network device
    # on the box.
    try:
        lib = ctypes.windll.rpcrt4
    except:
        # Not on Windows: lib stays None and getattr below yields None.
        lib = None
    _UuidCreate = getattr(lib, 'UuidCreateSequential',
                          getattr(lib, 'UuidCreate', None))
except:
    pass
|
||||
|
||||
def _unixdll_getnode():
    """Get the hardware address on Unix using ctypes."""
    # Ask libuuid/libc for a time-based UUID and extract its node field.
    _uuid_generate_time(_buffer)
    return UUID(bytes=_buffer.raw).node
|
||||
|
||||
def _windll_getnode():
    """Get the hardware address on Windows using ctypes."""
    # A zero return from UuidCreate/UuidCreateSequential signals success;
    # on any other status we fall through and return None.
    if _UuidCreate(_buffer) == 0:
        return UUID(bytes=_buffer.raw).node
|
||||
|
||||
def _random_getnode():
    """Get a random node ID, with eighth bit set as suggested by RFC 4122."""
    import random
    # The OR-ed constant sets the multicast bit, which marks the value as
    # not being a real MAC address.
    return random.randrange(0, 1<<48L) | 0x010000000000L
|
||||
|
||||
_node = None  # cached result of getnode(); computed at most once

def getnode():
    """Get the hardware address as a 48-bit positive integer.

    The first time this runs, it may launch a separate program, which could
    be quite slow.  If all attempts to obtain the hardware address fail, we
    choose a random 48-bit number with its eighth bit set to 1 as recommended
    in RFC 4122.
    """

    global _node
    if _node is not None:
        return _node

    import sys
    if sys.platform == 'win32':
        getters = [_windll_getnode, _netbios_getnode, _ipconfig_getnode]
    else:
        getters = [_unixdll_getnode, _ifconfig_getnode]

    # Try each platform-specific getter in order; _random_getnode is the
    # last resort and always produces a value.
    for getter in getters + [_random_getnode]:
        try:
            _node = getter()
        except:
            continue
        if _node is not None:
            return _node
|
||||
|
||||
_last_timestamp = None  # last 100-ns timestamp handed out; keeps uuid1 monotonic

def uuid1(node=None, clock_seq=None):
    """Generate a UUID from a host ID, sequence number, and the current time.
    If 'node' is not given, getnode() is used to obtain the hardware
    address.  If 'clock_seq' is given, it is used as the sequence number;
    otherwise a random 14-bit sequence number is chosen."""

    # When the system provides a version-1 UUID generator, use it (but don't
    # use UuidCreate here because its UUIDs don't conform to RFC 4122).
    if _uuid_generate_time and node is clock_seq is None:
        _uuid_generate_time(_buffer)
        return UUID(bytes=_buffer.raw)

    global _last_timestamp
    import time
    nanoseconds = int(time.time() * 1e9)
    # 0x01b21dd213814000 is the number of 100-ns intervals between the
    # UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
    timestamp = int(nanoseconds/100) + 0x01b21dd213814000L
    if timestamp <= _last_timestamp:
        # Never repeat or go backwards; bump by one 100-ns tick instead.
        timestamp = _last_timestamp + 1
    _last_timestamp = timestamp
    if clock_seq is None:
        import random
        clock_seq = random.randrange(1<<14L) # instead of stable storage
    # Split the 60-bit timestamp and 14-bit clock_seq into the UUID fields.
    time_low = timestamp & 0xffffffffL
    time_mid = (timestamp >> 32L) & 0xffffL
    time_hi_version = (timestamp >> 48L) & 0x0fffL
    clock_seq_low = clock_seq & 0xffL
    clock_seq_hi_variant = (clock_seq >> 8L) & 0x3fL
    if node is None:
        node = getnode()
    return UUID(fields=(time_low, time_mid, time_hi_version,
                        clock_seq_hi_variant, clock_seq_low, node), version=1)
|
||||
|
||||
def uuid3(namespace, name):
    """Generate a UUID from the MD5 hash of a namespace UUID and a name."""
    # Py2-only 'md5' module (superseded by hashlib in later Pythons).
    import md5
    hash = md5.md5(namespace.bytes + name).digest()
    return UUID(bytes=hash[:16], version=3)
|
||||
|
||||
def uuid4():
    """Generate a random UUID."""

    # When the system provides a version-4 UUID generator, use it.
    if _uuid_generate_random:
        _uuid_generate_random(_buffer)
        return UUID(bytes=_buffer.raw)

    # Otherwise, get randomness from urandom or the 'random' module.
    try:
        import os
        return UUID(bytes=os.urandom(16), version=4)
    except:
        # os.urandom may raise NotImplementedError on exotic platforms;
        # fall back to the (non-cryptographic) random module.
        import random
        bytes = [chr(random.randrange(256)) for i in range(16)]
        return UUID(bytes=bytes, version=4)
|
||||
|
||||
def uuid5(namespace, name):
    """Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
    # Py2-only 'sha' module (superseded by hashlib in later Pythons).
    import sha
    hash = sha.sha(namespace.bytes + name).digest()
    return UUID(bytes=hash[:16], version=5)
|
||||
|
||||
# The following standard UUIDs are for use with uuid3() or uuid5().
# They are the well-known namespace IDs defined in RFC 4122, Appendix C.

NAMESPACE_DNS = UUID('6ba7b810-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_URL = UUID('6ba7b811-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_OID = UUID('6ba7b812-9dad-11d1-80b4-00c04fd430c8')
NAMESPACE_X500 = UUID('6ba7b814-9dad-11d1-80b4-00c04fd430c8')
|
||||
1493
ptvsd/pydevd/_pydev_imps/_pydev_xmlrpclib.py
Normal file
1493
ptvsd/pydevd/_pydev_imps/_pydev_xmlrpclib.py
Normal file
File diff suppressed because it is too large
Load diff
0
ptvsd/pydevd/_pydev_runfiles/__init__.py
Normal file
0
ptvsd/pydevd/_pydev_runfiles/__init__.py
Normal file
874
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles.py
Normal file
874
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles.py
Normal file
|
|
@ -0,0 +1,874 @@
|
|||
from __future__ import nested_scopes
|
||||
|
||||
import fnmatch
|
||||
import os.path
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
|
||||
from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport
|
||||
import re
|
||||
import time
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# Configuration
|
||||
#=======================================================================================================================
|
||||
class Configuration:
    """Value object holding every option parsed from the runfiles command
    line (see parse_cmdline); consumed by PydevTestRunner."""

    def __init__(
        self,
        files_or_dirs='',
        verbosity=2,
        include_tests=None,
        tests=None,
        port=None,
        files_to_tests=None,
        jobs=1,
        split_jobs='tests',
        coverage_output_dir=None,
        coverage_include=None,
        coverage_output_file=None,
        exclude_files=None,
        exclude_tests=None,
        include_files=None,
        django=False,
        ):
        self.files_or_dirs = files_or_dirs
        self.verbosity = verbosity
        self.include_tests = include_tests
        self.tests = tests
        self.port = port
        self.files_to_tests = files_to_tests
        self.jobs = jobs
        self.split_jobs = split_jobs
        self.django = django

        # The filter options, when given, must be lists/tuples of
        # fnmatch-style patterns.
        if include_tests:
            assert isinstance(include_tests, (list, tuple))

        if exclude_files:
            assert isinstance(exclude_files, (list, tuple))

        if exclude_tests:
            assert isinstance(exclude_tests, (list, tuple))

        self.exclude_files = exclude_files
        self.include_files = include_files
        self.exclude_tests = exclude_tests

        self.coverage_output_dir = coverage_output_dir
        self.coverage_include = coverage_include
        self.coverage_output_file = coverage_output_file

    def __str__(self):
        # Multi-line human-readable dump, printed when verbosity > 5.
        return '''Configuration
- files_or_dirs: %s
- verbosity: %s
- tests: %s
- port: %s
- files_to_tests: %s
- jobs: %s
- split_jobs: %s

- include_files: %s
- include_tests: %s

- exclude_files: %s
- exclude_tests: %s

- coverage_output_dir: %s
- coverage_include_dir: %s
- coverage_output_file: %s

- django: %s
''' % (
            self.files_or_dirs,
            self.verbosity,
            self.tests,
            self.port,
            self.files_to_tests,
            self.jobs,
            self.split_jobs,

            self.include_files,
            self.include_tests,

            self.exclude_files,
            self.exclude_tests,

            self.coverage_output_dir,
            self.coverage_include,
            self.coverage_output_file,

            self.django,
        )
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# parse_cmdline
|
||||
#=======================================================================================================================
|
||||
def parse_cmdline(argv=None):
    """Parse the runfiles command line and return a Configuration.

    usage:
        runfiles.py -v|--verbosity <level> -t|--tests <Test.test1,Test2> dirs|files

    Multiprocessing options:
        jobs=number (with the number of jobs to be used to run the tests)
        split_jobs='module'|'tests'
            if == module, a given job will always receive all the tests from a module
            if == tests, the tests will be split independently of their originating module (default)

    --exclude_files = comma-separated list of patterns with files to exclude (fnmatch style)
    --include_files = comma-separated list of patterns with files to include (fnmatch style)
    --exclude_tests = comma-separated list of patterns with test names to exclude (fnmatch style)

    Note: if --tests is given, --exclude_files, --include_files and --exclude_tests are ignored!

    :param argv: list of arguments to parse (defaults to sys.argv).
    :return: a Configuration with all options resolved.
    :raises AssertionError: if --split_jobs is neither 'module' nor 'tests'.
    """
    if argv is None:
        argv = sys.argv

    # Defaults for every option.
    verbosity = 2
    include_tests = None
    tests = None
    port = None
    jobs = 1
    split_jobs = 'tests'
    files_to_tests = {}
    coverage_output_dir = None
    coverage_include = None
    exclude_files = None
    exclude_tests = None
    include_files = None
    django = False

    from _pydev_bundle._pydev_getopt import gnu_getopt
    optlist, dirs = gnu_getopt(
        argv[1:], "",
        [
            "verbosity=",
            "tests=",

            "port=",
            "config_file=",

            "jobs=",
            "split_jobs=",

            "include_tests=",
            "include_files=",

            "exclude_files=",
            "exclude_tests=",

            "coverage_output_dir=",
            "coverage_include=",

            "django="
        ]
    )

    for opt, value in optlist:
        if opt in ("-v", "--verbosity"):
            verbosity = value

        elif opt in ("-p", "--port"):
            port = int(value)

        elif opt in ("-j", "--jobs"):
            jobs = int(value)

        elif opt in ("-s", "--split_jobs"):
            split_jobs = value
            if split_jobs not in ('module', 'tests'):
                raise AssertionError('Expected split to be either "module" or "tests". Was :%s' % (split_jobs,))

        elif opt in ("-d", "--coverage_output_dir",):
            coverage_output_dir = value.strip()

        elif opt in ("-i", "--coverage_include",):
            coverage_include = value.strip()

        elif opt in ("-I", "--include_tests"):
            include_tests = value.split(',')

        elif opt in ("-E", "--exclude_files"):
            exclude_files = value.split(',')

        elif opt in ("-F", "--include_files"):
            include_files = value.split(',')

        elif opt in ("-e", "--exclude_tests"):
            exclude_tests = value.split(',')

        elif opt in ("-t", "--tests"):
            tests = value.split(',')

        elif opt in ("--django",):
            django = value.strip() in ['true', 'True', '1']

        elif opt in ("-c", "--config_file"):
            # The config file maps test files to the tests to run in them,
            # one 'file|test' pair per line.
            config_file = value.strip()
            if os.path.exists(config_file):
                # Note: mode 'r' (not the legacy 'rU'): the 'U' flag was
                # deprecated since Python 3.4 and removed in 3.11, where it
                # raises ValueError; splitlines() below already copes with
                # any newline convention.
                f = open(config_file, 'r')
                try:
                    config_file_contents = f.read()
                finally:
                    f.close()

                if config_file_contents:
                    config_file_contents = config_file_contents.strip()

                if config_file_contents:
                    for line in config_file_contents.splitlines():
                        file_and_test = line.split('|')
                        if len(file_and_test) == 2:
                            # Renamed from 'file' to avoid shadowing the builtin.
                            filename, test_name = file_and_test
                            if filename in files_to_tests:
                                files_to_tests[filename].append(test_name)
                            else:
                                files_to_tests[filename] = [test_name]

            else:
                sys.stderr.write('Could not find config file: %s\n' % (config_file,))

    if type([]) != type(dirs):
        dirs = [dirs]

    ret_dirs = []
    for d in dirs:
        if '|' in d:
            # Paths may come from the IDE separated by '|'.
            ret_dirs.extend(d.split('|'))
        else:
            ret_dirs.append(d)

    verbosity = int(verbosity)

    if tests:
        # Explicit test selection overrides all include/exclude filters.
        if verbosity > 4:
            sys.stdout.write('--tests provided. Ignoring --exclude_files, --exclude_tests and --include_files\n')
        exclude_files = exclude_tests = include_files = None

    config = Configuration(
        ret_dirs,
        verbosity,
        include_tests,
        tests,
        port,
        files_to_tests,
        jobs,
        split_jobs,
        coverage_output_dir,
        coverage_include,
        exclude_files=exclude_files,
        exclude_tests=exclude_tests,
        include_files=include_files,
        django=django,
    )

    if verbosity > 5:
        sys.stdout.write(str(config) + '\n')
    return config
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# PydevTestRunner
|
||||
#=======================================================================================================================
|
||||
class PydevTestRunner(object):
    """ finds and runs a file or directory of files as a unit test """

    # File patterns considered as importable Python test files.
    __py_extensions = ["*.py", "*.pyw"]
    # Patterns never collected (package init files are not test modules).
    __exclude_files = ["__init__.*"]

    #Just to check that only this attributes will be written to this file
    __slots__ = [
        'verbosity', #Always used

        'files_to_tests', #If this one is given, the ones below are not used

        'files_or_dirs', #Files or directories received in the command line
        'include_tests', #The filter used to collect the tests
        'tests', #Strings with the tests to be run

        'jobs', #Integer with the number of jobs that should be used to run the test cases
        'split_jobs', #String with 'tests' or 'module' (how should the jobs be split)

        'configuration',
        'coverage',
    ]

    def __init__(self, configuration):
        """Initialize the runner from a Configuration object.

        When configuration.files_to_tests is given (a dict mapping file ->
        list of test names), it takes precedence and files_or_dirs/tests
        are derived from it; otherwise the explicit files_or_dirs and tests
        from the configuration are used.  Also extends sys.path (see
        __adjust_path).
        """
        self.verbosity = configuration.verbosity

        self.jobs = configuration.jobs
        self.split_jobs = configuration.split_jobs

        files_to_tests = configuration.files_to_tests
        if files_to_tests:
            self.files_to_tests = files_to_tests
            self.files_or_dirs = list(files_to_tests.keys())
            self.tests = None
        else:
            self.files_to_tests = {}
            self.files_or_dirs = configuration.files_or_dirs
            self.tests = configuration.tests

        self.configuration = configuration
        self.__adjust_path()

    def __adjust_path(self):
        """ add the current file or directory to the python path """
        path_to_append = None
        for n in xrange(len(self.files_or_dirs)):
            dir_name = self.__unixify(self.files_or_dirs[n])
            if os.path.isdir(dir_name):
                if not dir_name.endswith("/"):
                    self.files_or_dirs[n] = dir_name + "/"
                path_to_append = os.path.normpath(dir_name)
            elif os.path.isfile(dir_name):
                path_to_append = os.path.dirname(dir_name)
            else:
                if not os.path.exists(dir_name):
                    block_line = '*' * 120
                    sys.stderr.write('\n%s\n* PyDev test runner error: %s does not exist.\n%s\n' % (block_line, dir_name, block_line))
                    return
                msg = ("unknown type. \n%s\nshould be file or a directory.\n" % (dir_name))
                raise RuntimeError(msg)
        if path_to_append is not None:
            #Add it as the last one (so, first things are resolved against the default dirs and
            #if none resolves, then we try a relative import).
            sys.path.append(path_to_append)

    def __is_valid_py_file(self, fname):
        """ tests that a particular file contains the proper file extension
            and is not in the list of files to exclude """
        # Counts matches rather than short-circuiting (jython 2.1-era style);
        # valid means: matches no exclude pattern AND at least one extension.
        is_valid_fname = 0
        for invalid_fname in self.__class__.__exclude_files:
            is_valid_fname += int(not fnmatch.fnmatch(fname, invalid_fname))
        if_valid_ext = 0
        for ext in self.__class__.__py_extensions:
            if_valid_ext += int(fnmatch.fnmatch(fname, ext))
        return is_valid_fname > 0 and if_valid_ext > 0

    def __unixify(self, s):
        """ stupid windows. converts the backslash to forwardslash for consistency """
        return os.path.normpath(s).replace(os.sep, "/")

    def __importify(self, s, dir=False):
        """ turns directory separators into dots and removes the ".py*" extension
            so the string can be used as import statement """
        if not dir:
            dirname, fname = os.path.split(s)

            if fname.count('.') > 1:
                #if there's a file named xxx.xx.py, it is not a valid module, so, let's not load it...
                return

            imp_stmt_pieces = [dirname.replace("\\", "/").replace("/", "."), os.path.splitext(fname)[0]]

            if len(imp_stmt_pieces[0]) == 0:
                imp_stmt_pieces = imp_stmt_pieces[1:]

            return ".".join(imp_stmt_pieces)

        else: #handle dir
            return s.replace("\\", "/").replace("/", ".")

    def __add_files(self, pyfiles, root, files):
        """ if files match, appends them to pyfiles. used by os.path.walk fcn """
        for fname in files:
            if self.__is_valid_py_file(fname):
                name_without_base_dir = self.__unixify(os.path.join(root, fname))
                pyfiles.append(name_without_base_dir)

    def find_import_files(self):
        """ return a list of files to import """
        if self.files_to_tests:
            pyfiles = self.files_to_tests.keys()
        else:
            pyfiles = []

            for base_dir in self.files_or_dirs:
                if os.path.isdir(base_dir):
                    if hasattr(os, 'walk'):
                        for root, dirs, files in os.walk(base_dir):

                            #Note: handling directories that should be excluded from the search because
                            #they don't have __init__.py
                            exclude = {}
                            for d in dirs:
                                for init in ['__init__.py', '__init__.pyo', '__init__.pyc', '__init__.pyw', '__init__$py.class']:
                                    if os.path.exists(os.path.join(root, d, init).replace('\\', '/')):
                                        break
                                else:
                                    exclude[d] = 1

                            if exclude:
                                new = []
                                for d in dirs:
                                    if d not in exclude:
                                        new.append(d)

                                #Mutate in place so os.walk does not descend into excluded dirs.
                                dirs[:] = new

                            self.__add_files(pyfiles, root, files)
                    else:
                        # jython2.1 is too old for os.walk!
                        os.path.walk(base_dir, self.__add_files, pyfiles)

                elif os.path.isfile(base_dir):
                    pyfiles.append(base_dir)

        #Apply the include_files / exclude_files basename filters (include wins
        #first: a file must match some include pattern, then must not match
        #any exclude pattern).
        if self.configuration.exclude_files or self.configuration.include_files:
            ret = []
            for f in pyfiles:
                add = True
                basename = os.path.basename(f)
                if self.configuration.include_files:
                    add = False

                    for pat in self.configuration.include_files:
                        if fnmatch.fnmatchcase(basename, pat):
                            add = True
                            break

                if not add:
                    if self.verbosity > 3:
                        sys.stdout.write('Skipped file: %s (did not match any include_files pattern: %s)\n' % (f, self.configuration.include_files))

                elif self.configuration.exclude_files:
                    for pat in self.configuration.exclude_files:
                        if fnmatch.fnmatchcase(basename, pat):
                            if self.verbosity > 3:
                                sys.stdout.write('Skipped file: %s (matched exclude_files pattern: %s)\n' % (f, pat))

                            elif self.verbosity > 2:
                                sys.stdout.write('Skipped file: %s\n' % (f,))

                            add = False
                            break

                if add:
                    if self.verbosity > 3:
                        sys.stdout.write('Adding file: %s for test discovery.\n' % (f,))
                    ret.append(f)

            pyfiles = ret

        return pyfiles

    def __get_module_from_str(self, modname, print_exception, pyfile):
        """ Import the module in the given import path.
            * Returns the "final" module, so importing "coilib40.subject.visu"
            returns the "visu" module, not the "coilib40" as returned by __import__ """
        try:
            mod = __import__(modname)
            for part in modname.split('.')[1:]:
                mod = getattr(mod, part)
            return mod
        except:
            #On failure, optionally capture the traceback and report it to the
            #IDE through XML-RPC as a test 'error'; returns None either way.
            if print_exception:
                from _pydev_runfiles import pydev_runfiles_xml_rpc
                from _pydevd_bundle import pydevd_io
                buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
                buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
                try:
                    import traceback;traceback.print_exc()
                    sys.stderr.write('ERROR: Module: %s could not be imported (file: %s).\n' % (modname, pyfile))
                finally:
                    pydevd_io.end_redirect('stderr')
                    pydevd_io.end_redirect('stdout')

                pydev_runfiles_xml_rpc.notifyTest(
                    'error', buf_out.getvalue(), buf_err.getvalue(), pyfile, modname, 0)

            return None

    def remove_duplicates_keeping_order(self, seq):
        """Return a list with duplicates removed, preserving first-seen order."""
        seen = set()
        seen_add = seen.add
        return [x for x in seq if not (x in seen or seen_add(x))]

    def find_modules_from_files(self, pyfiles):
        """ returns a list of modules given a list of files """
        #let's make sure that the paths we want are in the pythonpath...
        imports = [(s, self.__importify(s)) for s in pyfiles]

        sys_path = [os.path.normpath(path) for path in sys.path]
        sys_path = self.remove_duplicates_keeping_order(sys_path)

        system_paths = []
        for s in sys_path:
            system_paths.append(self.__importify(s, True))

        ret = []
        for pyfile, imp in imports:
            if imp is None:
                continue #can happen if a file is not a valid module
            #Candidate import strings: the dotted file path with each matching
            #sys.path prefix stripped off.
            choices = []
            for s in system_paths:
                if imp.startswith(s):
                    add = imp[len(s) + 1:]
                    if add:
                        choices.append(add)
                    #sys.stdout.write(' ' + add + ' ')

            if not choices:
                sys.stdout.write('PYTHONPATH not found for file: %s\n' % imp)
            else:
                #Try each candidate; only the last failure prints an exception.
                for i, import_str in enumerate(choices):
                    print_exception = i == len(choices) - 1
                    mod = self.__get_module_from_str(import_str, print_exception, pyfile)
                    if mod is not None:
                        ret.append((pyfile, mod, import_str))
                        break

        return ret

    #===================================================================================================================
    # GetTestCaseNames
    #===================================================================================================================
    class GetTestCaseNames:
        """Yes, we need a class for that (cannot use outer context on jython 2.1)"""

        def __init__(self, accepted_classes, accepted_methods):
            # accepted_classes: class names for which ALL test* methods run.
            # accepted_methods: 'ClassName.method' entries accepted exactly.
            self.accepted_classes = accepted_classes
            self.accepted_methods = accepted_methods

        def __call__(self, testCaseClass):
            """Return a sorted sequence of method names found within testCaseClass"""
            testFnNames = []
            className = testCaseClass.__name__

            if className in self.accepted_classes:
                for attrname in dir(testCaseClass):
                    #If a class is chosen, we select all the 'test' methods'
                    if attrname.startswith('test') and hasattr(getattr(testCaseClass, attrname), '__call__'):
                        testFnNames.append(attrname)

            else:
                for attrname in dir(testCaseClass):
                    #If we have the class+method name, we must do a full check and have an exact match.
                    if className + '.' + attrname in self.accepted_methods:
                        if hasattr(getattr(testCaseClass, attrname), '__call__'):
                            testFnNames.append(attrname)

            #sorted() is not available in jython 2.1
            testFnNames.sort()
            return testFnNames

    def _decorate_test_suite(self, suite, pyfile, module_name):
        """Tag every suite/test with __pydev_pyfile__/__pydev_module_name__.

        Returns True if the (possibly nested) suite contains at least one
        actual TestCase, False otherwise.
        """
        import unittest
        if isinstance(suite, unittest.TestSuite):
            add = False
            suite.__pydev_pyfile__ = pyfile
            suite.__pydev_module_name__ = module_name

            for t in suite._tests:
                t.__pydev_pyfile__ = pyfile
                t.__pydev_module_name__ = module_name
                if self._decorate_test_suite(t, pyfile, module_name):
                    add = True

            return add

        elif isinstance(suite, unittest.TestCase):
            return True

        else:
            return False

    def find_tests_from_modules(self, file_and_modules_and_module_name):
        """ returns the unittests given a list of modules """
        #Use our own suite!
        from _pydev_runfiles import pydev_runfiles_unittest
        import unittest
        unittest.TestLoader.suiteClass = pydev_runfiles_unittest.PydevTestSuite
        loader = unittest.TestLoader()

        ret = []
        if self.files_to_tests:
            #Explicit per-file test selection: restrict the loader to exactly
            #the requested 'ClassName.method' names for each file.
            for pyfile, m, module_name in file_and_modules_and_module_name:
                accepted_classes = {}
                accepted_methods = {}
                tests = self.files_to_tests[pyfile]
                for t in tests:
                    accepted_methods[t] = t

                loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

                suite = loader.loadTestsFromModule(m)
                if self._decorate_test_suite(suite, pyfile, module_name):
                    ret.append(suite)
            return ret

        if self.tests:
            #Global test selection: entries are either 'ClassName' (whole
            #class) or 'ClassName.method' (single test).
            accepted_classes = {}
            accepted_methods = {}

            for t in self.tests:
                splitted = t.split('.')
                if len(splitted) == 1:
                    accepted_classes[t] = t

                elif len(splitted) == 2:
                    accepted_methods[t] = t

            loader.getTestCaseNames = self.GetTestCaseNames(accepted_classes, accepted_methods)

        for pyfile, m, module_name in file_and_modules_and_module_name:
            suite = loader.loadTestsFromModule(m)
            if self._decorate_test_suite(suite, pyfile, module_name):
                ret.append(suite)

        return ret

    def filter_tests(self, test_objs, internal_call=False):
        """ based on a filter name, only return those tests that have
            the test case names that match """
        import unittest
        if not internal_call:
            if not self.configuration.include_tests and not self.tests and not self.configuration.exclude_tests:
                #No need to filter if we have nothing to filter!
                return test_objs

            if self.verbosity > 1:
                if self.configuration.include_tests:
                    sys.stdout.write('Tests to include: %s\n' % (self.configuration.include_tests,))

                if self.tests:
                    sys.stdout.write('Tests to run: %s\n' % (self.tests,))

                if self.configuration.exclude_tests:
                    sys.stdout.write('Tests to exclude: %s\n' % (self.configuration.exclude_tests,))

        test_suite = []
        for test_obj in test_objs:

            if isinstance(test_obj, unittest.TestSuite):
                #Note: keep the suites as they are and just 'fix' the tests (so, don't use the iter_tests).
                if test_obj._tests:
                    test_obj._tests = self.filter_tests(test_obj._tests, True)
                    if test_obj._tests: #Only add the suite if we still have tests there.
                        test_suite.append(test_obj)

            elif isinstance(test_obj, unittest.TestCase):
                try:
                    testMethodName = test_obj._TestCase__testMethodName
                except AttributeError:
                    #changed in python 2.5
                    testMethodName = test_obj._testMethodName

                #Filter order: exclude_tests first, then self.tests
                #(class/method selection), then include_tests patterns.
                add = True
                if self.configuration.exclude_tests:
                    for pat in self.configuration.exclude_tests:
                        if fnmatch.fnmatchcase(testMethodName, pat):
                            if self.verbosity > 3:
                                sys.stdout.write('Skipped test: %s (matched exclude_tests pattern: %s)\n' % (testMethodName, pat))

                            elif self.verbosity > 2:
                                sys.stdout.write('Skipped test: %s\n' % (testMethodName,))

                            add = False
                            break

                if add:
                    if self.__match_tests(self.tests, test_obj, testMethodName):
                        include = True
                        if self.configuration.include_tests:
                            include = False
                            for pat in self.configuration.include_tests:
                                if fnmatch.fnmatchcase(testMethodName, pat):
                                    include = True
                                    break
                        if include:
                            test_suite.append(test_obj)
                        else:
                            if self.verbosity > 3:
                                sys.stdout.write('Skipped test: %s (did not match any include_tests pattern %s)\n' % (
                                    testMethodName, self.configuration.include_tests,))
        return test_suite

    def iter_tests(self, test_objs):
        """Flatten nested suites into a plain list of TestCase objects."""
        #Note: not using yield because of Jython 2.1.
        import unittest
        tests = []
        for test_obj in test_objs:
            if isinstance(test_obj, unittest.TestSuite):
                tests.extend(self.iter_tests(test_obj._tests))

            elif isinstance(test_obj, unittest.TestCase):
                tests.append(test_obj)
        return tests

    def list_test_names(self, test_objs):
        """Return the method names of every TestCase reachable in test_objs."""
        names = []
        for tc in self.iter_tests(test_objs):
            try:
                testMethodName = tc._TestCase__testMethodName
            except AttributeError:
                #changed in python 2.5
                testMethodName = tc._testMethodName
            names.append(testMethodName)
        return names

    def __match_tests(self, tests, test_case, test_method_name):
        """Return 1 if test_case/test_method_name matches the 'Class' or
        'Class.method' entries in tests (or if tests is empty), else 0."""
        if not tests:
            return 1

        for t in tests:
            class_and_method = t.split('.')
            if len(class_and_method) == 1:
                #only class name
                if class_and_method[0] == test_case.__class__.__name__:
                    return 1

            elif len(class_and_method) == 2:
                if class_and_method[0] == test_case.__class__.__name__ and class_and_method[1] == test_method_name:
                    return 1

        return 0

    def __match(self, filter_list, name):
        """ returns whether a test name matches the test filter """
        if filter_list is None:
            return 1
        for f in filter_list:
            if re.match(f, name):
                return 1
        return 0

    def run_tests(self, handle_coverage=True):
        """ runs all tests """
        sys.stdout.write("Finding files... ")
        files = self.find_import_files()
        if self.verbosity > 3:
            sys.stdout.write('%s ... done.\n' % (self.files_or_dirs))
        else:
            sys.stdout.write('done.\n')
        sys.stdout.write("Importing test modules ... ")

        # NOTE(review): coverage_files/coverage are only bound when
        # handle_coverage is True, but the jobs > 1 branch below reads
        # coverage_files unconditionally -- calling with
        # handle_coverage=False and jobs > 1 would raise NameError.
        if handle_coverage:
            coverage_files, coverage = start_coverage_support(self.configuration)

        file_and_modules_and_module_name = self.find_modules_from_files(files)
        sys.stdout.write("done.\n")

        all_tests = self.find_tests_from_modules(file_and_modules_and_module_name)
        all_tests = self.filter_tests(all_tests)

        from _pydev_runfiles import pydev_runfiles_unittest
        test_suite = pydev_runfiles_unittest.PydevTestSuite(all_tests)
        from _pydev_runfiles import pydev_runfiles_xml_rpc
        pydev_runfiles_xml_rpc.notifyTestsCollected(test_suite.countTestCases())

        start_time = time.time()

        def run_tests():
            #Local closure (intentionally shadows the method name); executed
            #either directly or through the django test-suite runner below.
            executed_in_parallel = False
            if self.jobs > 1:
                from _pydev_runfiles import pydev_runfiles_parallel

                #What may happen is that the number of jobs needed is lower than the number of jobs requested
                #(e.g.: 2 jobs were requested for running 1 test) -- in which case execute_tests_in_parallel will
                #return False and won't run any tests.
                executed_in_parallel = pydev_runfiles_parallel.execute_tests_in_parallel(
                    all_tests, self.jobs, self.split_jobs, self.verbosity, coverage_files, self.configuration.coverage_include)

            if not executed_in_parallel:
                #If in coverage, we don't need to pass anything here (coverage is already enabled for this execution).
                runner = pydev_runfiles_unittest.PydevTextTestRunner(stream=sys.stdout, descriptions=1, verbosity=self.verbosity)
                sys.stdout.write('\n')
                runner.run(test_suite)

        if self.configuration.django:
            get_django_test_suite_runner()(run_tests).run_tests([])
        else:
            run_tests()

        if handle_coverage:
            coverage.stop()
            coverage.save()

        total_time = 'Finished in: %.2f secs.' % (time.time() - start_time,)
        pydev_runfiles_xml_rpc.notifyTestRunFinished(total_time)
|
||||
|
||||
|
||||
# Cached runner class (module-level memo; filled by get_django_test_suite_runner).
DJANGO_TEST_SUITE_RUNNER = None

def get_django_test_suite_runner():
    """Return (and cache) a django test-suite-runner class adapter.

    Tries django >= 1.8 (DiscoverRunner) first, then falls back to the
    pre-1.8 DjangoTestSuiteRunner; if neither can be imported, the returned
    class raises AssertionError when its run_tests is invoked.  The returned
    class takes a single `on_run_suite` callable and invokes it from
    run_suite, while build_suite/suite_result are stubbed out.
    """
    global DJANGO_TEST_SUITE_RUNNER
    if DJANGO_TEST_SUITE_RUNNER:
        return DJANGO_TEST_SUITE_RUNNER
    try:
        # django >= 1.8
        import django
        from django.test.runner import DiscoverRunner

        class MyDjangoTestSuiteRunner(DiscoverRunner):

            def __init__(self, on_run_suite):
                django.setup()
                DiscoverRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                self.on_run_suite()
    except:
        # django < 1.8
        try:
            from django.test.simple import DjangoTestSuiteRunner
        except:
            # No usable django runner available at all: keep the same class
            # name so the adapter below still subclasses something, and fail
            # loudly only when run_tests is actually called.
            class DjangoTestSuiteRunner:
                def __init__(self):
                    pass

                def run_tests(self, *args, **kwargs):
                    raise AssertionError("Unable to run suite with django.test.runner.DiscoverRunner nor django.test.simple.DjangoTestSuiteRunner because it couldn't be imported.")

        class MyDjangoTestSuiteRunner(DjangoTestSuiteRunner):

            def __init__(self, on_run_suite):
                DjangoTestSuiteRunner.__init__(self)
                self.on_run_suite = on_run_suite

            def build_suite(self, *args, **kwargs):
                pass

            def suite_result(self, *args, **kwargs):
                pass

            def run_suite(self, *args, **kwargs):
                self.on_run_suite()

    DJANGO_TEST_SUITE_RUNNER = MyDjangoTestSuiteRunner
    return DJANGO_TEST_SUITE_RUNNER
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# main
|
||||
#=======================================================================================================================
|
||||
def main(configuration):
    """Entry point: build a PydevTestRunner for *configuration* and run everything."""
    runner = PydevTestRunner(configuration)
    runner.run_tests()
|
||||
76
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py
Normal file
76
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_coverage.py
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
import os.path
|
||||
import sys
|
||||
from _pydevd_bundle.pydevd_constants import Null
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# get_coverage_files
|
||||
#=======================================================================================================================
|
||||
def get_coverage_files(coverage_output_dir, number_of_files):
    """Return *number_of_files* fresh coverage-file paths inside *coverage_output_dir*.

    Paths are named '.coverage.<i>' with i counting up from 0; indices whose
    file already exists on disk are skipped, so only non-existing paths are
    returned.
    """
    collected = []
    candidate_index = 0
    while len(collected) < number_of_files:
        candidate = os.path.join(coverage_output_dir, '.coverage.%s' % candidate_index)
        candidate_index += 1
        if not os.path.exists(candidate):
            collected.append(candidate)
    return collected
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# start_coverage_support
|
||||
#=======================================================================================================================
|
||||
def start_coverage_support(configuration):
    """Start coverage support driven by a Configuration object.

    Thin adapter: unpacks the relevant configuration attributes and
    delegates to start_coverage_support_from_params, returning its
    (coverage_files, coverage_instance) result.
    """
    output_dir = configuration.coverage_output_dir
    output_file = configuration.coverage_output_file
    return start_coverage_support_from_params(
        output_dir,
        output_file,
        configuration.jobs,
        configuration.coverage_include,
    )
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# start_coverage_support_from_params
|
||||
#=======================================================================================================================
|
||||
def start_coverage_support_from_params(coverage_output_dir, coverage_output_file, jobs, coverage_include):
    """Start the coverage module if coverage output was requested.

    Returns a tuple (coverage_files, coverage_instance):
    - coverage_files: paths reserved for child jobs (empty unless
      coverage_output_dir is used with parallel jobs).
    - coverage_instance: a started coverage.coverage object, or a Null
      no-op stand-in when coverage is disabled or unavailable.

    Side effects: sets the COVERAGE_FILE environment variable so the
    coverage module (and spawned jobs) write to the reserved files.
    """
    coverage_files = []
    # Null() is a no-op object: callers can always call stop()/save() safely.
    coverage_instance = Null()
    if coverage_output_dir or coverage_output_file:
        try:
            import coverage #@UnresolvedImport
        except:
            sys.stderr.write('Error: coverage module could not be imported\n')
            sys.stderr.write('Please make sure that the coverage module (http://nedbatchelder.com/code/coverage/)\n')
            sys.stderr.write('is properly installed in your interpreter: %s\n' % (sys.executable,))

            import traceback;traceback.print_exc()
        else:
            if coverage_output_dir:
                if not os.path.exists(coverage_output_dir):
                    sys.stderr.write('Error: directory for coverage output (%s) does not exist.\n' % (coverage_output_dir,))

                elif not os.path.isdir(coverage_output_dir):
                    sys.stderr.write('Error: expected (%s) to be a directory.\n' % (coverage_output_dir,))

                else:
                    # Reserve one coverage file per job plus one for this
                    # process; this process takes the first reserved file.
                    n = jobs
                    if n <= 0:
                        n += 1
                    n += 1 #Add 1 more for the current process (which will do the initial import).
                    coverage_files = get_coverage_files(coverage_output_dir, n)
                    os.environ['COVERAGE_FILE'] = coverage_files.pop(0)

                    coverage_instance = coverage.coverage(source=[coverage_include])
                    coverage_instance.start()

            elif coverage_output_file:
                #Client of parallel run.
                os.environ['COVERAGE_FILE'] = coverage_output_file
                coverage_instance = coverage.coverage(source=[coverage_include])
                coverage_instance.start()

    return coverage_files, coverage_instance
|
||||
|
||||
182
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_nose.py
Normal file
182
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_nose.py
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
from nose.plugins.multiprocess import MultiProcessTestRunner # @UnresolvedImport
|
||||
from nose.plugins.base import Plugin # @UnresolvedImport
|
||||
import sys
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support
|
||||
|
||||
#=======================================================================================================================
|
||||
# PydevPlugin
|
||||
#=======================================================================================================================
|
||||
class PydevPlugin(Plugin):
    """Nose plugin that forwards test events to the PyDev IDE over XML-RPC.

    Also starts/stops coverage support around the whole run based on the
    received configuration.
    """

    def __init__(self, configuration):
        self.configuration = configuration
        Plugin.__init__(self)

    def begin(self):
        # Called before any test is run (it's always called, with multiprocess or not)
        self.start_time = time.time()
        self.coverage_files, self.coverage = start_coverage_support(self.configuration)

    def finalize(self, result):
        # Called after all tests are run (it's always called, with multiprocess or not)
        self.coverage.stop()
        self.coverage.save()

        pydev_runfiles_xml_rpc.notifyTestRunFinished('Finished in: %.2f secs.' % (time.time() - self.start_time,))

    #===================================================================================================================
    # Methods below are not called with multiprocess (so, we monkey-patch MultiProcessTestRunner.consolidate
    # so that they're called, but unfortunately we loose some info -- i.e.: the time for each test in this
    # process).
    #===================================================================================================================

    def report_cond(self, cond, test, captured_output, error=''):
        '''
        @param cond: fail, error, ok

        Notifies the IDE of a finished test, resolving the (file, test-name)
        address from whatever shape `test` arrives in (nose test with
        address(), a (file, name) pair from multiprocess, or a setup-error
        Context object).
        '''

        # test.address() is something as:
        # ('D:\\workspaces\\temp\\test_workspace\\pytesting1\\src\\mod1\\hello.py', 'mod1.hello', 'TestCase.testMet1')
        #
        # and we must pass: location, test
        # E.g.: ['D:\\src\\mod1\\hello.py', 'TestCase.testMet1']
        try:
            if hasattr(test, 'address'):
                address = test.address()
                address = address[0], address[2]
            else:
                # multiprocess
                try:
                    address = test[0], test[1]
                except TypeError:
                    # It may be an error at setup, in which case it's not really a test, but a Context object.
                    f = test.context.__file__
                    if f.endswith('.pyc'):
                        f = f[:-1]
                    elif f.endswith('$py.class'):
                        f = f[:-len('$py.class')] + '.py'
                    address = f, '?'
        except:
            sys.stderr.write("PyDev: Internal pydev error getting test address. Please report at the pydev bug tracker\n")
            import traceback;traceback.print_exc()
            sys.stderr.write("\n\n\n")
            address = '?', '?'

        error_contents = self.get_io_from_error(error)
        try:
            time_str = '%.2f' % (time.time() - test._pydev_start_time)
        except:
            # No _pydev_start_time (e.g. multiprocess path): duration unknown.
            time_str = '?'

        pydev_runfiles_xml_rpc.notifyTest(cond, captured_output, error_contents, address[0], address[1], time_str)

    def startTest(self, test):
        # Stamp the start time (used by report_cond) and notify the IDE.
        test._pydev_start_time = time.time()
        if hasattr(test, 'address'):
            address = test.address()
            file, test = address[0], address[2]
        else:
            # multiprocess
            file, test = test
        pydev_runfiles_xml_rpc.notifyStartTest(file, test)

    def get_io_from_error(self, err):
        """Render an (etype, value, tb) tuple as a traceback string; pass
        through strings and multiprocess 2-tuples unchanged."""
        if type(err) == type(()):
            if len(err) != 3:
                if len(err) == 2:
                    return err[1] # multiprocess
            try:
                from StringIO import StringIO
            except:
                from io import StringIO
            s = StringIO()
            etype, value, tb = err
            import traceback;traceback.print_exception(etype, value, tb, file=s)
            return s.getvalue()
        return err

    def get_captured_output(self, test):
        """Return nose's captured stdout for the test, or '' if none."""
        if hasattr(test, 'capturedOutput') and test.capturedOutput:
            return test.capturedOutput
        return ''

    def addError(self, test, err):
        self.report_cond(
            'error',
            test,
            self.get_captured_output(test),
            err,
        )

    def addFailure(self, test, err):
        self.report_cond(
            'fail',
            test,
            self.get_captured_output(test),
            err,
        )

    def addSuccess(self, test):
        self.report_cond(
            'ok',
            test,
            self.get_captured_output(test),
            '',
        )
|
||||
|
||||
|
||||
# Module-level singleton, also read by new_consolidate below.
PYDEV_NOSE_PLUGIN_SINGLETON = None
def start_pydev_nose_plugin_singleton(configuration):
    """Create (replacing any previous one) and return the PydevPlugin singleton."""
    global PYDEV_NOSE_PLUGIN_SINGLETON
    PYDEV_NOSE_PLUGIN_SINGLETON = PydevPlugin(configuration)
    return PYDEV_NOSE_PLUGIN_SINGLETON
|
||||
|
||||
|
||||
|
||||
|
||||
# Keep a reference to the unpatched implementation; new_consolidate wraps it.
original = MultiProcessTestRunner.consolidate
#=======================================================================================================================
# new_consolidate
#=======================================================================================================================
def new_consolidate(self, result, batch_result):
    '''
    Used so that it can work with the multiprocess plugin.
    Monkeypatched because nose seems a bit unsupported at this time (ideally
    the plugin would have this support by default).
    '''
    ret = original(self, result, batch_result)

    # Fragile by design: reaches into the CALLER's frame locals to recover
    # the batch address nose does not pass down to consolidate().
    parent_frame = sys._getframe().f_back
    # addr is something as D:\pytesting1\src\mod1\hello.py:TestCase.testMet4
    # so, convert it to what report_cond expects
    addr = parent_frame.f_locals['addr']
    # rindex (not index): the file part itself may contain ':' (drive letter).
    i = addr.rindex(':')
    addr = [addr[:i], addr[i + 1:]]

    output, testsRun, failures, errors, errorClasses = batch_result
    if failures or errors:
        for failure in failures:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('fail', addr, output, failure)

        for error in errors:
            PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('error', addr, output, error)
    else:
        PYDEV_NOSE_PLUGIN_SINGLETON.report_cond('ok', addr, output)

    return ret

MultiProcessTestRunner.consolidate = new_consolidate
|
||||
295
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py
Normal file
295
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel.py
Normal file
|
|
@ -0,0 +1,295 @@
|
|||
import unittest
|
||||
from _pydev_imps._pydev_saved_modules import thread
|
||||
try:
|
||||
import Queue
|
||||
except:
|
||||
import queue as Queue #@UnresolvedImport
|
||||
from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
import os
|
||||
|
||||
#=======================================================================================================================
|
||||
# flatten_test_suite
|
||||
#=======================================================================================================================
|
||||
def flatten_test_suite(test_suite, ret):
|
||||
if isinstance(test_suite, unittest.TestSuite):
|
||||
for t in test_suite._tests:
|
||||
flatten_test_suite(t, ret)
|
||||
|
||||
elif isinstance(test_suite, unittest.TestCase):
|
||||
ret.append(test_suite)
|
||||
|
||||
|
||||
#=======================================================================================================================
# execute_tests_in_parallel
#=======================================================================================================================
def execute_tests_in_parallel(tests, jobs, split, verbosity, coverage_files, coverage_include):
    '''
    @param tests: list(PydevTestSuite)
        A list with the suites to be run

    @param jobs: int
        Number of parallel client processes to spawn.

    @param split: str
        Either 'module' or the number of tests that should be run in each batch

    @param verbosity: int
        Verbosity forwarded to each client process.

    @param coverage_files: list(file)
        A list with the files that should be used for giving coverage information (if empty, coverage information
        should not be gathered).

    @param coverage_include: str
        The pattern that should be included in the coverage.

    @return: bool
        Returns True if the tests were actually executed in parallel. If the tests were not executed because only 1
        should be used (e.g.: 2 jobs were requested for running 1 test), False will be returned and no tests will be
        run.

        It may also return False if in debug mode (in which case, multi-processes are not accepted)
    '''
    try:
        from _pydevd_bundle.pydevd_comm import get_global_debugger
        if get_global_debugger() is not None:
            return False
    except:
        pass  #Ignore any error here.

    #This queue will receive the tests to be run. Each entry in a queue is a list with the tests to be run together When
    #split == 'tests', each list will have a single element, when split == 'module', each list will have all the tests
    #from a given module.
    tests_queue = []

    queue_elements = []
    if split == 'module':
        # Group the flattened tests by (file, module) so one process runs a whole module.
        module_to_tests = {}
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                key = (test.__pydev_pyfile__, test.__pydev_module_name__)
                module_to_tests.setdefault(key, []).append(test)

        for key, tests in module_to_tests.items():
            queue_elements.append(tests)

        if len(queue_elements) < jobs:
            #Don't create jobs we will never use.
            jobs = len(queue_elements)

    elif split == 'tests':
        # One queue entry per individual test.
        for test in tests:
            lst = []
            flatten_test_suite(test, lst)
            for test in lst:
                queue_elements.append([test])

        if len(queue_elements) < jobs:
            #Don't create jobs we will never use.
            jobs = len(queue_elements)

    else:
        raise AssertionError('Do not know how to handle: %s' % (split,))

    for test_cases in queue_elements:
        test_queue_elements = []
        for test_case in test_cases:
            try:
                test_name = test_case.__class__.__name__+"."+test_case._testMethodName
            except AttributeError:
                #Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
                test_name = test_case.__class__.__name__+"."+test_case._TestCase__testMethodName

            # Serialized as 'filename|Class.testName' -- the format the client
            # processes receive from CommunicationThread.GetTestsToRun.
            test_queue_elements.append(test_case.__pydev_pyfile__+'|'+test_name)

        tests_queue.append(test_queue_elements)

    if jobs < 2:
        # Not worth parallelizing a single batch.
        return False

    sys.stdout.write('Running tests in parallel with: %s jobs.\n' %(jobs,))

    queue = Queue.Queue()
    for item in tests_queue:
        queue.put(item, block=False)

    providers = []
    clients = []
    # xrange comes from the pydevd_constants wild import (py2/py3 compat).
    for i in xrange(jobs):
        # One xml-rpc provider thread + one client subprocess per job.
        test_cases_provider = CommunicationThread(queue)
        providers.append(test_cases_provider)

        test_cases_provider.start()
        port = test_cases_provider.port

        if coverage_files:
            clients.append(ClientThread(i, port, verbosity, coverage_files.pop(0), coverage_include))
        else:
            clients.append(ClientThread(i, port, verbosity))

    for client in clients:
        client.start()

    # Poll until every client subprocess has exited.
    client_alive = True
    while client_alive:
        client_alive = False
        for client in clients:
            #Wait for all the clients to exit.
            if not client.finished:
                client_alive = True
                time.sleep(.2)
                break

    for provider in providers:
        provider.shutdown()

    return True
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# CommunicationThread
#=======================================================================================================================
class CommunicationThread(threading.Thread):
    '''
    xml-rpc server thread that hands out batches of tests to one client
    process and relays the client's notifications back to the main reporter.
    '''

    def __init__(self, tests_queue):
        # tests_queue: Queue whose entries are lists of 'file|Class.test' strings.
        threading.Thread.__init__(self)
        self.setDaemon(True)
        self.queue = tests_queue
        # Set to True once the queue is exhausted (see GetTestsToRun).
        self.finished = False
        from _pydev_bundle.pydev_imports import SimpleXMLRPCServer

        # This is a hack to patch slow socket.getfqdn calls that
        # BaseHTTPServer (and its subclasses) make.
        # See: http://bugs.python.org/issue6085
        # See: http://www.answermysearches.com/xmlrpc-server-slow-in-python-how-to-fix/2140/
        try:
            import BaseHTTPServer
            def _bare_address_string(self):
                host, port = self.client_address[:2]
                return '%s' % host
            BaseHTTPServer.BaseHTTPRequestHandler.address_string = _bare_address_string

        except:
            pass
        # End hack.

        # Create server

        from _pydev_bundle import pydev_localhost
        # Port 0: let the OS pick a free port; the chosen port is published
        # as self.port so the client subprocess can be told where to connect.
        server = SimpleXMLRPCServer((pydev_localhost.get_localhost(), 0), logRequests=False)
        server.register_function(self.GetTestsToRun)
        server.register_function(self.notifyStartTest)
        server.register_function(self.notifyTest)
        server.register_function(self.notifyCommands)
        self.port = server.socket.getsockname()[1]
        self.server = server

    def GetTestsToRun(self, job_id):
        '''
        @param job_id:

        @return: list(str)
            Each entry is a string in the format: filename|Test.testName
        '''
        try:
            ret = self.queue.get(block=False)
            return ret
        except:  #Any exception getting from the queue (empty or not) means we finished our work on providing the tests.
            self.finished = True
            return []

    def notifyCommands(self, job_id, commands):
        #Batch notification.
        for command in commands:
            # command is (method_name, args, kwargs) -- produced by
            # ParallelNotification.to_tuple() on the client side.
            getattr(self, command[0])(job_id, *command[1], **command[2])

        return True

    def notifyStartTest(self, job_id, *args, **kwargs):
        # Forward to the main-process reporter (job_id only selects the caller).
        pydev_runfiles_xml_rpc.notifyStartTest(*args, **kwargs)
        return True

    def notifyTest(self, job_id, *args, **kwargs):
        # Forward a finished-test notification to the main-process reporter.
        pydev_runfiles_xml_rpc.notifyTest(*args, **kwargs)
        return True

    def shutdown(self):
        # Newer servers support shutdown(); otherwise flag the
        # handle_request loop in run() to stop.
        if hasattr(self.server, 'shutdown'):
            self.server.shutdown()
        else:
            self._shutdown = True

    def run(self):
        if hasattr(self.server, 'shutdown'):
            self.server.serve_forever()
        else:
            # Fallback for servers without serve_forever/shutdown support.
            self._shutdown = False
            while not self._shutdown:
                self.server.handle_request()
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# Client
#=======================================================================================================================
class ClientThread(threading.Thread):
    '''
    Launches one client subprocess (pydev_runfiles_parallel_client) that asks
    the CommunicationThread listening on `port` for tests and runs them.
    '''

    def __init__(self, job_id, port, verbosity, coverage_output_file=None, coverage_include=None):
        threading.Thread.__init__(self)
        self.setDaemon(True)
        self.port = port
        self.job_id = job_id
        self.verbosity = verbosity
        # Polled by execute_tests_in_parallel; True once the subprocess exits.
        self.finished = False
        self.coverage_output_file = coverage_output_file
        self.coverage_include = coverage_include

    def _reader_thread(self, pipe, target):
        # Pumps a subprocess pipe into this process' stream one byte at a time.
        # NOTE(review): only reachable from the disabled `if False:` branch below.
        while True:
            target.write(pipe.read(1))

    def run(self):
        try:
            from _pydev_runfiles import pydev_runfiles_parallel_client
            #TODO: Support Jython:
            #
            #For jython, instead of using sys.executable, we should use:
            #r'D:\bin\jdk_1_5_09\bin\java.exe',
            #'-classpath',
            #'D:/bin/jython-2.2.1/jython.jar',
            #'org.python.util.jython',

            # argv for the client: job_id, server port, verbosity
            # [+ coverage file and include pattern when both are given].
            args = [
                sys.executable,
                pydev_runfiles_parallel_client.__file__,
                str(self.job_id),
                str(self.port),
                str(self.verbosity),
            ]

            if self.coverage_output_file and self.coverage_include:
                args.append(self.coverage_output_file)
                args.append(self.coverage_include)

            import subprocess
            # NOTE(review): dead branch kept for reference -- the piped variant
            # is disabled, and its second start_new_thread call passes keyword
            # arguments that thread.start_new_thread does not accept.
            if False:
                proc = subprocess.Popen(args, env=os.environ, shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE)

                thread.start_new_thread(self._reader_thread,(proc.stdout, sys.stdout))

                thread.start_new_thread(target=self._reader_thread,args=(proc.stderr, sys.stderr))
            else:
                # Inherit stdout/stderr and block until the client finishes.
                proc = subprocess.Popen(args, env=os.environ, shell=False)
                proc.wait()

        finally:
            self.finished = True
|
||||
|
||||
214
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py
Normal file
214
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_parallel_client.py
Normal file
|
|
@ -0,0 +1,214 @@
|
|||
from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
|
||||
Queue = _queue.Queue
|
||||
import traceback
|
||||
from _pydev_runfiles.pydev_runfiles_coverage import start_coverage_support_from_params
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# ParallelNotification
#=======================================================================================================================
class ParallelNotification(object):
    '''Value object holding one xml-rpc notification to be relayed later.'''

    def __init__(self, method, args, kwargs):
        # Remote method name plus the positional/keyword arguments for it.
        self.method = method
        self.args = args
        self.kwargs = kwargs

    def to_tuple(self):
        '''Returns the notification as a (method, args, kwargs) tuple.'''
        return (self.method, self.args, self.kwargs)
|
||||
|
||||
|
||||
#=======================================================================================================================
# KillServer
#=======================================================================================================================
class KillServer(object):
    '''Sentinel posted to the notifications queue to make ServerComm exit.'''
    pass
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# ServerComm
#=======================================================================================================================
class ServerComm(threading.Thread):
    '''
    Thread that drains the notifications queue and forwards the queued
    notifications to the xml-rpc server in batches, until a KillServer
    sentinel is received.
    '''

    def __init__(self, job_id, server):
        '''
        @param job_id: identifies this client job on the server side.
        @param server: xml-rpc proxy; must expose .lock and notifyCommands().
        '''
        self.notifications_queue = Queue()
        threading.Thread.__init__(self)
        # Non-daemon: wait for all the notifications to be passed before exiting!
        # (assigning .daemon replaces the setDaemon() call removed in Python 3.13)
        self.daemon = False
        assert job_id is not None
        # Bug fix: the original also asserted `port is not None`, but no `port`
        # name exists in this scope, so every construction raised NameError.
        self.job_id = job_id

        self.finished = False
        self.server = server

    def run(self):
        while True:
            kill_found = False
            commands = []
            # Block for the first command, then drain whatever else is already
            # queued so several notifications go out in a single batch.
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                while True:
                    command = self.notifications_queue.get(block=False)  #No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass  #That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    #Batch notification.
                    self.server.lock.acquire()
                    try:
                        self.server.notifyCommands(self.job_id, commands)
                    finally:
                        self.server.lock.release()
                except:
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# ServerFacade
#=======================================================================================================================
class ServerFacade(object):
    '''
    Stand-in for the real xml-rpc server: instead of doing remote calls it
    enqueues ParallelNotification objects for ServerComm to forward later.
    '''

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def _enqueue(self, method, args, kwargs):
        # Shared path for the notifications that are actually relayed.
        self.notifications_queue.put_nowait(ParallelNotification(method, args, kwargs))

    def notifyTestsCollected(self, *args, **kwargs):
        # This notification won't be passed on.
        pass

    def notifyTestRunFinished(self, *args, **kwargs):
        # This notification won't be passed on.
        pass

    def notifyStartTest(self, *args, **kwargs):
        self._enqueue('notifyStartTest', args, kwargs)

    def notifyTest(self, *args, **kwargs):
        self._enqueue('notifyTest', args, kwargs)
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# run_client
#=======================================================================================================================
def run_client(job_id, port, verbosity, coverage_output_file, coverage_include):
    '''
    Client-side main loop: repeatedly asks the server for a batch of tests
    ('file|Class.test' strings), runs them through PydevTestRunner and relays
    the notifications back through a ServerComm thread.

    @param job_id: id of this worker (convertible to int).
    @param port: xml-rpc port of the CommunicationThread in the main process.
    @param verbosity: verbosity forwarded to the test runner.
    @param coverage_output_file: optional coverage data file for this worker.
    @param coverage_include: optional coverage include pattern.
    '''
    job_id = int(job_id)

    from _pydev_bundle import pydev_localhost
    server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port))
    # The lock serializes xml-rpc calls made by this thread and by ServerComm.
    server.lock = threading.Lock()

    server_comm = ServerComm(job_id, server)
    server_comm.start()

    try:
        server_facade = ServerFacade(server_comm.notifications_queue)
        from _pydev_runfiles import pydev_runfiles
        from _pydev_runfiles import pydev_runfiles_xml_rpc
        pydev_runfiles_xml_rpc.set_server(server_facade)

        #Starts None and when the 1st test is gotten, it's started (because a server may be initiated and terminated
        #before receiving any test -- which would mean a different process got all the tests to run).
        coverage = None

        try:
            tests_to_run = [1]
            while tests_to_run:
                #Investigate: is it dangerous to use the same xmlrpclib server from different threads?
                #It seems it should be, as it creates a new connection for each request...
                server.lock.acquire()
                try:
                    tests_to_run = server.GetTestsToRun(job_id)
                finally:
                    server.lock.release()

                if not tests_to_run:
                    break

                if coverage is None:
                    _coverage_files, coverage = start_coverage_support_from_params(
                        None, coverage_output_file, 1, coverage_include)

                # Group 'file|Class.test' entries into {file: [test, ...]}.
                files_to_tests = {}
                for test in tests_to_run:
                    filename_and_test = test.split('|')
                    if len(filename_and_test) == 2:
                        files_to_tests.setdefault(filename_and_test[0], []).append(filename_and_test[1])

                configuration = pydev_runfiles.Configuration(
                    '',
                    verbosity,
                    None,
                    None,
                    None,
                    files_to_tests,
                    1,  #Always single job here
                    None,

                    #The coverage is handled in this loop.
                    coverage_output_file=None,
                    coverage_include=None,
                )
                test_runner = pydev_runfiles.PydevTestRunner(configuration)
                sys.stdout.flush()
                test_runner.run_tests(handle_coverage=False)
        finally:
            if coverage is not None:
                coverage.stop()
                coverage.save()

    except:
        traceback.print_exc()
    # Always tell ServerComm to flush remaining notifications and exit.
    server_comm.notifications_queue.put_nowait(KillServer())
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
    # Expected argv: job_id port verbosity [coverage_output_file coverage_include]
    if len(sys.argv) - 1 == 3:
        job_id, port, verbosity = sys.argv[1:]
        coverage_output_file, coverage_include = None, None

    elif len(sys.argv) - 1 == 5:
        job_id, port, verbosity, coverage_output_file, coverage_include = sys.argv[1:]

    else:
        # Bug fix: the original concatenated a str with a list (str + sys.argv[1:]),
        # which raised TypeError and masked the intended error message.
        raise AssertionError('Could not find out how to handle the parameters: %s' % (sys.argv[1:],))

    job_id = int(job_id)
    port = int(port)
    verbosity = int(verbosity)
    run_client(job_id, port, verbosity, coverage_output_file, coverage_include)
|
||||
|
||||
|
||||
278
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py
Normal file
278
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_pytest2.py
Normal file
|
|
@ -0,0 +1,278 @@
|
|||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import pickle
|
||||
import zlib
|
||||
import base64
|
||||
import os
|
||||
import py
|
||||
from pydevd_file_utils import _NormFile
|
||||
import pytest
|
||||
import sys
|
||||
import time
|
||||
|
||||
|
||||
#=========================================================================
|
||||
# Load filters with tests we should skip
|
||||
#=========================================================================
|
||||
py_test_accept_filter = None
|
||||
|
||||
|
||||
def _load_filters():
|
||||
global py_test_accept_filter
|
||||
if py_test_accept_filter is None:
|
||||
py_test_accept_filter = os.environ.get('PYDEV_PYTEST_SKIP')
|
||||
if py_test_accept_filter:
|
||||
py_test_accept_filter = pickle.loads(
|
||||
zlib.decompress(base64.b64decode(py_test_accept_filter)))
|
||||
else:
|
||||
py_test_accept_filter = {}
|
||||
|
||||
|
||||
def is_in_xdist_node():
    '''
    Returns True when running inside an xdist worker process (i.e. the pid
    recorded in PYDEV_MAIN_PID differs from the current pid), False otherwise
    (including when PYDEV_MAIN_PID is unset).
    '''
    recorded_main_pid = os.environ.get('PYDEV_MAIN_PID')
    return bool(recorded_main_pid) and recorded_main_pid != str(os.getpid())
|
||||
|
||||
|
||||
# Tracks whether the xml-rpc connection back to the main process was already
# established for this process.
connected = False
def connect_to_server_for_communication_to_xml_rpc_on_xdist():
    '''
    When running as a pytest-xdist worker, connects this process to the
    xml-rpc server in the main process (port taken from PYDEV_PYTEST_SERVER).
    No-op on the main process or when already connected.
    '''
    global connected
    if connected:
        return
    connected = True
    if is_in_xdist_node():
        port = os.environ.get('PYDEV_PYTEST_SERVER')
        if not port:
            sys.stderr.write(
                'Error: no PYDEV_PYTEST_SERVER environment variable defined.\n')
        else:
            pydev_runfiles_xml_rpc.initialize_server(int(port), daemon=True)
|
||||
|
||||
|
||||
# Python major-version flags.
PY2 = sys.version_info[0] <= 2
PY3 = not PY2


class State:
    '''Module-level mutable state shared by the redirection helpers below.'''
    # Wall-clock time at module import; used to report the total run time.
    start_time = time.time()
    # Redirection buffers for stderr/stdout (None until start_redirect()).
    buf_err = None
    buf_out = None
|
||||
|
||||
|
||||
def start_redirect():
    '''
    Starts capturing stdout/stderr into State.buf_out/State.buf_err (keeping
    the original streams working). Idempotent: no-op if already redirecting.
    '''
    if State.buf_out is not None:
        return
    from _pydevd_bundle import pydevd_io
    State.buf_err = pydevd_io.start_redirect(keep_original_redirection=True, std='stderr')
    State.buf_out = pydevd_io.start_redirect(keep_original_redirection=True, std='stdout')
|
||||
|
||||
|
||||
def get_curr_output():
    '''Returns the (stdout, stderr) contents captured so far as a tuple.'''
    out_buffer = State.buf_out
    err_buffer = State.buf_err
    return out_buffer.getvalue(), err_buffer.getvalue()
|
||||
|
||||
|
||||
def pytest_unconfigure():
    '''pytest hook: reports the total run time once the session ends.'''
    if is_in_xdist_node():
        return
    # Only report that it finished when on the main node (we don't want to report
    # the finish on each separate node).
    pydev_runfiles_xml_rpc.notifyTestRunFinished(
        'Finished in: %.2f secs.' % (time.time() - State.start_time,))
|
||||
|
||||
|
||||
def pytest_collection_modifyitems(session, config, items):
    '''
    pytest hook: filters the collected items down to the tests selected in
    the PYDEV_PYTEST_SKIP filter (file -> accepted test names) and reports
    the final collected count to the IDE.
    '''
    # A note: in xdist, this is not called on the main process, only in the
    # secondary nodes, so, we'll actually make the filter and report it multiple
    # times.
    connect_to_server_for_communication_to_xml_rpc_on_xdist()

    _load_filters()
    if not py_test_accept_filter:
        pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
        return  # Keep on going (nothing to filter)

    new_items = []
    for item in items:
        f = _NormFile(str(item.parent.fspath))
        name = item.name

        if f not in py_test_accept_filter:
            # print('Skip file: %s' % (f,))
            continue  # Skip the file

        accept_tests = py_test_accept_filter[f]

        if item.cls is not None:
            class_name = item.cls.__name__
        else:
            class_name = None
        for test in accept_tests:
            # This happens when parameterizing pytest tests: strip the
            # '[param]' suffix so the base name can match the filter entry.
            i = name.find('[')
            if i > 0:
                name = name[:i]
            if test == name:
                # Direct match of the test (just go on with the default
                # loading)
                new_items.append(item)
                break

            # Filter entries may also be 'Class.test' or a bare class name.
            if class_name is not None:
                if test == class_name + '.' + name:
                    new_items.append(item)
                    break

                if class_name == test:
                    new_items.append(item)
                    break
        else:
            pass
            # print('Skip test: %s.%s. Accept: %s' % (class_name, name, accept_tests))

    # Modify the original list
    items[:] = new_items
    pydev_runfiles_xml_rpc.notifyTestsCollected(len(items))
|
||||
|
||||
|
||||
from py.io import TerminalWriter
|
||||
|
||||
def _get_error_contents_from_report(report):
    '''
    Renders report.longrepr to plain text through a markup-less
    TerminalWriter. Returns '' when the report carries no error information.
    '''
    if report.longrepr is None:
        return ''
    writer = TerminalWriter(stringio=True)
    writer.hasmarkup = False
    report.toterminal(writer)
    return writer.stringio.getvalue().strip()
|
||||
|
||||
def pytest_collectreport(report):
    '''pytest hook: reports collection-time errors as a synthetic failed test.'''
    error_contents = _get_error_contents_from_report(report)
    if not error_contents:
        return
    report_test('fail', '<collect errors>', '<collect errors>', '', error_contents, 0.0)
|
||||
|
||||
def append_strings(s1, s2):
    '''
    Concatenates two values, coercing mixed str/bytes so the '+' cannot
    raise: on Python 2 the result prefers bytes (utf-8 encoded), on
    Python 3 it prefers str (bytes decoded as utf-8 with replacement).
    Same-type inputs are concatenated directly.
    '''
    if s1.__class__ == s2.__class__:
        return s1 + s2

    if sys.version_info[0] == 2:
        # Coerce non-string values to str first.
        if not isinstance(s1, basestring):
            s1 = str(s1)
        if not isinstance(s2, basestring):
            s2 = str(s2)
        # Prefer bytes: encode whichever side is unicode.
        if isinstance(s1, unicode):
            s1 = s1.encode('utf-8')
        if isinstance(s2, unicode):
            s2 = s2.encode('utf-8')
    else:
        # Prefer str: decode whichever side is bytes.
        if isinstance(s1, bytes):
            s1 = s1.decode('utf-8', 'replace')
        if isinstance(s2, bytes):
            s2 = s2.decode('utf-8', 'replace')

    return s1 + s2
|
||||
|
||||
|
||||
|
||||
def pytest_runtest_logreport(report):
    '''
    pytest hook: translates a test report into a single notifyTest call
    (status, captured output, error contents, file, test name, duration).

    With xdist this hook also fires on worker nodes; those are skipped so
    only the main process reports.
    '''
    if is_in_xdist_node():
        # When running with xdist, we don't want the report to be called from the node, only
        # from the main process.
        return
    report_duration = report.duration
    report_when = report.when
    report_outcome = report.outcome

    if hasattr(report, 'wasxfail'):
        # xfail/xpass: anything that was not skipped counts as passed.
        if report_outcome != 'skipped':
            report_outcome = 'passed'

    if report_outcome == 'passed':
        # passed on setup/teardown: no need to report if in setup or teardown
        # (only on the actual test if it passed).
        if report_when in ('setup', 'teardown'):
            return

        status = 'ok'

    elif report_outcome == 'skipped':
        status = 'skip'

    else:
        # It has only passed, skipped and failed (no error), so, let's consider
        # error if not on call.
        if report_when in ('setup', 'teardown'):
            status = 'error'

        else:
            # any error in the call (not in setup or teardown) is considered a
            # regular failure.
            status = 'fail'

    # This will work if pytest is not capturing it, if it is, nothing will
    # come from here...
    captured_output, error_contents = getattr(report, 'pydev_captured_output', ''), getattr(report, 'pydev_error_contents', '')
    for type_section, value in report.sections:
        if value:
            if type_section in ('err', 'stderr', 'Captured stderr call'):
                error_contents = append_strings(error_contents, value)
            else:
                # Bug fix: this previously appended to error_contents, which
                # dropped any prior captured output and duplicated the error
                # text in the captured-output field.
                captured_output = append_strings(captured_output, value)

    filename = getattr(report, 'pydev_fspath_strpath', '<unable to get>')
    test = report.location[2]

    if report_outcome != 'skipped':
        # On skipped, we'll have a traceback for the skip, which is not what we
        # want.
        exc = _get_error_contents_from_report(report)
        if exc:
            if error_contents:
                error_contents = append_strings(error_contents, '----------------------------- Exceptions -----------------------------\n')
            error_contents = append_strings(error_contents, exc)

    report_test(status, filename, test, captured_output, error_contents, report_duration)
|
||||
|
||||
|
||||
def report_test(status, filename, test, captured_output, error_contents, duration):
    '''
    Sends one test result to the IDE.

    @param status: 'fail', 'error' or 'ok'
    @param filename: e.g. 'D:\\src\\mod1\\hello.py'
    @param test: e.g. 'TestCase.testMet1'
    @param duration: elapsed seconds (reported with 2 decimal places).
    '''
    pydev_runfiles_xml_rpc.notifyTest(
        status, captured_output, error_contents, filename, test, '%.2f' % (duration,))
|
||||
|
||||
|
||||
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    '''
    pytest hookwrapper: attaches the test file path and our own captured
    stdout/stderr to the report so pytest_runtest_logreport can use them.
    '''
    outcome = yield
    report = outcome.get_result()
    report.pydev_fspath_strpath = item.fspath.strpath
    report.pydev_captured_output, report.pydev_error_contents = get_curr_output()
|
||||
|
||||
|
||||
@pytest.mark.tryfirst
def pytest_runtest_setup(item):
    '''
    pytest hook: starts output redirection and notifies the IDE that the
    test is starting.

    Note: with xdist will be on a secondary process.
    '''
    # We have our own redirection: if xdist does its redirection, we'll have
    # nothing in our contents (which is OK), but if it does, we'll get nothing
    # from pytest but will get our own here.
    start_redirect()
    filename = item.fspath.strpath
    test = item.location[2]

    pydev_runfiles_xml_rpc.notifyStartTest(filename, test)
|
||||
185
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py
Normal file
185
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_unittest.py
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
try:
|
||||
import unittest2 as python_unittest # @UnresolvedImport
|
||||
except:
|
||||
import unittest as python_unittest
|
||||
|
||||
from _pydev_runfiles import pydev_runfiles_xml_rpc
|
||||
import time
|
||||
from _pydevd_bundle import pydevd_io
|
||||
import traceback
|
||||
from _pydevd_bundle.pydevd_constants import * #@UnusedWildImport
|
||||
|
||||
|
||||
#=======================================================================================================================
# PydevTextTestRunner
#=======================================================================================================================
class PydevTextTestRunner(python_unittest.TextTestRunner):
    '''TextTestRunner whose results are PydevTestResult instances.'''

    def _makeResult(self):
        # Route results through the pydev xml-rpc reporting result class.
        return PydevTestResult(self.stream, self.descriptions, self.verbosity)
|
||||
|
||||
|
||||
# Capture the concrete TextTestResult class used by the running unittest
# implementation (it varies between unittest/unittest2 versions) so that
# PydevTestResult below can subclass it.
_PythonTextTestResult = python_unittest.TextTestRunner()._makeResult().__class__
|
||||
|
||||
#=======================================================================================================================
|
||||
# PydevTestResult
|
||||
#=======================================================================================================================
|
||||
class PydevTestResult(_PythonTextTestResult):
|
||||
|
||||
def addSubTest(self, test, subtest, err):
|
||||
"""Called at the end of a subtest.
|
||||
'err' is None if the subtest ended successfully, otherwise it's a
|
||||
tuple of values as returned by sys.exc_info().
|
||||
"""
|
||||
_PythonTextTestResult.addSubTest(self, test, subtest, err)
|
||||
if err is not None:
|
||||
subdesc = subtest._subDescription()
|
||||
error = (test, self._exc_info_to_string(err, test))
|
||||
self._reportErrors([error], [], '', '%s %s' % (self.get_test_name(test), subdesc))
|
||||
|
||||
|
||||
def startTest(self, test):
|
||||
_PythonTextTestResult.startTest(self, test)
|
||||
self.buf = pydevd_io.start_redirect(keep_original_redirection=True, std='both')
|
||||
self.start_time = time.time()
|
||||
self._current_errors_stack = []
|
||||
self._current_failures_stack = []
|
||||
|
||||
try:
|
||||
test_name = test.__class__.__name__+"."+test._testMethodName
|
||||
except AttributeError:
|
||||
#Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
|
||||
test_name = test.__class__.__name__+"."+test._TestCase__testMethodName
|
||||
|
||||
pydev_runfiles_xml_rpc.notifyStartTest(
|
||||
test.__pydev_pyfile__, test_name)
|
||||
|
||||
|
||||
|
||||
|
||||
def get_test_name(self, test):
|
||||
try:
|
||||
try:
|
||||
test_name = test.__class__.__name__ + "." + test._testMethodName
|
||||
except AttributeError:
|
||||
#Support for jython 2.1 (__testMethodName is pseudo-private in the test case)
|
||||
try:
|
||||
test_name = test.__class__.__name__ + "." + test._TestCase__testMethodName
|
||||
#Support for class/module exceptions (test is instance of _ErrorHolder)
|
||||
except:
|
||||
test_name = test.description.split()[1][1:-1] + ' <' + test.description.split()[0] + '>'
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return '<unable to get test name>'
|
||||
return test_name
|
||||
|
||||
|
||||
def stopTest(self, test):
|
||||
end_time = time.time()
|
||||
pydevd_io.end_redirect(std='both')
|
||||
|
||||
_PythonTextTestResult.stopTest(self, test)
|
||||
|
||||
captured_output = self.buf.getvalue()
|
||||
del self.buf
|
||||
error_contents = ''
|
||||
test_name = self.get_test_name(test)
|
||||
|
||||
|
||||
diff_time = '%.2f' % (end_time - self.start_time)
|
||||
if not self._current_errors_stack and not self._current_failures_stack:
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'ok', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
else:
|
||||
self._reportErrors(self._current_errors_stack, self._current_failures_stack, captured_output, test_name)
|
||||
|
||||
|
||||
def _reportErrors(self, errors, failures, captured_output, test_name, diff_time=''):
|
||||
error_contents = []
|
||||
for test, s in errors+failures:
|
||||
if type(s) == type((1,)): #If it's a tuple (for jython 2.1)
|
||||
sio = StringIO()
|
||||
traceback.print_exception(s[0], s[1], s[2], file=sio)
|
||||
s = sio.getvalue()
|
||||
error_contents.append(s)
|
||||
|
||||
sep = '\n'+self.separator1
|
||||
error_contents = sep.join(error_contents)
|
||||
|
||||
if errors and not failures:
|
||||
try:
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
except:
|
||||
file_start = error_contents.find('File "')
|
||||
file_end = error_contents.find('", ', file_start)
|
||||
if file_start != -1 and file_end != -1:
|
||||
file = error_contents[file_start+6:file_end]
|
||||
else:
|
||||
file = '<unable to get file>'
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'error', captured_output, error_contents, file, test_name, diff_time)
|
||||
|
||||
elif failures and not errors:
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'fail', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
|
||||
else: #Ok, we got both, errors and failures. Let's mark it as an error in the end.
|
||||
pydev_runfiles_xml_rpc.notifyTest(
|
||||
'error', captured_output, error_contents, test.__pydev_pyfile__, test_name, diff_time)
|
||||
|
||||
|
||||
|
||||
def addError(self, test, err):
    # Record the error and either report it immediately (when outside a
    # startTest..stopTest window) or stack it for stopTest to report.
    _PythonTextTestResult.addError(self, test, err)
    #Support for class/module exceptions (test is instance of _ErrorHolder)
    if not hasattr(self, '_current_errors_stack') or test.__class__.__name__ == '_ErrorHolder':
        #Not in start...end, so, report error now (i.e.: django pre/post-setup)
        self._reportErrors([self.errors[-1]], [], '', self.get_test_name(test))
    else:
        self._current_errors_stack.append(self.errors[-1])
||||
def addFailure(self, test, err):
    # Record the failure and either report it immediately (when outside a
    # startTest..stopTest window) or stack it for stopTest to report.
    _PythonTextTestResult.addFailure(self, test, err)
    if not hasattr(self, '_current_failures_stack'):
        #Not in start...end, so, report error now (i.e.: django pre/post-setup)
        self._reportErrors([], [self.failures[-1]], '', self.get_test_name(test))
    else:
        self._current_failures_stack.append(self.failures[-1])
try:
    #Version 2.7 onwards has a different structure... Let's not make any changes in it for now
    #(waiting for bug: http://bugs.python.org/issue11798)
    try:
        from unittest2 import suite
    except ImportError:
        from unittest import suite
    #===================================================================================================================
    # PydevTestSuite
    #===================================================================================================================
    class PydevTestSuite(python_unittest.TestSuite):
        # Modern unittest: the stock TestSuite behavior is fine as-is.
        pass


except ImportError:

    #===================================================================================================================
    # PydevTestSuite
    #===================================================================================================================
    class PydevTestSuite(python_unittest.TestSuite):
        # Older unittest (no 'suite' submodule): provide a run() that drops
        # the reference to each test as soon as it finishes.

        def run(self, result):
            for index, test in enumerate(self._tests):
                if result.shouldStop:
                    break
                test(result)

                # Let the memory be released!
                self._tests[index] = None

            return result
281
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py
Normal file
281
ptvsd/pydevd/_pydev_runfiles/pydev_runfiles_xml_rpc.py
Normal file
|
|
@ -0,0 +1,281 @@
|
|||
import threading
|
||||
import traceback
|
||||
import warnings
|
||||
|
||||
from _pydev_bundle._pydev_filesystem_encoding import getfilesystemencoding
|
||||
from _pydev_bundle.pydev_imports import xmlrpclib, _queue
|
||||
Queue = _queue.Queue
|
||||
from _pydevd_bundle.pydevd_constants import *
|
||||
|
||||
#This may happen in IronPython (in Python it shouldn't happen as there are
|
||||
#'fast' replacements that are used in xmlrpclib.py)
|
||||
warnings.filterwarnings(
|
||||
'ignore', 'The xmllib module is obsolete.*', DeprecationWarning)
|
||||
|
||||
|
||||
file_system_encoding = getfilesystemencoding()
|
||||
|
||||
#=======================================================================================================================
|
||||
# _ServerHolder
|
||||
#=======================================================================================================================
|
||||
class _ServerHolder:
|
||||
'''
|
||||
Helper so that we don't have to use a global here.
|
||||
'''
|
||||
SERVER = None
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# set_server
|
||||
#=======================================================================================================================
|
||||
def set_server(server):
    """Install *server* as the process-wide xml-rpc server facade."""
    _ServerHolder.SERVER = server
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# ParallelNotification
|
||||
#=======================================================================================================================
|
||||
class ParallelNotification(object):
    """A queued xml-rpc call: the remote method name plus its argument tuple."""

    def __init__(self, method, args):
        self.method = method
        self.args = args

    def to_tuple(self):
        """Return the (method, args) pair in the shape notifyCommands expects."""
        return self.method, self.args
|
||||
|
||||
#=======================================================================================================================
|
||||
# KillServer
|
||||
#=======================================================================================================================
|
||||
class KillServer(object):
    """Sentinel posted to the notifications queue to make ServerComm flush
    pending notifications and exit."""
|
||||
|
||||
#=======================================================================================================================
|
||||
# ServerFacade
|
||||
#=======================================================================================================================
|
||||
class ServerFacade(object):
    """Queue-backed stand-in for the real xml-rpc server.

    Each notifyXXX call is recorded as a ParallelNotification on the
    notifications queue; the ServerComm thread later forwards them to the
    client in batches.
    """

    def __init__(self, notifications_queue):
        self.notifications_queue = notifications_queue

    def _enqueue(self, method, args):
        # Single helper shared by all the notifications below.
        self.notifications_queue.put_nowait(ParallelNotification(method, args))

    def notifyTestsCollected(self, *args):
        self._enqueue('notifyTestsCollected', args)

    def notifyConnected(self, *args):
        self._enqueue('notifyConnected', args)

    def notifyTestRunFinished(self, *args):
        self._enqueue('notifyTestRunFinished', args)

    def notifyStartTest(self, *args):
        self._enqueue('notifyStartTest', args)

    def notifyTest(self, *args):
        # Arguments may contain non-ascii bytes/strings: encode them first.
        args = tuple(_encode_if_needed(arg) for arg in args)
        self._enqueue('notifyTest', args)
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# ServerComm
|
||||
#=======================================================================================================================
|
||||
class ServerComm(threading.Thread):
    # Thread that drains the notifications queue and forwards the queued
    # calls to the client xml-rpc server in batches (server.notifyCommands).

    def __init__(self, notifications_queue, port, daemon=False):
        threading.Thread.__init__(self)
        self.setDaemon(daemon) # If False, wait for all the notifications to be passed before exiting!
        self.finished = False
        self.notifications_queue = notifications_queue

        from _pydev_bundle import pydev_localhost

        # It is necessary to specify an encoding, that matches
        # the encoding of all bytes-strings passed into an
        # XMLRPC call: "All 8-bit strings in the data structure are assumed to use the
        # packet encoding.  Unicode strings are automatically converted,
        # where necessary."
        # Byte strings most likely come from file names.
        encoding = file_system_encoding
        if encoding == "mbcs":
            # Windos symbolic name for the system encoding CP_ACP.
            # We need to convert it into a encoding that is recognized by Java.
            # Unfortunately this is not always possible. You could use
            # GetCPInfoEx and get a name similar to "windows-1251". Then
            # you need a table to translate on a best effort basis. Much to complicated.
            # ISO-8859-1 is good enough.
            encoding = "ISO-8859-1"

        self.server = xmlrpclib.Server('http://%s:%s' % (pydev_localhost.get_localhost(), port),
                encoding=encoding)

    def run(self):
        # Block for the first item, then opportunistically drain the queue so
        # that several notifications go out in one xml-rpc round-trip.
        while True:
            kill_found = False
            commands = []
            command = self.notifications_queue.get(block=True)
            if isinstance(command, KillServer):
                kill_found = True
            else:
                assert isinstance(command, ParallelNotification)
                commands.append(command.to_tuple())

            try:
                while True:
                    command = self.notifications_queue.get(block=False) #No block to create a batch.
                    if isinstance(command, KillServer):
                        kill_found = True
                    else:
                        assert isinstance(command, ParallelNotification)
                        commands.append(command.to_tuple())
            except:
                pass #That's OK, we're getting it until it becomes empty so that we notify multiple at once.

            if commands:
                try:
                    self.server.notifyCommands(commands)
                except:
                    # Best effort: a failed delivery must not kill the thread.
                    traceback.print_exc()

            if kill_found:
                self.finished = True
                return
|
||||
|
||||
#=======================================================================================================================
|
||||
# initialize_server
|
||||
#=======================================================================================================================
|
||||
def initialize_server(port, daemon=False):
    # Create (once) the global server facade and its communication thread.
    # When port is None a Null object is installed, so callers may notify
    # unconditionally even without a client connection.
    if _ServerHolder.SERVER is None:
        if port is not None:
            notifications_queue = Queue()
            _ServerHolder.SERVER = ServerFacade(notifications_queue)
            _ServerHolder.SERVER_COMM = ServerComm(notifications_queue, port, daemon)
            _ServerHolder.SERVER_COMM.start()
        else:
            #Create a null server, so that we keep the interface even without any connection.
            _ServerHolder.SERVER = Null()
            _ServerHolder.SERVER_COMM = Null()

    try:
        _ServerHolder.SERVER.notifyConnected()
    except:
        traceback.print_exc()
|
||||
|
||||
#=======================================================================================================================
|
||||
# notifyTest
|
||||
#=======================================================================================================================
|
||||
def notifyTestsCollected(tests_count):
    """Tell the client how many tests were collected for this run."""
    assert tests_count is not None
    try:
        _ServerHolder.SERVER.notifyTestsCollected(tests_count)
    except:
        # Best effort: notification failures must not abort the test run.
        traceback.print_exc()
||||
|
||||
#=======================================================================================================================
|
||||
# notifyStartTest
|
||||
#=======================================================================================================================
|
||||
def notifyStartTest(file, test):
    """Notify that *test* in *file* is about to run.

    :param file: the tests file (c:/temp/test.py)
    :param test: the test ran (i.e.: TestCase.test1)
    """
    assert file is not None
    if test is None:
        test = '' #Could happen if we have an import error importing module.

    try:
        _ServerHolder.SERVER.notifyStartTest(file, test)
    except:
        # Best effort: notification failures must not abort the test run.
        traceback.print_exc()
||||
|
||||
def _encode_if_needed(obj):
    # In the java side we expect strings to be ISO-8859-1 (org.python.pydev.debug.pyunit.PyUnitServer.initializeDispatches().new Dispatch() {...}.getAsStr(Object))
    if not IS_PY3K:
        if isinstance(obj, str):
            # Py2 bytes: decode with the console encoding, then re-encode as
            # latin-1 using xml char refs for anything unrepresentable.
            try:
                return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
            except:
                # Best effort: ship the raw bytes (e.g. sys.stdin.encoding may
                # be None when stdin is redirected).
                return xmlrpclib.Binary(obj)

        elif isinstance(obj, unicode):
            return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))

    else:
        if isinstance(obj, str): # Unicode in py3
            return xmlrpclib.Binary(obj.encode('ISO-8859-1', 'xmlcharrefreplace'))

        elif isinstance(obj, bytes):
            try:
                return xmlrpclib.Binary(obj.decode(sys.stdin.encoding).encode('ISO-8859-1', 'xmlcharrefreplace'))
            except:
                return xmlrpclib.Binary(obj) #bytes already

    # Non-string objects (numbers, None, ...) go through unchanged.
    return obj
||||
|
||||
#=======================================================================================================================
|
||||
# notifyTest
|
||||
#=======================================================================================================================
|
||||
def notifyTest(cond, captured_output, error_contents, file, test, time):
    """Notify the outcome of a single test.

    :param cond: ok, fail, error
    :param captured_output: output captured from stdout
    :param error_contents: output captured from stderr
    :param file: the tests file (c:/temp/test.py)
    :param test: the test ran (i.e.: TestCase.test1)
    :param time: float with the number of seconds elapsed
    """
    assert cond is not None
    assert captured_output is not None
    assert error_contents is not None
    assert file is not None
    if test is None:
        test = '' #Could happen if we have an import error importing module.
    assert time is not None
    try:
        # Encode to the wire format the java side expects before sending.
        captured_output = _encode_if_needed(captured_output)
        error_contents = _encode_if_needed(error_contents)

        _ServerHolder.SERVER.notifyTest(cond, captured_output, error_contents, file, test, time)
    except:
        # Best effort: notification failures must not abort the test run.
        traceback.print_exc()
|
||||
#=======================================================================================================================
|
||||
# notifyTestRunFinished
|
||||
#=======================================================================================================================
|
||||
def notifyTestRunFinished(total_time):
    """Notify that the whole run finished, with its total elapsed time."""
    assert total_time is not None
    try:
        _ServerHolder.SERVER.notifyTestRunFinished(total_time)
    except:
        # Best effort: notification failures must not abort the test run.
        traceback.print_exc()
||||
|
||||
#=======================================================================================================================
|
||||
# force_server_kill
|
||||
#=======================================================================================================================
|
||||
def force_server_kill():
    """Ask the ServerComm thread to flush pending notifications and exit."""
    queue = _ServerHolder.SERVER_COMM.notifications_queue
    queue.put_nowait(KillServer())
||||
0
ptvsd/pydevd/_pydevd_bundle/__init__.py
Normal file
0
ptvsd/pydevd/_pydevd_bundle/__init__.py
Normal file
513
ptvsd/pydevd/_pydevd_bundle/pydevconsole_code_for_ironpython.py
Normal file
513
ptvsd/pydevd/_pydevd_bundle/pydevconsole_code_for_ironpython.py
Normal file
|
|
@ -0,0 +1,513 @@
|
|||
"""Utilities needed to emulate Python's interactive interpreter.
|
||||
|
||||
"""
|
||||
|
||||
# Inspired by similar code by Jeff Epler and Fredrik Lundh.
|
||||
|
||||
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#START --------------------------- from codeop import CommandCompiler, compile_command
|
||||
r"""Utilities to compile possibly incomplete Python source code.
|
||||
|
||||
This module provides two interfaces, broadly similar to the builtin
|
||||
function compile(), which take program text, a filename and a 'mode'
|
||||
and:
|
||||
|
||||
- Return code object if the command is complete and valid
|
||||
- Return None if the command is incomplete
|
||||
- Raise SyntaxError, ValueError or OverflowError if the command is a
|
||||
syntax error (OverflowError and ValueError can be produced by
|
||||
malformed literals).
|
||||
|
||||
Approach:
|
||||
|
||||
First, check if the source consists entirely of blank lines and
|
||||
comments; if so, replace it with 'pass', because the built-in
|
||||
parser doesn't always do the right thing for these.
|
||||
|
||||
Compile three times: as is, with \n, and with \n\n appended. If it
|
||||
compiles as is, it's complete. If it compiles with one \n appended,
|
||||
we expect more. If it doesn't compile either way, we compare the
|
||||
error we get when compiling with \n or \n\n appended. If the errors
|
||||
are the same, the code is broken. But if the errors are different, we
|
||||
expect more. Not intuitive; not even guaranteed to hold in future
|
||||
releases; but this matches the compiler's behavior from Python 1.4
|
||||
through 2.2, at least.
|
||||
|
||||
Caveat:
|
||||
|
||||
It is possible (but not likely) that the parser stops parsing with a
|
||||
successful outcome before reaching the end of the source; in this
|
||||
case, trailing symbols may be ignored instead of causing an error.
|
||||
For example, a backslash followed by two newlines may be followed by
|
||||
arbitrary garbage. This will be fixed once the API for the parser is
|
||||
better.
|
||||
|
||||
The two interfaces are:
|
||||
|
||||
compile_command(source, filename, symbol):
|
||||
|
||||
Compiles a single command in the manner described above.
|
||||
|
||||
CommandCompiler():
|
||||
|
||||
Instances of this class have __call__ methods identical in
|
||||
signature to compile_command; the difference is that if the
|
||||
instance compiles program text containing a __future__ statement,
|
||||
the instance 'remembers' and compiles all subsequent program texts
|
||||
with the statement in force.
|
||||
|
||||
The module also provides another class:
|
||||
|
||||
Compile():
|
||||
|
||||
Instances of this class act like the built-in function compile,
|
||||
but with 'memory' in the sense described above.
|
||||
"""
|
||||
|
||||
import __future__
|
||||
|
||||
_features = [getattr(__future__, fname)
|
||||
for fname in __future__.all_feature_names]
|
||||
|
||||
__all__ = ["compile_command", "Compile", "CommandCompiler"]
|
||||
|
||||
PyCF_DONT_IMPLY_DEDENT = 0x200 # Matches pythonrun.h
|
||||
|
||||
def _maybe_compile(compiler, source, filename, symbol):
    # codeop strategy: compile as-is, with one and with two newlines appended,
    # then compare the errors to tell "broken" from "incomplete" input.
    # NOTE: Python-2-only syntax ('except E, err') -- runs under Py2/IronPython.
    # Check for source consisting of only blank lines and comments
    for line in source.split("\n"):
        line = line.strip()
        if line and line[0] != '#':
            break # Leave it alone
    else:
        if symbol != "eval":
            source = "pass" # Replace it with a 'pass' statement

    err = err1 = err2 = None
    code = code1 = code2 = None

    try:
        code = compiler(source, filename, symbol)
    except SyntaxError, err:
        pass

    try:
        code1 = compiler(source + "\n", filename, symbol)
    except SyntaxError, err1:
        pass

    try:
        code2 = compiler(source + "\n\n", filename, symbol)
    except SyntaxError, err2:
        pass

    if code:
        return code
    # Same error with one and two newlines appended -> genuinely broken input;
    # different errors (or code1 compiled) -> incomplete, return None.
    if not code1 and repr(err1) == repr(err2):
        raise SyntaxError, err1
|
||||
def _compile(source, filename, symbol):
    # Compile without the implicit trailing-newline tolerance so incomplete
    # blocks are reported as such (matches pythonrun.h behavior).
    return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT)
|
||||
def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile a command and determine whether it is incomplete.

    Arguments:

    source -- the source string; may contain \n characters
    filename -- optional filename from which source was read; default
                "<input>"
    symbol -- optional grammar start symbol; "single" (default) or "eval"

    Return value / exceptions raised:

    - Return a code object if the command is complete and valid
    - Return None if the command is incomplete
    - Raise SyntaxError, ValueError or OverflowError if the command is a
      syntax error (OverflowError and ValueError can be produced by
      malformed literals).
    """
    return _maybe_compile(_compile, source, filename, symbol)
|
||||
class Compile:
    """Instances of this class behave much like the built-in compile
    function, but if one is used to compile text containing a future
    statement, it "remembers" and compiles all subsequent program texts
    with the statement in force."""
    def __init__(self):
        # Start with only the flag that disables the implicit dedent.
        self.flags = PyCF_DONT_IMPLY_DEDENT

    def __call__(self, source, filename, symbol):
        codeob = compile(source, filename, symbol, self.flags, 1)
        # Remember any __future__ feature the compiled code enabled so it
        # stays in force for subsequent compilations.
        for feature in _features:
            if codeob.co_flags & feature.compiler_flag:
                self.flags |= feature.compiler_flag
        return codeob
|
||||
class CommandCompiler:
    """Instances of this class have __call__ methods identical in
    signature to compile_command; the difference is that if the
    instance compiles program text containing a __future__ statement,
    the instance 'remembers' and compiles all subsequent program texts
    with the statement in force."""

    def __init__(self,):
        # Stateful compiler that accumulates __future__ flags across calls.
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
                    default "<input>"
        symbol -- optional grammar start symbol; "single" (default) or
                  "eval"

        Return value / exceptions raised:

        - Return a code object if the command is complete and valid
        - Return None if the command is incomplete
        - Raise SyntaxError, ValueError or OverflowError if the command is a
          syntax error (OverflowError and ValueError can be produced by
          malformed literals).
        """
        return _maybe_compile(self.compiler, source, filename, symbol)
||||
|
||||
#END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
#END --------------------------- from codeop import CommandCompiler, compile_command
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact",
|
||||
"compile_command"]
|
||||
|
||||
def softspace(file, newvalue):
    """Swap *file*.softspace for *newvalue* and return the previous value.

    Objects without a (writable) softspace attribute are tolerated: the old
    value defaults to 0 and failed writes are silently ignored.
    """
    oldvalue = getattr(file, 'softspace', 0)
    try:
        file.softspace = newvalue
    except (AttributeError, TypeError):
        # "attribute-less object" or "read-only attributes"
        pass
    return oldvalue
|
||||
class InteractiveInterpreter:
    """Base class for InteractiveConsole.

    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).

    NOTE: vendored (for IronPython) from the stdlib 'code' module; contains
    Python-2-only syntax ('exec code in ...').
    """

    def __init__(self, locals=None):
        """Constructor.

        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.

        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        self.compile = CommandCompiler()

    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.

        Arguments are as for compile_command().

        One several things can happen:

        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.

        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.

        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).

        The return value is True in case 2, False in the other cases (unless
        an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.

        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False

        if code is None:
            # Case 2
            return True

        # Case 3
        self.runcode(code)
        return False

    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.

        """
        try:
            # Python-2-only exec-statement syntax (intentional: IronPython).
            exec code in self.locals
        except SystemExit:
            raise
        except:
            self.showtraceback()
        else:
            # Mimic the interactive interpreter's trailing-newline handling.
            if softspace(sys.stdout, 0):
                sys.stdout.write('\n')

    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.

        This doesn't display a stack trace because there isn't one.

        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).

        The output is written by self.write(), below.

        """
        type, value, sys.last_traceback = sys.exc_info()
        sys.last_type = type
        sys.last_value = value
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value
            except:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        list = traceback.format_exception_only(type, value)
        # NOTE: relies on Py2 eager map() for the side effect of writing.
        map(self.write, list)

    def showtraceback(self):
        """Display the exception that just occurred.

        We remove the first stack item because it is our own code.

        The output is written by self.write(), below.

        """
        try:
            type, value, tb = sys.exc_info()
            sys.last_type = type
            sys.last_value = value
            sys.last_traceback = tb
            tblist = traceback.extract_tb(tb)
            # Drop our own frame from the reported traceback.
            del tblist[:1]
            list = traceback.format_list(tblist)
            if list:
                list.insert(0, "Traceback (most recent call last):\n")
            list[len(list):] = traceback.format_exception_only(type, value)
        finally:
            # Break the traceback reference cycle.
            tblist = tb = None
        map(self.write, list)

    def write(self, data):
        """Write a string.

        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.

        """
        sys.stderr.write(data)
||||
|
||||
class InteractiveConsole(InteractiveInterpreter):
    """Closely emulate the behavior of the interactive Python interpreter.

    This class builds on InteractiveInterpreter and adds prompting
    using the familiar sys.ps1 and sys.ps2, and input buffering.

    NOTE: vendored (for IronPython) from the stdlib 'code' module;
    Python-2-only ('unicode', built-in raw_input).
    """

    def __init__(self, locals=None, filename="<console>"):
        """Constructor.

        The optional locals argument will be passed to the
        InteractiveInterpreter base class.

        The optional filename argument should specify the (file)name
        of the input stream; it will show up in tracebacks.

        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()

    def resetbuffer(self):
        """Reset the input buffer."""
        self.buffer = []

    def interact(self, banner=None):
        """Closely emulate the interactive Python console.

        The optional banner argument specify the banner to print
        before the first interaction; by default it prints a banner
        similar to the one printed by the real Python interpreter,
        followed by the current class name in parentheses (so as not
        to confuse this with the real interpreter -- since it's so
        close!).

        """
        try:
            sys.ps1 #@UndefinedVariable
        except AttributeError:
            sys.ps1 = ">>> "
        try:
            sys.ps2 #@UndefinedVariable
        except AttributeError:
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        else:
            self.write("%s\n" % str(banner))
        more = 0
        while 1:
            try:
                if more:
                    prompt = sys.ps2 #@UndefinedVariable
                else:
                    prompt = sys.ps1 #@UndefinedVariable
                try:
                    line = self.raw_input(prompt)
                    # Can be None if sys.stdin was redefined
                    encoding = getattr(sys.stdin, "encoding", None)
                    if encoding and not isinstance(line, unicode):
                        line = line.decode(encoding)
                except EOFError:
                    self.write("\n")
                    break
                else:
                    more = self.push(line)
            except KeyboardInterrupt:
                # Ctrl-C discards the pending buffer and restarts the prompt.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                more = 0

    def push(self, line):
        """Push a line to the interpreter.

        The line should not have a trailing newline; it may have
        internal newlines.  The line is appended to a buffer and the
        interpreter's runsource() method is called with the
        concatenated contents of the buffer as source.  If this
        indicates that the command was executed or invalid, the buffer
        is reset; otherwise, the command is incomplete, and the buffer
        is left as it was after the line was appended.  The return
        value is 1 if more input is required, 0 if the line was dealt
        with in some way (this is the same as runsource()).

        """
        self.buffer.append(line)
        source = "\n".join(self.buffer)
        more = self.runsource(source, self.filename)
        if not more:
            self.resetbuffer()
        return more

    def raw_input(self, prompt=""):
        """Write a prompt and read a line.

        The returned line does not include the trailing newline.
        When the user enters the EOF key sequence, EOFError is raised.

        The base implementation uses the built-in function
        raw_input(); a subclass may replace this with a different
        implementation.

        """
        return raw_input(prompt)
||||
|
||||
|
||||
def interact(banner=None, readfunc=None, local=None):
    """Closely emulate the interactive Python interpreter.

    This is a backwards compatible interface to the InteractiveConsole
    class.  When readfunc is not specified, it attempts to import the
    readline module to enable GNU readline if it is available.

    Arguments (all optional, all default to None):

    banner -- passed to InteractiveConsole.interact()
    readfunc -- if not None, replaces InteractiveConsole.raw_input()
    local -- passed to InteractiveInterpreter.__init__()

    """
    console = InteractiveConsole(local)
    if readfunc is not None:
        console.raw_input = readfunc
    else:
        try:
            # Importing readline enables line editing as a side effect.
            import readline
        except ImportError:
            pass
    console.interact(banner)
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test: run the emulated console under pdb.
    import pdb
    pdb.run("interact()\n")
|
||||
23
ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py
Normal file
23
ptvsd/pydevd/_pydevd_bundle/pydevd_additional_thread_info.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# Defines which version of the PyDBAdditionalThreadInfo we'll use.
# Selection is driven by the PYDEVD_USE_CYTHON environment variable:
# 'YES' forces the compiled (cython) version, 'NO' forces the pure-python
# version, and unset tries cython first with a silent pure-python fallback.

import os
use_cython = os.getenv('PYDEVD_USE_CYTHON', None)

if use_cython == 'YES':
    # We must import the cython version if forcing cython
    from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo  # @UnusedImport

elif use_cython == 'NO':
    # Use the regular version if not forcing cython
    from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo  # @UnusedImport @Reimport

elif use_cython is None:
    # Regular: use fallback if not found (message is already given elsewhere).
    try:
        from _pydevd_bundle.pydevd_cython_wrapper import PyDBAdditionalThreadInfo
    except ImportError:
        from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo  # @UnusedImport
else:
    raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (accepted: YES, NO)' % (use_cython,))
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,124 @@
|
|||
import sys
from _pydevd_bundle.pydevd_constants import STATE_RUN, PYTHON_SUSPEND, IS_JYTHON, IS_IRONPYTHON
# IFDEF CYTHON
# ELSE
from _pydevd_bundle.pydevd_frame import PyDBFrame
# ENDIF

# Version marker for this module.
version = 4

if not hasattr(sys, '_current_frames'):

    # Some versions of Jython don't have it (but we can provide a replacement)
    if IS_JYTHON:
        from java.lang import NoSuchFieldException
        from org.python.core import ThreadStateMapping
        try:
            cachedThreadState = ThreadStateMapping.getDeclaredField('globalThreadStates')  # Dev version
        except NoSuchFieldException:
            cachedThreadState = ThreadStateMapping.getDeclaredField('cachedThreadState')  # Release Jython 2.7.0
        # Reflection: make the private field readable and grab the mapping.
        cachedThreadState.accessible = True
        thread_states = cachedThreadState.get(ThreadStateMapping)

        def _current_frames():
            """Replacement for sys._current_frames: dict(thread id -> topmost frame),
            built from Jython's internal thread-state mapping (via reflection above)."""
            as_array = thread_states.entrySet().toArray()
            ret = {}
            for thread_to_state in as_array:
                thread = thread_to_state.getKey()
                if thread is None:
                    continue
                thread_state = thread_to_state.getValue()
                if thread_state is None:
                    continue

                frame = thread_state.frame
                if frame is None:
                    continue

                ret[thread.getId()] = frame
            return ret

    elif IS_IRONPYTHON:
        # Filled in elsewhere by the tracing machinery (tid -> last seen frame).
        _tid_to_last_frame = {}

        # IronPython doesn't have it. Let's use our workaround...
        def _current_frames():
            return _tid_to_last_frame

    else:
        raise RuntimeError('Unable to proceed (sys._current_frames not available in this Python implementation).')
else:
    # CPython (and modern Jython): use the builtin directly.
    _current_frames = sys._current_frames
|
||||
|
||||
#=======================================================================================================================
# PyDBAdditionalThreadInfo
#=======================================================================================================================
# IFDEF CYTHON
# cdef class PyDBAdditionalThreadInfo:
# ELSE
class PyDBAdditionalThreadInfo(object):
# ENDIF
    # Per-thread debugger state (stepping command, suspend state, framework
    # suspend type, etc.), attached to each traced thread.
    # NOTE: the '# IFDEF CYTHON' markers are processed by pydevd's build
    # tooling to generate the compiled variant -- keep them intact.

    # IFDEF CYTHON
    # cdef public int pydev_state;
    # cdef public object pydev_step_stop; # Actually, it's a frame or None
    # cdef public int pydev_step_cmd;
    # cdef public bint pydev_notify_kill;
    # cdef public object pydev_smart_step_stop; # Actually, it's a frame or None
    # cdef public bint pydev_django_resolve_frame;
    # cdef public object pydev_call_from_jinja2;
    # cdef public object pydev_call_inside_jinja2;
    # cdef public bint is_tracing;
    # cdef public tuple conditional_breakpoint_exception;
    # cdef public str pydev_message;
    # cdef public int suspend_type;
    # cdef public int pydev_next_line;
    # cdef public str pydev_func_name;
    # ELSE
    __slots__ = [
        'pydev_state',
        'pydev_step_stop',
        'pydev_step_cmd',
        'pydev_notify_kill',
        'pydev_smart_step_stop',
        'pydev_django_resolve_frame',
        'pydev_call_from_jinja2',
        'pydev_call_inside_jinja2',
        'is_tracing',
        'conditional_breakpoint_exception',
        'pydev_message',
        'suspend_type',
        'pydev_next_line',
        'pydev_func_name',
    ]
    # ENDIF

    def __init__(self):
        self.pydev_state = STATE_RUN
        self.pydev_step_stop = None
        self.pydev_step_cmd = -1  # Something as CMD_STEP_INTO, CMD_STEP_OVER, etc.
        self.pydev_notify_kill = False
        self.pydev_smart_step_stop = None
        self.pydev_django_resolve_frame = False
        self.pydev_call_from_jinja2 = None
        self.pydev_call_inside_jinja2 = None
        self.is_tracing = False
        self.conditional_breakpoint_exception = None
        self.pydev_message = ''
        self.suspend_type = PYTHON_SUSPEND
        self.pydev_next_line = -1
        self.pydev_func_name = '.invalid.'  # Must match the type in cython

    def iter_frames(self, t):
        """Return a list with the topmost frame of thread *t* (or [] if none)."""
        #sys._current_frames(): dictionary with thread id -> topmost frame
        current_frames = _current_frames()
        v = current_frames.get(t.ident)
        if v is not None:
            return [v]
        return []

    def __str__(self):
        return 'State:%s Stop:%s Cmd: %s Kill:%s' % (
            self.pydev_state, self.pydev_step_stop, self.pydev_step_cmd, self.pydev_notify_kill)
|
||||
|
||||
185
ptvsd/pydevd/_pydevd_bundle/pydevd_breakpoints.py
Normal file
185
ptvsd/pydevd/_pydevd_bundle/pydevd_breakpoints.py
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
from _pydevd_bundle.pydevd_constants import dict_iter_values, IS_PY24
import pydevd_tracing
import sys
from _pydev_bundle import pydev_log
from _pydevd_bundle import pydevd_import_class

# Post-mortem excepthook state: the previously installed sys.excepthook and
# the dict(exception qname -> ExceptionBreakpoint) currently being handled.
_original_excepthook = None
_handle_exceptions = None


# NOTE(review): threading comes from the saved-modules copy -- presumably so a
# monkey-patched (e.g. gevent) threading module does not affect the debugger.
from _pydev_imps._pydev_saved_modules import threading

threadingCurrentThread = threading.currentThread

from _pydevd_bundle.pydevd_comm import get_global_debugger
|
||||
|
||||
class ExceptionBreakpoint:
    """A breakpoint on an exception type.

    Resolves *qname* to the actual exception class (via the module-level
    ``_get_class`` helper) and records the notification flags used by the
    debugger when the exception is raised or terminates the program.
    """

    def __init__(
        self,
        qname,
        notify_always,
        notify_on_terminate,
        notify_on_first_raise_only,
        ignore_libraries
        ):
        resolved = _get_class(qname)
        self.qname = qname
        # Bare class name, or None when the class could not be resolved.
        self.name = resolved.__name__ if resolved is not None else None

        self.notify_on_terminate = notify_on_terminate
        self.notify_always = notify_always
        self.notify_on_first_raise_only = notify_on_first_raise_only
        self.ignore_libraries = ignore_libraries

        self.type = resolved

    def __str__(self):
        # The string form is the fully qualified name as given.
        return self.qname
|
||||
|
||||
|
||||
class LineBreakpoint(object):
    """A breakpoint set at a given source line, with optional condition,
    function-name filter, log expression and suspend policy."""

    def __init__(self, line, condition, func_name, expression, suspend_policy="NONE"):
        self.line, self.condition = line, condition
        self.func_name, self.expression = func_name, expression
        self.suspend_policy = suspend_policy
        # Code objects whose bytecode was modified by this breakpoint
        # (needed by the frame-evaluation engine).
        self.code_objects = set()
|
||||
|
||||
def get_exception_full_qname(exctype):
    """Return '<module>.<name>' for exception class *exctype* (None if falsy)."""
    if exctype:
        return '%s.%s' % (exctype.__module__, exctype.__name__)
    return None
|
||||
|
||||
def get_exception_name(exctype):
    """Return the bare class name of exception class *exctype* (None if falsy)."""
    return exctype.__name__ if exctype else None
|
||||
|
||||
|
||||
def get_exception_breakpoint(exctype, exceptions):
    """Find the ExceptionBreakpoint registered for *exctype*.

    An exact match on the fully qualified name wins; otherwise the most
    derived registered base class of *exctype* is returned (None if no
    breakpoint matches or *exceptions* is None).
    """
    if exceptions is None:
        return None

    qname = get_exception_full_qname(exctype)
    try:
        # Exact match wins outright.
        return exceptions[qname]
    except KeyError:
        pass

    best = None
    for candidate in dict_iter_values(exceptions):
        if candidate.type is None or not issubclass(exctype, candidate.type):
            continue
        # Prefer the most specific (most derived) matching type.
        if best is None or issubclass(candidate.type, best.type):
            best = candidate
    return best
|
||||
|
||||
|
||||
def _set_additional_info_if_needed(thread):
    """Ensure *thread* carries a PyDBAdditionalThreadInfo in thread.additional_info."""
    try:
        additional_info = thread.additional_info
        if additional_info is None:
            # Treat an explicit None the same as a missing attribute.
            raise AttributeError()
    except:
        # Import lazily to avoid a hard module-level dependency cycle.
        from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo
        thread.additional_info = PyDBAdditionalThreadInfo()
|
||||
|
||||
|
||||
#=======================================================================================================================
# _excepthook
#=======================================================================================================================
def _excepthook(exctype, value, tb):
    """sys.excepthook replacement: after delegating to the original hook,
    trigger a post-mortem debugger stop if an ExceptionBreakpoint matches
    *exctype* (skipping KeyboardInterrupt and tracebacks that are None)."""
    global _handle_exceptions
    if _handle_exceptions:
        exception_breakpoint = get_exception_breakpoint(exctype, _handle_exceptions)
    else:
        exception_breakpoint = None

    #Always call the original excepthook before going on to call the debugger post mortem to show it.
    _original_excepthook(exctype, value, tb)

    if not exception_breakpoint:
        return

    if tb is None: #sometimes it can be None, e.g. with GTK
        return

    if exctype is KeyboardInterrupt:
        return

    frames = []
    debugger = get_global_debugger()
    user_frame = None

    while tb:
        frame = tb.tb_frame
        # Remember the last non-library frame so we can stop there when
        # library frames are being ignored.
        if exception_breakpoint.ignore_libraries and not debugger.not_in_scope(frame.f_code.co_filename):
            user_frame = tb.tb_frame
        frames.append(tb.tb_frame)
        tb = tb.tb_next

    thread = threadingCurrentThread()
    frames_byid = dict([(id(frame),frame) for frame in frames])
    if exception_breakpoint.ignore_libraries and user_frame is not None:
        frame = user_frame
    else:
        frame = frames[-1]
    exception = (exctype, value, tb)
    _set_additional_info_if_needed(thread)
    try:
        thread.additional_info.pydev_message = exception_breakpoint.qname
    except:
        # presumably a Python 2 unicode qname -- store utf-8 bytes instead.
        thread.additional_info.pydev_message = exception_breakpoint.qname.encode('utf-8')

    pydevd_tracing.SetTrace(None) #no tracing from here

    pydev_log.debug('Handling post-mortem stop on exception breakpoint %s' % exception_breakpoint.qname)

    debugger.handle_post_mortem_stop(thread, frame, frames_byid, exception)
|
||||
|
||||
#=======================================================================================================================
# _set_pm_excepthook
#=======================================================================================================================
def _set_pm_excepthook(handle_exceptions_dict=None):
    '''
    Should be called to register the excepthook to be used.

    It's only useful for uncaught exceptions. I.e.: exceptions that go up to the excepthook.

    @param handle_exceptions: dict(exception -> ExceptionBreakpoint)
        The exceptions that should be handled.
    '''
    global _handle_exceptions
    global _original_excepthook
    if sys.excepthook != _excepthook:
        #Only keep the original if it's not our own _excepthook (if called many times).
        _original_excepthook = sys.excepthook

    _handle_exceptions = handle_exceptions_dict
    sys.excepthook = _excepthook
|
||||
|
||||
def _restore_pm_excepthook():
    """Undo _set_pm_excepthook: reinstall the saved excepthook, if any."""
    global _original_excepthook
    saved = _original_excepthook
    if saved:
        sys.excepthook = saved
        _original_excepthook = None
|
||||
|
||||
|
||||
def update_exception_hook(dbg):
    """Install or remove the post-mortem excepthook based on *dbg*'s
    break-on-uncaught-exceptions configuration."""
    exceptions = dbg.break_on_uncaught_exceptions
    if exceptions:
        _set_pm_excepthook(exceptions)
    else:
        _restore_pm_excepthook()
|
||||
|
||||
def _get_class( kls ):
    """Resolve an exception class from its (possibly dotted) name.

    Tries eval() first (covers builtins such as 'ValueError'), then falls
    back to a dotted-name import.
    NOTE(review): eval() here runs on breakpoint names supplied by the IDE
    (trusted); do not reuse this helper with untrusted data.
    """
    if IS_PY24 and "BaseException" == kls:
        # Python 2.4 has no BaseException; Exception is the closest root.
        kls = "Exception"

    try:
        return eval(kls)
    except:
        return pydevd_import_class.import_name(kls)
|
||||
1441
ptvsd/pydevd/_pydevd_bundle/pydevd_comm.py
Normal file
1441
ptvsd/pydevd/_pydevd_bundle/pydevd_comm.py
Normal file
File diff suppressed because it is too large
Load diff
147
ptvsd/pydevd/_pydevd_bundle/pydevd_command_line_handling.py
Normal file
147
ptvsd/pydevd/_pydevd_bundle/pydevd_command_line_handling.py
Normal file
|
|
@ -0,0 +1,147 @@
|
|||
class ArgHandlerWithParam:
    """Handler for a command-line flag that carries a value (``--name value``)."""

    def __init__(self, arg_name, convert_val=None, default_val=None):
        self.arg_name = arg_name
        # The literal flag as it appears on the command line.
        self.arg_v_rep = '--%s' % (arg_name,)
        self.convert_val = convert_val
        self.default_val = default_val

    def to_argv(self, lst, setup):
        """Append '--name value' to *lst* when *setup* has a non-default value."""
        value = setup.get(self.arg_name)
        if value is None or value == self.default_val:
            return
        lst.append(self.arg_v_rep)
        lst.append('%s' % (value,))

    def handle_argv(self, argv, i, setup):
        """Consume '--name value' from *argv* at position *i* into *setup*."""
        assert argv[i] == self.arg_v_rep
        del argv[i]

        raw = argv[i]
        converted = self.convert_val(raw) if self.convert_val else raw
        setup[self.arg_name] = converted
        del argv[i]
|
||||
|
||||
class ArgHandlerBool:
    '''
    Handler for a value-less command-line flag: its presence marks True in setup.
    '''

    def __init__(self, arg_name, default_val=False):
        self.arg_name = arg_name
        # The literal flag as it appears on the command line.
        self.arg_v_rep = '--%s' % (arg_name,)
        self.default_val = default_val

    def to_argv(self, lst, setup):
        """Append the flag to *lst* when it is truthy in *setup*."""
        if setup.get(self.arg_name):
            lst.append(self.arg_v_rep)

    def handle_argv(self, argv, i, setup):
        """Consume the flag from *argv* at position *i*, marking it True."""
        assert argv[i] == self.arg_v_rep
        del argv[i]
        setup[self.arg_name] = True
|
||||
|
||||
|
||||
# Every argument pydevd understands; each handler knows how to parse itself
# from argv (handle_argv) and serialize itself back (to_argv/setup_to_argv).
ACCEPTED_ARG_HANDLERS = [
    ArgHandlerWithParam('port', int, 0),
    ArgHandlerWithParam('vm_type'),
    ArgHandlerWithParam('client'),

    ArgHandlerBool('server'),
    ArgHandlerBool('DEBUG_RECORD_SOCKET_READS'),
    ArgHandlerBool('multiproc'),  # Used by PyCharm (reuses connection: ssh tunneling)
    ArgHandlerBool('multiprocess'),  # Used by PyDev (creates new connection to ide)
    ArgHandlerBool('save-signatures'),
    ArgHandlerBool('save-threading'),
    ArgHandlerBool('save-asyncio'),
    ArgHandlerBool('print-in-debugger-startup'),
    ArgHandlerBool('cmd-line'),
    ArgHandlerBool('module'),
]

# Map '--flag' text -> handler for O(1) dispatch while parsing argv.
ARGV_REP_TO_HANDLER = {}
for handler in ACCEPTED_ARG_HANDLERS:
    ARGV_REP_TO_HANDLER[handler.arg_v_rep] = handler
|
||||
|
||||
def get_pydevd_file():
    """Return the path to pydevd's source file, mapping compiled-file
    suffixes (.pyc, Jython's $py.class) back to the .py source."""
    import pydevd
    path = pydevd.__file__
    if path.endswith('.pyc'):
        # CPython-compiled module: drop the trailing 'c'.
        path = path[:-1]
    elif path.endswith('$py.class'):
        # Jython-compiled module: swap the suffix for '.py'.
        path = path[:-len('$py.class')] + '.py'
    return path
|
||||
|
||||
def setup_to_argv(setup):
    '''
    :param dict setup:
        A dict previously gotten from process_command_line.

    :note: does not handle --file nor --DEBUG.
    '''
    # Start with the pydevd entry-point path, then let each known handler
    # serialize its own setting back into argv form.
    argv = [get_pydevd_file()]
    for handler in ACCEPTED_ARG_HANDLERS:
        if handler.arg_name in setup:
            handler.to_argv(argv, setup)
    return argv
|
||||
|
||||
def process_command_line(argv):
    """Parse pydevd's own arguments out of *argv* (destructively).

    Returns a setup dict (arg name -> value); *argv* is mutated so that only
    what pydevd did not consume remains. Raises ValueError on unknown
    options or invalid --qt-support modes.
    """
    setup = {}
    for handler in ACCEPTED_ARG_HANDLERS:
        setup[handler.arg_name] = handler.default_val
    setup['file'] = ''
    setup['qt-support'] = ''

    # NOTE: i is only advanced for --file; every other branch deletes the
    # consumed entries from argv, so argv[i] naturally becomes the next arg.
    i = 0
    del argv[0]
    while i < len(argv):
        handler = ARGV_REP_TO_HANDLER.get(argv[i])
        if handler is not None:
            handler.handle_argv(argv, i, setup)

        elif argv[i].startswith('--qt-support'):
            # The --qt-support is special because we want to keep backward compatibility:
            # Previously, just passing '--qt-support' meant that we should use the auto-discovery mode
            # whereas now, if --qt-support is passed, it should be passed as --qt-support=<mode>, where
            # mode can be one of 'auto', 'none', 'pyqt5', 'pyqt4', 'pyside'.
            if argv[i] == '--qt-support':
                setup['qt-support'] = 'auto'

            elif argv[i].startswith('--qt-support='):
                qt_support = argv[i][len('--qt-support='):]
                valid_modes = ('none', 'auto', 'pyqt5', 'pyqt4', 'pyside')
                if qt_support not in valid_modes:
                    raise ValueError("qt-support mode invalid: " + qt_support)
                if qt_support == 'none':
                    # On none, actually set an empty string to evaluate to False.
                    setup['qt-support'] = ''
                else:
                    setup['qt-support'] = qt_support
            else:
                raise ValueError("Unexpected definition for qt-support flag: " + argv[i])

            del argv[i]

        elif argv[i] == '--file':
            # --file is special because it's the last one (so, no handler for it).
            del argv[i]
            setup['file'] = argv[i]
            i = len(argv)  # pop out, file is our last argument

        elif argv[i] == '--DEBUG':
            from pydevd import set_debug
            del argv[i]
            set_debug(setup)

        else:
            raise ValueError("Unexpected option: " + argv[i])
    return setup
|
||||
247
ptvsd/pydevd/_pydevd_bundle/pydevd_console.py
Normal file
247
ptvsd/pydevd/_pydevd_bundle/pydevd_console.py
Normal file
|
|
@ -0,0 +1,247 @@
|
|||
'''A helper file for the pydev debugger (REPL) console
'''
import sys
import traceback
from code import InteractiveConsole

from _pydev_bundle import _pydev_completer
from _pydev_bundle.pydev_console_utils import BaseInterpreterInterface, BaseStdIn
from _pydev_bundle.pydev_imports import Exec
from _pydev_bundle.pydev_override import overrides
from _pydevd_bundle import pydevd_save_locals
from _pydevd_bundle.pydevd_io import IOBuf
from pydevd_tracing import get_exception_traceback_str
from _pydevd_bundle.pydevd_xml import make_valid_xml_value

# Message types used when serializing console output (see ConsoleMessage).
CONSOLE_OUTPUT = "output"
CONSOLE_ERROR = "error"
|
||||
|
||||
|
||||
#=======================================================================================================================
# ConsoleMessage
#=======================================================================================================================
class ConsoleMessage:
    """Accumulates the output/error lines produced by one console command."""

    def __init__(self):
        # True while the interpreter still needs more input to complete
        # the current statement.
        self.more = False
        # List of (message_type, line) tuples, e.g. ('output', 'text').
        self.console_messages = []

    def add_console_message(self, message_type, message):
        """Split *message* on newlines and store every non-blank line."""
        for msg_line in message.split("\n"):
            if msg_line.strip():
                self.console_messages.append((message_type, msg_line))

    def update_more(self, more):
        """Record whether further input is required from the user."""
        self.more = more

    def to_xml(self):
        """Serialize the collected messages and the 'more' flag as XML:

        <xml>
            <message_list>console message_list</message_list>
            <error>console error</error>
            <more>true/false</more>
        </xml>
        """
        escape = make_valid_xml_value

        parts = ['<xml><more>%s</more>' % (self.more,)]
        for message_type, message in self.console_messages:
            parts.append('<%s message="%s"></%s>' % (message_type, escape(message), message_type))
        parts.append('</xml>')

        return ''.join(parts)
|
||||
|
||||
|
||||
#=======================================================================================================================
# DebugConsoleStdIn
#=======================================================================================================================
class DebugConsoleStdIn(BaseStdIn):
    """Stdin stand-in used while console output is buffered.

    Reading from stdin is not supported in that mode, so every readline()
    warns on stderr and yields an empty line instead of blocking.
    """

    # Fix: 'overrides(...)' was a bare (no-op) call; it must be applied as a
    # decorator for the override check to take effect.
    @overrides(BaseStdIn.readline)
    def readline(self, *args, **kwargs):
        sys.stderr.write('Warning: Reading from stdin is still not supported in this console.\n')
        return '\n'
|
||||
|
||||
#=======================================================================================================================
# DebugConsole
#=======================================================================================================================
class DebugConsole(InteractiveConsole, BaseInterpreterInterface):
    """Wrapper around code.InteractiveConsole, in order to send
    errors and outputs to the debug console.
    """

    # Fix: the 'overrides(...)' lines below were bare (no-op) calls; they must
    # be applied as decorators for the override check to take effect.

    @overrides(BaseInterpreterInterface.create_std_in)
    def create_std_in(self, *args, **kwargs):
        # When output is not buffered the real stdin can be used directly.
        try:
            if not self.__buffer_output:
                return sys.stdin
        except:
            pass

        return DebugConsoleStdIn() #If buffered, raw_input is not supported in this console.

    @overrides(InteractiveConsole.push)
    def push(self, line, frame, buffer_output=True):
        """Change built-in stdout and stderr methods by the
        new custom StdMessage.
        execute the InteractiveConsole.push.
        Change the stdout and stderr back be the original built-ins

        :param buffer_output: if False won't redirect the output.

        Return boolean (True if more input is required else False),
        output_messages and input_messages
        """
        self.__buffer_output = buffer_output
        more = False
        if buffer_output:
            original_stdout = sys.stdout
            original_stderr = sys.stderr
        try:
            try:
                self.frame = frame
                if buffer_output:
                    # Capture everything the executed line prints.
                    out = sys.stdout = IOBuf()
                    err = sys.stderr = IOBuf()
                more = self.add_exec(line)
            except Exception:
                exc = get_exception_traceback_str()
                if buffer_output:
                    err.buflist.append("Internal Error: %s" % (exc,))
                else:
                    sys.stderr.write("Internal Error: %s\n" % (exc,))
        finally:
            #Remove frame references.
            self.frame = None
            frame = None
            if buffer_output:
                sys.stdout = original_stdout
                sys.stderr = original_stderr

        if buffer_output:
            return more, out.buflist, err.buflist
        else:
            return more, [], []

    @overrides(BaseInterpreterInterface.do_add_exec)
    def do_add_exec(self, line):
        return InteractiveConsole.push(self, line)

    @overrides(InteractiveConsole.runcode)
    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback. All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught. The
        caller should be prepared to deal with it.

        """
        try:
            Exec(code, self.frame.f_globals, self.frame.f_locals)
            # Persist changes made to f_locals back into the frame.
            pydevd_save_locals.save_locals(self.frame)
        except SystemExit:
            raise
        except:
            self.showtraceback()

    def get_namespace(self):
        """Return globals+locals of the current frame merged into one dict."""
        dbg_namespace = {}
        dbg_namespace.update(self.frame.f_globals)
        dbg_namespace.update(self.frame.f_locals)  # locals later because it has precedence over the actual globals
        return dbg_namespace
|
||||
|
||||
|
||||
#=======================================================================================================================
# InteractiveConsoleCache
#=======================================================================================================================
class InteractiveConsoleCache:
    """Module-level cache holding the single live console instance and the
    thread/frame identity it is bound to."""

    # Identity of the frame the cached console belongs to.
    thread_id = None
    frame_id = None
    # The cached DebugConsole (or None when no console has been created yet).
    interactive_console_instance = None
|
||||
|
||||
|
||||
#Note: On Jython 2.1 we can't use classmethod or staticmethod, so, just make the functions below free-functions.
def get_interactive_console(thread_id, frame_id, frame, console_message):
    """returns the global interactive console.
    interactive console should have been initialized by this time
    :rtype: DebugConsole
    """
    # Cache hit: same thread/frame -> reuse the existing console.
    if InteractiveConsoleCache.thread_id == thread_id and InteractiveConsoleCache.frame_id == frame_id:
        return InteractiveConsoleCache.interactive_console_instance

    # Cache miss: build a new console bound to this thread/frame.
    InteractiveConsoleCache.interactive_console_instance = DebugConsole()
    InteractiveConsoleCache.thread_id = thread_id
    InteractiveConsoleCache.frame_id = frame_id

    # Report the source location the console is bound to as its first message.
    console_stacktrace = traceback.extract_stack(frame, limit=1)
    if console_stacktrace:
        current_context = console_stacktrace[0]  # top entry from stacktrace
        context_message = 'File "%s", line %s, in %s' % (current_context[0], current_context[1], current_context[2])
        console_message.add_console_message(CONSOLE_OUTPUT, "[Current context]: %s" % (context_message,))
    return InteractiveConsoleCache.interactive_console_instance
|
||||
|
||||
|
||||
def clear_interactive_console():
    """Drop the cached console so the next request builds a fresh one."""
    cache = InteractiveConsoleCache
    cache.thread_id = None
    cache.frame_id = None
    cache.interactive_console_instance = None
|
||||
|
||||
|
||||
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Push *line* to the (cached) interactive console for this thread/frame.

    Returns a ConsoleMessage holding the captured stdout/stderr lines and
    whether more input is required to complete the statement.
    """
    console_message = ConsoleMessage()

    interpreter = get_interactive_console(thread_id, frame_id, frame, console_message)
    more, out_lines, err_lines = interpreter.push(line, frame, buffer_output)
    console_message.update_more(more)

    for text in out_lines:
        console_message.add_console_message(CONSOLE_OUTPUT, text)
    for text in err_lines:
        console_message.add_console_message(CONSOLE_ERROR, text)

    return console_message
|
||||
|
||||
|
||||
def get_description(frame, thread_id, frame_id, expression):
    """Evaluate *expression* in the console bound to this thread/frame and
    return its description."""
    console_message = ConsoleMessage()
    interpreter = get_interactive_console(thread_id, frame_id, frame, console_message)
    try:
        interpreter.frame = frame
        return interpreter.getDescription(expression)
    finally:
        # Always clear the frame reference so it isn't kept alive.
        interpreter.frame = None
|
||||
|
||||
|
||||
def get_completions(frame, act_tok):
    """Fetch all completions for *act_tok* in *frame*'s namespace and return
    them rendered as XML."""
    return _pydev_completer.generate_completions_as_xml(frame, act_tok)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
337
ptvsd/pydevd/_pydevd_bundle/pydevd_constants.py
Normal file
337
ptvsd/pydevd/_pydevd_bundle/pydevd_constants.py
Normal file
|
|
@ -0,0 +1,337 @@
|
|||
'''
This module holds the constants used for specifying the states of the debugger.
'''
from __future__ import nested_scopes

# Debugger thread states.
STATE_RUN = 1
STATE_SUSPEND = 2

# Suspend-type markers (stored in PyDBAdditionalThreadInfo.suspend_type).
PYTHON_SUSPEND = 1
DJANGO_SUSPEND = 2
JINJA2_SUSPEND = 3
|
||||
|
||||
|
||||
class DebugInfoHolder:
    """Holds pydevd's own debug/trace settings.

    Kept as class attributes (not module globals) because they can be set
    through the command line after other modules have already imported this
    one -- those modules still see the updated values through the class.
    """
    DEBUG_RECORD_SOCKET_READS = False
    DEBUG_TRACE_LEVEL = -1
    DEBUG_TRACE_BREAKPOINTS = -1
|
||||
|
||||
#Hold a reference to the original _getframe (because psyco will change that as soon as it's imported)
import sys #Note: the sys import must be here anyways (others depend on it)
try:
    get_frame = sys._getframe
except AttributeError:
    # Implementations without sys._getframe (e.g. IronPython without -X:Frames).
    def get_frame():
        raise AssertionError('sys._getframe not available (possible causes: enable -X:Frames on IronPython?)')

#Used to determine the maximum size of each variable passed to eclipse -- having a big value here may make
#the communication slower -- as the variables are being gathered lazily in the latest version of eclipse,
#this value was raised from 200 to 1000.
MAXIMUM_VARIABLE_REPRESENTATION_SIZE = 1000
# Prefix for saving functions return values in locals
RETURN_VALUES_DICT = '__pydevd_ret_val_dict'
|
||||
|
||||
import os

from _pydevd_bundle import pydevd_vm_type

# Interpreter-implementation detection.
IS_JYTHON = pydevd_vm_type.get_vm_type() == pydevd_vm_type.PydevdVmType.JYTHON
IS_IRONPYTHON = sys.platform == 'cli'

IS_JYTH_LESS25 = False
if IS_JYTHON:
    if sys.version_info[0] == 2 and sys.version_info[1] < 5:
        IS_JYTH_LESS25 = True

IS_PYTHON_STACKLESS = "stackless" in sys.version.lower()
CYTHON_SUPPORTED = False

try:
    import platform
    python_implementation = platform.python_implementation()
except:
    pass
else:
    if python_implementation == 'CPython' and not IS_PYTHON_STACKLESS:
        # Only available for CPython!
        if (
            (sys.version_info[0] == 2 and sys.version_info[1] >= 7)
            or (sys.version_info[0] == 3 and sys.version_info[1] >= 3)
            or (sys.version_info[0] > 3)
            ):
            # Supported in 2.7 or 3.3 onwards (32 or 64)
            CYTHON_SUPPORTED = True
|
||||
|
||||
|
||||
#=======================================================================================================================
# Python 3?
#=======================================================================================================================
IS_PY3K = False
IS_PY34_OLDER = False
IS_PY2 = True
IS_PY27 = False
IS_PY24 = False
try:
    if sys.version_info[0] >= 3:
        IS_PY3K = True
        IS_PY2 = False
        # NOTE: despite its name, IS_PY34_OLDER is True on 3.4 *or newer*.
        if (sys.version_info[0] == 3 and sys.version_info[1] >= 4) or sys.version_info[0] > 3:
            IS_PY34_OLDER = True
    elif sys.version_info[0] == 2 and sys.version_info[1] == 7:
        IS_PY27 = True
    elif sys.version_info[0] == 2 and sys.version_info[1] == 4:
        IS_PY24 = True
except AttributeError:
    pass #Not all versions have sys.version_info

try:
    SUPPORT_GEVENT = os.getenv('GEVENT_SUPPORT', 'False') == 'True'
except:
    # Jython 2.1 doesn't accept that construct
    SUPPORT_GEVENT = False

# At the moment gevent supports Python >= 2.6 and Python >= 3.3
USE_LIB_COPY = SUPPORT_GEVENT and \
               ((not IS_PY3K and sys.version_info[1] >= 6) or
                (IS_PY3K and sys.version_info[1] >= 3))


# True when a GUI event loop can plausibly be integrated with (mac/win, or X11 display present).
INTERACTIVE_MODE_AVAILABLE = sys.platform in ('darwin', 'win32') or os.getenv('DISPLAY') is not None
|
||||
|
||||
|
||||
def protect_libraries_from_patching():
    """
    Save pristine copies of patch-prone stdlib modules.

    Deletes the listed modules from `sys.modules`, imports
    `_pydev_saved_modules` (which re-imports and keeps the originals), and
    then restores the previously loaded modules so user code is unaffected.
    The debugger later uses the saved copies to stay immune to external
    monkey-patching (e.g. by gevent).
    """
    patched = ['threading', 'thread', '_thread', 'time', 'socket', 'Queue', 'queue', 'select',
               'xmlrpclib', 'SimpleXMLRPCServer', 'BaseHTTPServer', 'SocketServer',
               'xmlrpc.client', 'xmlrpc.server', 'http.server', 'socketserver']

    for mod_name in patched:
        try:
            __import__(mod_name)
        except:
            pass  # Best effort: not every name exists on every Python version.

    originals = {}
    for mod_name, mod in sys.modules.items():
        if mod_name in patched:
            originals[mod_name] = mod

    for mod_name in originals:
        del sys.modules[mod_name]

    # import for side effects: stores fresh (unpatched) copies.
    import _pydev_imps._pydev_saved_modules

    for mod_name in originals:
        sys.modules[mod_name] = originals[mod_name]
|
||||
|
||||
|
||||
if USE_LIB_COPY:
    # Stash the originals before any gevent-style monkey-patching can occur.
    protect_libraries_from_patching()


from _pydev_imps._pydev_saved_modules import thread

# Guards lazy computation of per-thread debugger ids (see get_thread_id).
_nextThreadIdLock = thread.allocate_lock()
|
||||
|
||||
if IS_PY3K:

    def dict_keys(d):
        # Py3 .keys() is a view; materialize it.
        return list(d.keys())

    def dict_values(d):
        return list(d.values())

    # Lazy value iteration: the Py3 view is already lazy.
    dict_iter_values = dict.values

    def dict_iter_items(d):
        return d.items()

    def dict_items(d):
        return list(d.items())

else:
    # Python 2 / Jython: prefer the bound dict methods when present and fall
    # back to per-object method calls otherwise.
    dict_keys = getattr(dict, 'keys', None)
    if IS_JYTHON or not dict_keys:

        def dict_keys(d):
            return d.keys()

    dict_iter_values = getattr(dict, 'itervalues', None)
    if dict_iter_values is None:
        # Older versions don't have the itervalues
        dict_iter_values = getattr(dict, 'values', None)
    if dict_iter_values is None:

        def dict_iter_values(d):
            return d.values()

    dict_values = getattr(dict, 'values', None)
    if dict_values is None:

        def dict_values(d):
            return d.values()

    def dict_iter_items(d):
        try:
            return d.iteritems()
        except:
            return d.items()

    def dict_items(d):
        return d.items()
|
||||
|
||||
|
||||
try:
    xrange = xrange  # Python 2: keep the builtin.
except:
    # Python 3k does not have it; range is already lazy there.
    xrange = range

try:
    import itertools
    izip = itertools.izip  # Python 2.
except:
    izip = zip  # Python 3: builtin zip already returns an iterator.
|
||||
|
||||
|
||||
#=======================================================================================================================
# StringIO
#=======================================================================================================================
try:
    from StringIO import StringIO  # Python 2.
except:
    from io import StringIO  # Python 3 moved it into io.
|
||||
|
||||
|
||||
#=======================================================================================================================
# get_pid
#=======================================================================================================================
def get_pid():
    """Return this process' id ('000001' if it cannot be determined)."""
    getpid = getattr(os, 'getpid', None)
    if getpid is not None:
        return getpid()
    try:
        # Jython does not have os.getpid: the JMX runtime name is
        # formatted as '<pid>@<hostname>'.
        import java.lang.management.ManagementFactory  # @UnresolvedImport -- just for jython
        runtime_name = java.lang.management.ManagementFactory.getRuntimeMXBean().getName()
        return runtime_name.replace('@', '_')
    except:
        # ok, no pid available (will be unable to debug multiple processes)
        return '000001'
|
||||
|
||||
def clear_cached_thread_id(thread):
    """Drop the debugger id cached on *thread* so it is recomputed next time."""
    try:
        del thread.__pydevd_id__
    except AttributeError:
        pass  # Nothing was cached yet.
|
||||
|
||||
#=======================================================================================================================
# get_thread_id
#=======================================================================================================================
def get_thread_id(thread):
    """Return the debugger id for *thread*, computing and caching it on the
    thread object (as ``__pydevd_id__``) on first use.

    The id has the form ``'pid_<pid>_id_<ident>'`` and stays stable for the
    thread's lifetime (ident comes from thread.get_ident() or id(thread)).
    """
    try:
        tid = thread.__pydevd_id__
        if tid is None:
            # Fix for https://sw-brainwy.rhcloud.com/tracker/PyDev/645
            # if __pydevd_id__ is None, recalculate it... also, use an heuristic
            # that gives us always the same id for the thread (using thread.ident or id(thread)).
            raise AttributeError()
    except AttributeError:
        _nextThreadIdLock.acquire()
        try:
            #We do a new check with the lock in place just to be sure that nothing changed
            tid = getattr(thread, '__pydevd_id__', None)
            if tid is None:
                pid = get_pid()
                try:
                    tid = thread.__pydevd_id__ = 'pid_%s_id_%s' % (pid, thread.get_ident())
                except:
                    # thread.ident isn't always there... (use id(thread) instead if it's not there).
                    tid = thread.__pydevd_id__ = 'pid_%s_id_%s' % (pid, id(thread))
        finally:
            _nextThreadIdLock.release()

    return tid
|
||||
|
||||
#===============================================================================
# Null
#===============================================================================
class Null:
    """
    A do-nothing object that silently absorbs any interaction.

    Gotten from: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/68205
    """

    def __init__(self, *args, **kwargs):
        return None

    def __call__(self, *args, **kwargs):
        return self

    def __getattr__(self, mname):
        if len(mname) > 4 and mname.startswith('__') and mname.endswith('__'):
            # Don't pretend to implement special method names.
            raise AttributeError(mname)
        return self

    def __setattr__(self, name, value):
        return self

    def __delattr__(self, name):
        return self

    def __repr__(self):
        return "<Null>"

    def __str__(self):
        return "Null"

    def __len__(self):
        # Also makes the object falsy on Python 3.
        return 0

    def __getitem__(self):
        return self

    def __setitem__(self, *args, **kwargs):
        pass

    def write(self, *args, **kwargs):
        # Allows a Null to stand in for a stream.
        pass

    def __nonzero__(self):
        # Python 2 truthiness (Python 3 uses __len__).
        return 0

    def __iter__(self):
        return iter(())
|
||||
|
||||
|
||||
def call_only_once(func):
    '''
    Decorator ensuring *func* executes at most once; later calls return None.

    @call_only_once
    def func():
        print 'Calling func only this time'

    Actually, in PyDev it must be called as:

    func = call_only_once(func) to support older versions of Python.
    '''
    def wrapper(*args, **kwargs):
        if wrapper._called:
            return None
        wrapper._called = True
        return func(*args, **kwargs)

    wrapper._called = False
    return wrapper
|
||||
|
||||
if __name__ == '__main__':
    # Smoke check: Null must be falsy, so nothing should be printed here.
    if Null():
        sys.stdout.write('here\n')
|
||||
133
ptvsd/pydevd/_pydevd_bundle/pydevd_custom_frames.py
Normal file
133
ptvsd/pydevd/_pydevd_bundle/pydevd_custom_frames.py
Normal file
|
|
@ -0,0 +1,133 @@
|
|||
from _pydevd_bundle.pydevd_constants import get_thread_id, Null
|
||||
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
|
||||
from _pydev_imps._pydev_saved_modules import thread, threading
|
||||
import sys
|
||||
|
||||
DEBUG = False


#=======================================================================================================================
# CustomFramesContainer
#=======================================================================================================================
class CustomFramesContainer:
    """Module-level registry of custom (virtual) frames shown by the debugger.

    All attributes are placeholders here; they receive real values in
    custom_frames_container_init() (also re-run when dealing with a fork).
    """

    # Protects every access to custom_frames.  :type custom_frames_lock: threading.Lock
    custom_frames_lock = None

    # Maps frame-id string -> CustomFrame; only touch while holding the lock.
    custom_frames = None

    # Monotonic counter used to mint unique frame ids.
    _next_frame_id = None

    # Event set whenever the registry changes (replaced by the real debugger's event).
    _py_db_command_thread_event = None
||||
|
||||
def custom_frames_container_init():  # Note: no staticmethod on jython 2.1 (so, use free-function)
    """(Re)initialize the CustomFramesContainer registry (also used on fork)."""
    CustomFramesContainer.custom_frames_lock = thread.allocate_lock()

    # custom_frames can only be accessed if properly locked with custom_frames_lock!
    # Key is a string identifying the frame (as well as the thread it belongs to);
    # value is a CustomFrame.
    CustomFramesContainer.custom_frames = {}

    # Only to be used in this module.
    CustomFramesContainer._next_frame_id = 0

    # Placeholder event released whenever the registry changes; the actual
    # debugger swaps in its own event once it is created.
    CustomFramesContainer._py_db_command_thread_event = Null()


# Initialize it the first time (it may be reinitialized later on when dealing with a fork).
custom_frames_container_init()
|
||||
|
||||
|
||||
#=======================================================================================================================
# CustomFrame
#=======================================================================================================================
class CustomFrame:
    """Lightweight record describing one custom (virtual) frame."""

    def __init__(self, name, frame, thread_id):
        # String representation of this frame shown to the user.
        self.name = name

        # The actual frame object to show.
        self.frame = frame

        # Bumped every time the frame is changed.
        self.mod_time = 0

        # Id of the thread the frame belongs to.
        self.thread_id = thread_id
|
||||
|
||||
|
||||
def add_custom_frame(frame, name, thread_id):
    """Register *frame* under a freshly minted id and return that id.

    The id embeds a counter plus the id of the thread doing the registration,
    so callers can later test ownership with frame_id.endswith('|' + thread_id).
    """
    CustomFramesContainer.custom_frames_lock.acquire()
    try:
        curr_thread_id = get_thread_id(threading.currentThread())
        CustomFramesContainer._next_frame_id += 1
        next_id = CustomFramesContainer._next_frame_id

        frame_id = '__frame__:%s|%s' % (next_id, curr_thread_id)
        if DEBUG:
            sys.stderr.write('add_custom_frame: %s (%s) %s %s\n' % (
                frame_id, get_abs_path_real_path_and_base_from_frame(frame)[-1], frame.f_lineno, frame.f_code.co_name))

        CustomFramesContainer.custom_frames[frame_id] = CustomFrame(name, frame, thread_id)
        # Wake the debugger's command thread so it notices the new frame.
        CustomFramesContainer._py_db_command_thread_event.set()
        return frame_id
    finally:
        CustomFramesContainer.custom_frames_lock.release()


addCustomFrame = add_custom_frame  # Backward compatibility
|
||||
|
||||
def update_custom_frame(frame_id, frame, thread_id, name=None):
    """Refresh the CustomFrame registered under *frame_id*, bumping mod_time.

    NOTE(review): the *frame* parameter is accepted but never stored here —
    presumably callers mutate the same frame object in place; confirm at call
    sites before relying on it.
    """
    CustomFramesContainer.custom_frames_lock.acquire()
    try:
        if DEBUG:
            sys.stderr.write('update_custom_frame: %s\n' % frame_id)
        try:
            entry = CustomFramesContainer.custom_frames[frame_id]
            if name is not None:
                entry.name = name
            entry.mod_time += 1
            entry.thread_id = thread_id
        except:
            sys.stderr.write('Unable to get frame to replace: %s\n' % (frame_id,))
            import traceback
            traceback.print_exc()

        CustomFramesContainer._py_db_command_thread_event.set()
    finally:
        CustomFramesContainer.custom_frames_lock.release()
|
||||
|
||||
|
||||
def get_custom_frame(thread_id, frame_id):
    '''
    :param thread_id: This should actually be the frame_id which is returned by add_custom_frame.
    :param frame_id: This is the actual id() of the frame
    '''
    CustomFramesContainer.custom_frames_lock.acquire()
    try:
        wanted = int(frame_id)
        f = CustomFramesContainer.custom_frames[thread_id].frame
        # Walk the frame chain looking for the frame with the requested id();
        # returns None implicitly when it is not found.
        while f is not None:
            if id(f) == wanted:
                return f
            f = f.f_back
    finally:
        f = None  # Drop the local reference so frames are not kept alive.
        CustomFramesContainer.custom_frames_lock.release()
||||
|
||||
def remove_custom_frame(frame_id):
    """Unregister *frame_id* (a no-op when it is not registered)."""
    CustomFramesContainer.custom_frames_lock.acquire()
    try:
        if DEBUG:
            sys.stderr.write('remove_custom_frame: %s\n' % frame_id)
        CustomFramesContainer.custom_frames.pop(frame_id, None)
        CustomFramesContainer._py_db_command_thread_event.set()
    finally:
        CustomFramesContainer.custom_frames_lock.release()


removeCustomFrame = remove_custom_frame  # Backward compatibility
|
||||
30941
ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c
Normal file
30941
ptvsd/pydevd/_pydevd_bundle/pydevd_cython.c
Normal file
File diff suppressed because it is too large
Load diff
1138
ptvsd/pydevd/_pydevd_bundle/pydevd_cython.pyx
Normal file
1138
ptvsd/pydevd/_pydevd_bundle/pydevd_cython.pyx
Normal file
File diff suppressed because it is too large
Load diff
35
ptvsd/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py
Normal file
35
ptvsd/pydevd/_pydevd_bundle/pydevd_cython_wrapper.py
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
# Loads the compiled (Cython) tracing speedups, trying first the plain
# extension and then platform/version-suffixed pre-built binaries; raises
# ImportError when no compiled extension is available (callers fall back to
# the pure-Python implementation).
try:
    from _pydevd_bundle.pydevd_cython import trace_dispatch, PyDBAdditionalThreadInfo, global_cache_skips, global_cache_frame_skips
    import _pydevd_bundle.pydevd_cython
    # this version number can be unavailable in old versions of compiled extensions
    version = getattr(_pydevd_bundle.pydevd_cython, 'version', 0)
except ImportError:
    try:
        import struct
        import sys
        try:
            is_python_64bit = (struct.calcsize('P') == 8)
        except:
            # In Jython this call fails, but this is Ok, we don't support Jython for speedups anyways.
            raise ImportError
        plat = '32'
        if is_python_64bit:
            plat = '64'

        # We also accept things as:
        #
        # _pydevd_bundle.pydevd_cython_win32_27_32
        # _pydevd_bundle.pydevd_cython_win32_34_64
        #
        # to have multiple pre-compiled pyds distributed along the IDE
        # (generated by build_tools/build_binaries_windows.py).

        mod_name = 'pydevd_cython_%s_%s%s_%s' % (sys.platform, sys.version_info[0], sys.version_info[1], plat)
        check_name = '_pydevd_bundle.%s' % (mod_name,)
        # __import__ returns the top-level package; getattr walks down to the
        # actual suffixed module.
        mod = __import__(check_name)
        mod = getattr(mod, mod_name)
        trace_dispatch, PyDBAdditionalThreadInfo, global_cache_skips, global_cache_frame_skips = \
            mod.trace_dispatch, mod.PyDBAdditionalThreadInfo, mod.global_cache_skips, mod.global_cache_frame_skips
        version = getattr(mod, 'version', 0)
    except ImportError:
        raise
|
||||
123
ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace.py
Normal file
123
ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace.py
Normal file
|
|
@ -0,0 +1,123 @@
|
|||
'''
Support for a tag that allows skipping over functions while debugging.
'''
import linecache
import re

# To suppress tracing a method, add the tag @DontTrace
# to a comment either preceding or on the same line as
# the method definition
#
# E.g.:
# #@DontTrace
# def test1():
#     pass
#
#  ... or ...
#
# def test2(): #@DontTrace
#     pass
DONT_TRACE_TAG = '@DontTrace'

# Regular expression to match a decorator (at the beginning
# of a line).
RE_DECORATOR = re.compile(r'^\s*@')

# Mapping from filename to {0-based line number: 1} of lines whose functions
# must not be traced; acts as the cache for default_should_trace_hook.
_filename_to_ignored_lines = {}


def default_should_trace_hook(frame, filename):
    '''
    Return True if this frame should be traced, False if tracing should be blocked.

    :param frame: frame whose code object's co_firstlineno locates the function.
    :param filename: file to scan (read through linecache) for @DontTrace tags.
    '''
    # First, check whether this file already has a cached ignored-lines map.
    ignored_lines = _filename_to_ignored_lines.get(filename)
    if ignored_lines is None:
        # Now, look up that line of code and check for a @DontTrace
        # preceding or on the same line as the method.
        # E.g.:
        # #@DontTrace
        # def test():
        #     pass
        #  ... or ...
        # def test(): #@DontTrace
        #     pass
        ignored_lines = {}
        lines = linecache.getlines(filename)
        for i_line, line in enumerate(lines):
            j = line.find('#')
            if j >= 0:
                comment = line[j:]
                if DONT_TRACE_TAG in comment:
                    ignored_lines[i_line] = 1

                    # When the tag is found, also mark any run of decorator
                    # lines directly above and below it.
                    k = i_line - 1
                    while k >= 0:
                        if RE_DECORATOR.match(lines[k]):
                            ignored_lines[k] = 1
                            k -= 1
                        else:
                            break

                    k = i_line + 1
                    # Bug fix: this loop used `k <= len(lines)`, so `lines[k]`
                    # raised IndexError when the decorator run reached the end
                    # of the file; the last valid index is len(lines) - 1.
                    while k < len(lines):
                        if RE_DECORATOR.match(lines[k]):
                            ignored_lines[k] = 1
                            k += 1
                        else:
                            break

        _filename_to_ignored_lines[filename] = ignored_lines

    func_line = frame.f_code.co_firstlineno - 1  # co_firstlineno is 1-based, so -1 is needed
    return not (
        func_line - 1 in ignored_lines or  # -1 to get line before method
        func_line in ignored_lines)  # method line
|
||||
|
||||
|
||||
# Hook actually consulted by the tracer; None means "no filtering".
# Installed/removed via trace_filter() below.
should_trace_hook = None


def clear_trace_filter_cache():
    '''
    Clear the trace filter cache.
    Call this after reloading.
    '''
    global should_trace_hook
    try:
        # Need to temporarily disable a hook because otherwise
        # _filename_to_ignored_lines.clear() will never complete.
        old_hook = should_trace_hook
        should_trace_hook = None

        # Clear the linecache
        linecache.clearcache()
        _filename_to_ignored_lines.clear()

    finally:
        # Always restore the hook, even if clearing raised.
        should_trace_hook = old_hook
|
||||
|
||||
|
||||
def trace_filter(mode):
    '''
    Set the trace filter mode.

    mode: Whether to enable the trace hook.
      True: Trace filtering on (skipping methods tagged @DontTrace)
      False: Trace filtering off (trace methods tagged @DontTrace)
      None/default: Toggle trace filtering.

    Returns the mode that is now in effect.
    '''
    global should_trace_hook
    if mode is None:
        # Toggle: turn on exactly when no hook is currently installed.
        mode = should_trace_hook is None

    should_trace_hook = default_should_trace_hook if mode else None
    return mode
|
||||
|
||||
119
ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py
Normal file
119
ptvsd/pydevd/_pydevd_bundle/pydevd_dont_trace_files.py
Normal file
|
|
@ -0,0 +1,119 @@
|
|||
# Important: Autogenerated file.

# DO NOT edit manually!
# DO NOT edit manually!

from _pydevd_bundle.pydevd_constants import IS_PY3K

# Marker values: why an entry is skipped — it belongs to the stdlib or to the
# debugger itself.
LIB_FILE = 1
PYDEV_FILE = 2

# Maps file basename -> LIB_FILE/PYDEV_FILE; consulted (as DONT_TRACE.get) to
# decide which frames the debugger must never trace.
DONT_TRACE = {
    # commonly used things from the stdlib that we don't want to trace
    'Queue.py':LIB_FILE,
    'queue.py':LIB_FILE,
    'socket.py':LIB_FILE,
    'weakref.py':LIB_FILE,
    '_weakrefset.py':LIB_FILE,
    'linecache.py':LIB_FILE,
    'threading.py':LIB_FILE,
    'dis.py':LIB_FILE,

    #things from pydev that we don't want to trace
    '_pydev_execfile.py':PYDEV_FILE,
    '_pydev_BaseHTTPServer.py': PYDEV_FILE,
    '_pydev_SimpleXMLRPCServer.py': PYDEV_FILE,
    '_pydev_SocketServer.py': PYDEV_FILE,
    '_pydev_calltip_util.py': PYDEV_FILE,
    '_pydev_completer.py': PYDEV_FILE,
    # NOTE(review): duplicate of the '_pydev_execfile.py' entry above
    # (harmless at runtime, but should be fixed in the generator).
    '_pydev_execfile.py': PYDEV_FILE,
    '_pydev_filesystem_encoding.py': PYDEV_FILE,
    '_pydev_getopt.py': PYDEV_FILE,
    '_pydev_imports_tipper.py': PYDEV_FILE,
    '_pydev_inspect.py': PYDEV_FILE,
    '_pydev_jy_imports_tipper.py': PYDEV_FILE,
    '_pydev_log.py': PYDEV_FILE,
    '_pydev_pkgutil_old.py': PYDEV_FILE,
    '_pydev_saved_modules.py': PYDEV_FILE,
    '_pydev_sys_patch.py': PYDEV_FILE,
    '_pydev_tipper_common.py': PYDEV_FILE,
    '_pydev_uuid_old.py': PYDEV_FILE,
    '_pydev_xmlrpclib.py': PYDEV_FILE,
    'django_debug.py': PYDEV_FILE,
    'fix_getpass.py': PYDEV_FILE,
    'jinja2_debug.py': PYDEV_FILE,
    'pycompletionserver.py': PYDEV_FILE,
    'pydev_app_engine_debug_startup.py': PYDEV_FILE,
    'pydev_console_utils.py': PYDEV_FILE,
    'pydev_import_hook.py': PYDEV_FILE,
    'pydev_imports.py': PYDEV_FILE,
    'pydev_ipython_console.py': PYDEV_FILE,
    'pydev_ipython_console_011.py': PYDEV_FILE,
    'pydev_is_thread_alive.py': PYDEV_FILE,
    'pydev_localhost.py': PYDEV_FILE,
    'pydev_log.py': PYDEV_FILE,
    'pydev_monkey.py': PYDEV_FILE,
    'pydev_monkey_qt.py': PYDEV_FILE,
    'pydev_override.py': PYDEV_FILE,
    'pydev_run_in_console.py': PYDEV_FILE,
    'pydev_umd.py': PYDEV_FILE,
    'pydev_versioncheck.py': PYDEV_FILE,
    'pydevconsole.py': PYDEV_FILE,
    'pydevconsole_code_for_ironpython.py': PYDEV_FILE,
    'pydevd.py': PYDEV_FILE,
    'pydevd_additional_thread_info.py': PYDEV_FILE,
    'pydevd_additional_thread_info_regular.py': PYDEV_FILE,
    'pydevd_breakpoints.py': PYDEV_FILE,
    'pydevd_comm.py': PYDEV_FILE,
    'pydevd_command_line_handling.py': PYDEV_FILE,
    'pydevd_concurrency_logger.py': PYDEV_FILE,
    'pydevd_console.py': PYDEV_FILE,
    'pydevd_constants.py': PYDEV_FILE,
    'pydevd_custom_frames.py': PYDEV_FILE,
    'pydevd_cython_wrapper.py': PYDEV_FILE,
    'pydevd_dont_trace.py': PYDEV_FILE,
    'pydevd_dont_trace_files.py': PYDEV_FILE,
    'pydevd_exec.py': PYDEV_FILE,
    'pydevd_exec2.py': PYDEV_FILE,
    'pydevd_extension_api.py': PYDEV_FILE,
    'pydevd_extension_utils.py': PYDEV_FILE,
    'pydevd_file_utils.py': PYDEV_FILE,
    'pydevd_frame.py': PYDEV_FILE,
    'pydevd_frame_eval_cython_wrapper.py': PYDEV_FILE,
    'pydevd_frame_eval_main.py': PYDEV_FILE,
    'pydevd_frame_tracing.py': PYDEV_FILE,
    'pydevd_frame_utils.py': PYDEV_FILE,
    'pydevd_helpers.py': PYDEV_FILE,
    'pydevd_import_class.py': PYDEV_FILE,
    'pydevd_io.py': PYDEV_FILE,
    'pydevd_kill_all_pydevd_threads.py': PYDEV_FILE,
    'pydevd_modify_bytecode.py': PYDEV_FILE,
    'pydevd_plugin_numpy_types.py': PYDEV_FILE,
    'pydevd_plugin_utils.py': PYDEV_FILE,
    'pydevd_plugins_django_form_str.py': PYDEV_FILE,
    'pydevd_process_net_command.py': PYDEV_FILE,
    'pydevd_referrers.py': PYDEV_FILE,
    'pydevd_reload.py': PYDEV_FILE,
    'pydevd_resolver.py': PYDEV_FILE,
    'pydevd_save_locals.py': PYDEV_FILE,
    'pydevd_signature.py': PYDEV_FILE,
    'pydevd_stackless.py': PYDEV_FILE,
    'pydevd_thread_wrappers.py': PYDEV_FILE,
    'pydevd_trace_api.py': PYDEV_FILE,
    'pydevd_trace_dispatch.py': PYDEV_FILE,
    'pydevd_trace_dispatch_regular.py': PYDEV_FILE,
    'pydevd_traceproperty.py': PYDEV_FILE,
    'pydevd_tracing.py': PYDEV_FILE,
    'pydevd_utils.py': PYDEV_FILE,
    'pydevd_vars.py': PYDEV_FILE,
    'pydevd_vm_type.py': PYDEV_FILE,
    'pydevd_xml.py': PYDEV_FILE,
}

if IS_PY3K:
    # if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
    DONT_TRACE['io.py'] = LIB_FILE

    # Don't trace common encodings too
    DONT_TRACE['cp1252.py'] = LIB_FILE
    DONT_TRACE['utf_8.py'] = LIB_FILE
|
||||
5
ptvsd/pydevd/_pydevd_bundle/pydevd_exec.py
Normal file
5
ptvsd/pydevd/_pydevd_bundle/pydevd_exec.py
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
def Exec(exp, global_vars, local_vars=None):
    # Python 2 ONLY: uses the `exec ... in ...` statement form, which is a
    # SyntaxError on Python 3 (the Python 3 counterpart lives in
    # pydevd_exec2.py and is chosen by the importer at runtime).
    if local_vars is not None:
        exec exp in global_vars, local_vars
    else:
        exec exp in global_vars
|
||||
5
ptvsd/pydevd/_pydevd_bundle/pydevd_exec2.py
Normal file
5
ptvsd/pydevd/_pydevd_bundle/pydevd_exec2.py
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
def Exec(exp, global_vars, local_vars=None):
    """Execute *exp* in the given namespaces (Python 3 counterpart of pydevd_exec.Exec)."""
    if local_vars is None:
        exec(exp, global_vars)
    else:
        exec(exp, global_vars, local_vars)
|
||||
87
ptvsd/pydevd/_pydevd_bundle/pydevd_extension_api.py
Normal file
87
ptvsd/pydevd/_pydevd_bundle/pydevd_extension_api.py
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
import abc
|
||||
|
||||
|
||||
# borrowed from from six
|
||||
# borrowed from from six
def _with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""

    # Dummy metaclass: when the returned temporary class is subclassed, this
    # __new__ discards the temporary base and builds the real class with
    # *meta* and the requested *bases* (works on both Python 2 and 3).
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)

    return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
# =======================================================================================================================
# AbstractResolver
# =======================================================================================================================
class _AbstractResolver(_with_metaclass(abc.ABCMeta)):
    """
    Documentation-only base showing the contract a resolver must fulfil.

    Typical resolution strategies:
    - list: get_dictionary may map index -> item and later resolve by the index
    - set: get_dictionary may map id(object) -> object and re-scan to resolve
    - arbitrary instance: get_dictionary may map attr_name -> attr and resolve via getattr
    """

    @abc.abstractmethod
    def resolve(self, var, attribute):
        """
        Resolve one child of *var* given the key string previously handed out
        in the dictionary returned by get_dictionary.

        @param var: this is the actual variable to be resolved.
        @param attribute: this is the string representation of a key previously returned in get_dictionary.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_dictionary(self, var):
        """
        @param var: the variable whose children should be produced.

        @return: a dictionary whose key/value pairs are shown to the user as
        the children of *var* in the variables view.
        """
        raise NotImplementedError
|
||||
|
||||
|
||||
class _AbstractProvider(_with_metaclass(abc.ABCMeta)):
    """Base for providers: declares the can_provide(type_object, type_name) guard."""

    @abc.abstractmethod
    def can_provide(self, type_object, type_name):
        raise NotImplementedError
|
||||
|
||||
|
||||
# =======================================================================================================================
# API CLASSES:
# =======================================================================================================================

class TypeResolveProvider(_AbstractResolver, _AbstractProvider):
    """
    Subclass this in an extension to provide a custom resolver; see _AbstractResolver.
    """
|
||||
|
||||
|
||||
class StrPresentationProvider(_AbstractProvider):
    """
    Subclass this in an extension to supply a custom str presentation for a type.
    """

    @abc.abstractmethod
    def get_str(self, val):
        raise NotImplementedError
|
||||
|
||||
|
||||
class DebuggerEventHandler(_with_metaclass(abc.ABCMeta)):
    """
    Extension hook receiving lifecycle events from the debugger.
    """

    def on_debugger_modules_loaded(self, **kwargs):
        """
        Invoked after all debugger modules are loaded -- a safe point for
        extensions to import and/or patch debugger modules.

        :param kwargs: flexible dict passed from the debugger; currently
            carries the debugger version.
        """
|
||||
61
ptvsd/pydevd/_pydevd_bundle/pydevd_extension_utils.py
Normal file
61
ptvsd/pydevd/_pydevd_bundle/pydevd_extension_utils.py
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
import pkgutil
|
||||
import sys
|
||||
from _pydev_bundle import pydev_log
|
||||
import pydevd_plugins.extensions
|
||||
|
||||
class ExtensionManager(object):
    """Discovers and instantiates debugger extension classes.

    Extensions live in the ``pydevd_plugins.extensions`` namespace package in
    modules named ``pydevd_plugin*``; discovered instances are cached per
    requested extension type.
    """

    def __init__(self):
        self.loaded_extensions = None  # None until _load_modules() has run.
        self.type_to_instance = {}  # extension type -> list of instances.

    def _load_modules(self):
        # Import every candidate extension module exactly once.
        self.loaded_extensions = []
        pkg = pydevd_plugins.extensions
        for module_loader, name, ispkg in pkgutil.walk_packages(pkg.__path__, pkg.__name__ + '.'):
            mod_name = name.split('.')[-1]
            if ispkg or not mod_name.startswith('pydevd_plugin'):
                continue
            try:
                __import__(name)
                self.loaded_extensions.append(sys.modules[name])
            except ImportError:
                pydev_log.error('Unable to load extension ' + name)

    def _ensure_loaded(self):
        if self.loaded_extensions is None:
            self._load_modules()

    def _iter_attr(self):
        # Yield (name, value) for every public attribute of every loaded
        # extension module, honouring __all__ when present.
        for extension in self.loaded_extensions:
            dunder_all = getattr(extension, '__all__', None)
            for attr_name in dir(extension):
                if attr_name.startswith('_'):
                    continue
                if dunder_all is not None and attr_name not in dunder_all:
                    continue
                yield attr_name, getattr(extension, attr_name)

    def get_extension_classes(self, extension_type):
        # Return (building and caching on first request) one instance of every
        # discovered strict subclass of *extension_type*.
        self._ensure_loaded()
        if extension_type in self.type_to_instance:
            return self.type_to_instance[extension_type]
        handlers = self.type_to_instance.setdefault(extension_type, [])
        for attr_name, attr in self._iter_attr():
            if isinstance(attr, type) and issubclass(attr, extension_type) and attr is not extension_type:
                try:
                    handlers.append(attr())
                except:
                    pydev_log.error('Unable to load extension class' + attr_name, tb=True)
        return handlers
|
||||
|
||||
|
||||
# Single shared manager; all lookups funnel through it so instances are cached.
EXTENSION_MANAGER_INSTANCE = ExtensionManager()


def extensions_of_type(extension_type):
    """
    Return the cached instances of every registered extension of the given type.

    :param T extension_type: The type of the extension hook
    :rtype: list[T]
    """
    return EXTENSION_MANAGER_INSTANCE.get_extension_classes(extension_type)
|
||||
|
||||
|
||||
781
ptvsd/pydevd/_pydevd_bundle/pydevd_frame.py
Normal file
781
ptvsd/pydevd/_pydevd_bundle/pydevd_frame.py
Normal file
|
|
@ -0,0 +1,781 @@
|
|||
import linecache
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import traceback # @Reimport
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle import pydevd_dont_trace
|
||||
from _pydevd_bundle import pydevd_vars
|
||||
from _pydevd_bundle.pydevd_breakpoints import get_exception_breakpoint
|
||||
from _pydevd_bundle.pydevd_comm import CMD_STEP_CAUGHT_EXCEPTION, CMD_STEP_RETURN, CMD_STEP_OVER, CMD_SET_BREAK, \
|
||||
CMD_STEP_INTO, CMD_SMART_STEP_INTO, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, CMD_STEP_INTO_MY_CODE
|
||||
from _pydevd_bundle.pydevd_constants import STATE_SUSPEND, get_thread_id, STATE_RUN, dict_iter_values, IS_PY3K, \
|
||||
RETURN_VALUES_DICT
|
||||
from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE, PYDEV_FILE
|
||||
from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, just_raised
|
||||
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
|
||||
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
|
||||
try:
|
||||
from inspect import CO_GENERATOR
|
||||
except:
|
||||
CO_GENERATOR = 0
|
||||
|
||||
try:
|
||||
from _pydevd_bundle.pydevd_signature import send_signature_call_trace, send_signature_return_trace
|
||||
except ImportError:
|
||||
def send_signature_call_trace(*args, **kwargs):
|
||||
pass
|
||||
|
||||
basename = os.path.basename
|
||||
|
||||
IGNORE_EXCEPTION_TAG = re.compile('[^#]*#.*@IgnoreException')
|
||||
DEBUG_START = ('pydevd.py', 'run')
|
||||
DEBUG_START_PY3K = ('_pydev_execfile.py', 'execfile')
|
||||
TRACE_PROPERTY = 'pydevd_traceproperty.py'
|
||||
get_file_type = DONT_TRACE.get
|
||||
|
||||
|
||||
def handle_breakpoint_condition(py_db, info, breakpoint, new_frame, default_return_value):
    """Evaluate *breakpoint*'s condition in *new_frame*'s namespaces.

    Returns *default_return_value* when the condition is falsy (or when it
    errors and py_db.suspend_on_breakpoint_exception is off); returns None
    otherwise.  NOTE(review): callers appear to interpret a non-default
    return as "stop at the breakpoint" — confirm at call sites.
    """
    condition = breakpoint.condition
    try:
        val = eval(condition, new_frame.f_globals, new_frame.f_locals)
        if not val:
            return default_return_value

    except:
        # Evaluation failed: report the error; whether we still suspend is a
        # debugger setting.
        if type(condition) != type(''):
            if hasattr(condition, 'encode'):
                condition = condition.encode('utf-8')

        msg = 'Error while evaluating expression: %s\n' % (condition,)
        sys.stderr.write(msg)
        traceback.print_exc()
        if not py_db.suspend_on_breakpoint_exception:
            return default_return_value
        else:
            stop = True  # NOTE(review): assigned but never read here.
            try:
                # add exception_type and stacktrace into thread additional info
                etype, value, tb = sys.exc_info()
                try:
                    error = ''.join(traceback.format_exception_only(etype, value))
                    stack = traceback.extract_stack(f=tb.tb_frame.f_back)

                    # On self.set_suspend(thread, CMD_SET_BREAK) this info will be
                    # sent to the client.
                    info.conditional_breakpoint_exception = \
                        ('Condition:\n' + condition + '\n\nError:\n' + error, stack)
                finally:
                    # Break the traceback reference cycle explicitly.
                    etype, value, tb = None, None, None
            except:
                traceback.print_exc()
|
||||
|
||||
|
||||
def handle_breakpoint_expression(breakpoint, info, new_frame):
    """Evaluate a breakpoint's log expression and store its string form.

    The expression is evaluated with *new_frame*'s globals/locals; if it
    raises, the exception instance itself is used as the result.  Any non-None
    result is stringified into info.pydev_message (later shown to the client).
    """
    try:
        result = eval(breakpoint.expression, new_frame.f_globals, new_frame.f_locals)
    except:
        # Deliberate bare except: show the error instead of breaking tracing.
        result = sys.exc_info()[1]
    if result is not None:
        info.pydev_message = str(result)
|
||||
|
||||
|
||||
#=======================================================================================================================
# PyDBFrame
#=======================================================================================================================
# IFDEF CYTHON
# cdef class PyDBFrame:
# ELSE
class PyDBFrame:
    '''This makes the tracing for a given frame, so, the trace_dispatch
    is used initially when we enter into a new context ('call') and then
    is reused for the entire context.
    '''
    # ENDIF

    #Note: class (and not instance) attributes.

    #Same thing in the main debugger but only considering the file contents, while the one in the main debugger
    #considers the user input (so, the actual result must be a join of both).
    # Maps: filename -> {lineno: 1 (ignore) or 0 (don't ignore)} for lines
    # carrying the @IgnoreException tag.
    filename_to_lines_where_exceptions_are_ignored = {}
    # Maps: filename -> (st_size, st_mtime) used to invalidate the cache above
    # when the file changes on disk.
    filename_to_stat_info = {}

    # IFDEF CYTHON
    # cdef tuple _args
    # cdef int should_skip
    # def __init__(self, tuple args):
    #     self._args = args # In the cython version we don't need to pass the frame
    #     self.should_skip = -1 # On cythonized version, put in instance.
    # ELSE
    should_skip = -1  # Default value in class (put in instance on set).

    def __init__(self, args):
        #args = main_debugger, filename, base, info, t, frame
        #yeap, much faster than putting in self and then getting it from self later on
        self._args = args
    # ENDIF
|
||||
|
||||
def set_suspend(self, *args, **kwargs):
|
||||
self._args[0].set_suspend(*args, **kwargs)
|
||||
|
||||
def do_wait_suspend(self, *args, **kwargs):
|
||||
self._args[0].do_wait_suspend(*args, **kwargs)
|
||||
|
||||
    # IFDEF CYTHON
    # def trace_exception(self, frame, str event, arg):
    #     cdef bint flag;
    # ELSE
    def trace_exception(self, frame, event, arg):
    # ENDIF
        """Reduced trace function used when only exception breakpoints can
        trigger in this frame: only 'exception' events are examined.

        :param arg: for 'exception' events, the (type, value, traceback) tuple.
        :return: the trace function to keep installed for this frame.
        """
        if event == 'exception':
            flag, frame = self.should_stop_on_exception(frame, event, arg)

            if flag:
                self.handle_exception(frame, event, arg)
                # After stopping, go back to the full dispatch so regular
                # breakpoints/stepping in this frame are honored again.
                return self.trace_dispatch

        return self.trace_exception
|
||||
|
||||
    def trace_return(self, frame, event, arg):
        """Reduced trace function used when only a signature 'return' trace is
        needed for this frame (signature_factory enabled; see trace_dispatch).

        _args[0] is the main debugger, _args[1] this frame's filename.
        """
        if event == 'return':
            main_debugger, filename = self._args[0], self._args[1]
            send_signature_return_trace(main_debugger, frame, filename, arg)
        return self.trace_return
|
||||
|
||||
    # IFDEF CYTHON
    # def should_stop_on_exception(self, frame, str event, arg):
    #     cdef PyDBAdditionalThreadInfo info;
    #     cdef bint flag;
    # ELSE
    def should_stop_on_exception(self, frame, event, arg):
    # ENDIF
        """Decide whether the exception in *arg* should suspend this thread.

        :param arg: the (exception, value, traceback) tuple from the trace
            'exception' event.
        :return: (flag, frame) -- flag is True when handle_exception should be
            called; frame may be replaced by a plugin-provided frame.
        """

        # main_debugger, _filename, info, _thread = self._args
        main_debugger = self._args[0]
        info = self._args[2]
        flag = False

        # STATE_SUSPEND = 2
        if info.pydev_state != 2:  #and breakpoint is not None:
            exception, value, trace = arg

            if trace is not None:  #on jython trace is None on the first event
                exception_breakpoint = get_exception_breakpoint(
                    exception, main_debugger.break_on_caught_exceptions)

                if exception_breakpoint is not None:
                    if exception_breakpoint.ignore_libraries:
                        # Only notify when the exception first appears in user
                        # (in-scope) code, not in library frames.
                        if exception_breakpoint.notify_on_first_raise_only:
                            if main_debugger.first_appearance_in_scope(trace):
                                add_exception_to_frame(frame, (exception, value, trace))
                                try:
                                    info.pydev_message = exception_breakpoint.qname
                                except:
                                    # qname may be unicode on py2; fall back to utf-8 bytes.
                                    info.pydev_message = exception_breakpoint.qname.encode('utf-8')
                                flag = True
                            else:
                                pydev_log.debug("Ignore exception %s in library %s" % (exception, frame.f_code.co_filename))
                                flag = False
                    else:
                        if not exception_breakpoint.notify_on_first_raise_only or just_raised(trace):
                            add_exception_to_frame(frame, (exception, value, trace))
                            try:
                                info.pydev_message = exception_breakpoint.qname
                            except:
                                info.pydev_message = exception_breakpoint.qname.encode('utf-8')
                            flag = True
                        else:
                            flag = False
                else:
                    # No regular exception breakpoint: give plugins (e.g.
                    # django/jinja2 support) a chance to break.
                    try:
                        if main_debugger.plugin is not None:
                            result = main_debugger.plugin.exception_break(main_debugger, self, frame, self._args, arg)
                            if result:
                                flag, frame = result
                    except:
                        flag = False

        return flag, frame
|
||||
|
||||
    def handle_exception(self, frame, event, arg):
        """Suspend the thread on a caught exception and notify the client.

        Walks the traceback to the raise point, honors @IgnoreException line
        tags (cached per file and invalidated on file change), registers the
        traceback frames so the client can inspect them, then suspends with
        CMD_STEP_CAUGHT_EXCEPTION and waits.
        """
        try:
            # print 'handle_exception', frame.f_lineno, frame.f_code.co_name

            # We have 3 things in arg: exception type, description, traceback object
            trace_obj = arg[2]
            main_debugger = self._args[0]

            if not hasattr(trace_obj, 'tb_next'):
                return  #Not always there on Jython...

            initial_trace_obj = trace_obj
            if trace_obj.tb_next is None and trace_obj.tb_frame is frame:
                #I.e.: tb_next should be only None in the context it was thrown (trace_obj.tb_frame is frame is just a double check).

                if main_debugger.break_on_exceptions_thrown_in_same_context:
                    #Option: Don't break if an exception is caught in the same function from which it is thrown
                    return
            else:
                #Get the trace_obj from where the exception was raised...
                while trace_obj.tb_next is not None:
                    trace_obj = trace_obj.tb_next

            if main_debugger.ignore_exceptions_thrown_in_lines_with_ignore_exception:
                # Check both the catch point and the raise point for an
                # @IgnoreException tag on the offending source line.
                for check_trace_obj in (initial_trace_obj, trace_obj):
                    filename = get_abs_path_real_path_and_base_from_frame(check_trace_obj.tb_frame)[1]

                    filename_to_lines_where_exceptions_are_ignored = self.filename_to_lines_where_exceptions_are_ignored

                    lines_ignored = filename_to_lines_where_exceptions_are_ignored.get(filename)
                    if lines_ignored is None:
                        lines_ignored = filename_to_lines_where_exceptions_are_ignored[filename] = {}

                    try:
                        curr_stat = os.stat(filename)
                        curr_stat = (curr_stat.st_size, curr_stat.st_mtime)
                    except:
                        curr_stat = None

                    last_stat = self.filename_to_stat_info.get(filename)
                    if last_stat != curr_stat:
                        # File changed on disk: drop the cached tag info and
                        # refresh linecache so getline() sees current contents.
                        self.filename_to_stat_info[filename] = curr_stat
                        lines_ignored.clear()
                        try:
                            linecache.checkcache(filename)
                        except:
                            #Jython 2.1
                            linecache.checkcache()

                    from_user_input = main_debugger.filename_to_lines_where_exceptions_are_ignored.get(filename)
                    if from_user_input:
                        merged = {}
                        merged.update(lines_ignored)
                        #Override what we have with the related entries that the user entered
                        merged.update(from_user_input)
                    else:
                        merged = lines_ignored

                    exc_lineno = check_trace_obj.tb_lineno

                    # print ('lines ignored', lines_ignored)
                    # print ('user input', from_user_input)
                    # print ('merged', merged, 'curr', exc_lineno)

                    if exc_lineno not in merged:  #Note: check on merged but update lines_ignored.
                        try:
                            line = linecache.getline(filename, exc_lineno, check_trace_obj.tb_frame.f_globals)
                        except:
                            #Jython 2.1
                            line = linecache.getline(filename, exc_lineno)

                        if IGNORE_EXCEPTION_TAG.match(line) is not None:
                            lines_ignored[exc_lineno] = 1
                            return
                        else:
                            #Put in the cache saying not to ignore
                            lines_ignored[exc_lineno] = 0
                    else:
                        #Ok, dict has it already cached, so, let's check it...
                        if merged.get(exc_lineno, 0):
                            return

            thread = self._args[3]

            try:
                # Register every frame of the traceback (plus the current one)
                # by id so the client can request variables for any of them.
                frame_id_to_frame = {}
                frame_id_to_frame[id(frame)] = frame
                f = trace_obj.tb_frame
                while f is not None:
                    frame_id_to_frame[id(f)] = f
                    f = f.f_back
                f = None

                thread_id = get_thread_id(thread)
                pydevd_vars.add_additional_frame_by_id(thread_id, frame_id_to_frame)
                try:
                    main_debugger.send_caught_exception_stack(thread, arg, id(frame))
                    self.set_suspend(thread, CMD_STEP_CAUGHT_EXCEPTION)
                    self.do_wait_suspend(thread, frame, event, arg)
                    main_debugger.send_caught_exception_stack_proceeded(thread)

                finally:
                    pydevd_vars.remove_additional_frame_by_id(thread_id)
            except:
                traceback.print_exc()

            main_debugger.set_trace_for_frame_and_parents(frame)
        finally:
            #Clear some local variables...
            # Breaks reference cycles between this frame and the traceback so
            # the suspended frames can be collected promptly.
            trace_obj = None
            initial_trace_obj = None
            check_trace_obj = None
            f = None
            frame_id_to_frame = None
            main_debugger = None
            thread = None
|
||||
|
||||
def get_func_name(self, frame):
|
||||
code_obj = frame.f_code
|
||||
func_name = code_obj.co_name
|
||||
try:
|
||||
cls_name = get_clsname_for_code(code_obj, frame)
|
||||
if cls_name is not None:
|
||||
return "%s.%s" % (cls_name, func_name)
|
||||
else:
|
||||
return func_name
|
||||
except:
|
||||
traceback.print_exc()
|
||||
return func_name
|
||||
|
||||
def show_return_values(self, frame, arg):
|
||||
try:
|
||||
try:
|
||||
f_locals_back = getattr(frame.f_back, "f_locals", None)
|
||||
if f_locals_back is not None:
|
||||
return_values_dict = f_locals_back.get(RETURN_VALUES_DICT, None)
|
||||
if return_values_dict is None:
|
||||
return_values_dict = {}
|
||||
f_locals_back[RETURN_VALUES_DICT] = return_values_dict
|
||||
name = self.get_func_name(frame)
|
||||
return_values_dict[name] = arg
|
||||
except:
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
f_locals_back = None
|
||||
|
||||
def remove_return_values(self, main_debugger, frame):
|
||||
try:
|
||||
try:
|
||||
# Showing return values was turned off, we should remove them from locals dict.
|
||||
# The values can be in the current frame or in the back one
|
||||
frame.f_locals.pop(RETURN_VALUES_DICT, None)
|
||||
|
||||
f_locals_back = getattr(frame.f_back, "f_locals", None)
|
||||
if f_locals_back is not None:
|
||||
f_locals_back.pop(RETURN_VALUES_DICT, None)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
finally:
|
||||
f_locals_back = None
|
||||
|
||||
    # IFDEF CYTHON
    # cpdef trace_dispatch(self, frame, str event, arg):
    #     cdef str filename;
    #     cdef bint is_exception_event;
    #     cdef bint has_exception_breakpoints;
    #     cdef bint can_skip;
    #     cdef PyDBAdditionalThreadInfo info;
    #     cdef int step_cmd;
    #     cdef int line;
    #     cdef bint is_line;
    #     cdef bint is_call;
    #     cdef bint is_return;
    #     cdef str curr_func_name;
    #     cdef bint exist_result;
    #     cdef dict frame_skips_cache;
    #     cdef tuple frame_cache_key;
    #     cdef tuple line_cache_key;
    #     cdef int breakpoints_in_line_cache;
    #     cdef int breakpoints_in_frame_cache;
    #     cdef bint has_breakpoint_in_frame;
    # ELSE
    def trace_dispatch(self, frame, event, arg):
    # ENDIF
        """Per-frame trace function (sys.settrace protocol).

        Handles breakpoints, stepping commands and exception breaks for one
        frame.  Returns the trace function to keep installed for this frame
        (itself, a reduced variant, or None to stop tracing the frame).

        NOTE(review): this is the debugger's hottest path; the numeric
        literals (107/108/109, state 1/2, CO_GENERATOR 0x20) mirror the named
        constants so the Cython build stays fast -- keep them in sync.
        """

        main_debugger, filename, info, thread, frame_skips_cache, frame_cache_key = self._args
        # print('frame trace_dispatch', frame.f_lineno, frame.f_code.co_name, event, info.pydev_step_cmd)
        try:
            info.is_tracing = True
            line = frame.f_lineno
            line_cache_key = (frame_cache_key, line)

            if main_debugger._finish_debugging_session:
                return None

            plugin_manager = main_debugger.plugin

            is_exception_event = event == 'exception'
            has_exception_breakpoints = main_debugger.break_on_caught_exceptions or main_debugger.has_plugin_exception_breaks

            if is_exception_event:
                if has_exception_breakpoints:
                    flag, frame = self.should_stop_on_exception(frame, event, arg)
                    if flag:
                        self.handle_exception(frame, event, arg)
                        return self.trace_dispatch
                is_line = False
                is_return = False
                is_call = False
            else:
                is_line = event == 'line'
                is_return = event == 'return'
                is_call = event == 'call'
                if not is_line and not is_return and not is_call:
                    # I believe this can only happen in jython on some frontiers on jython and java code, which we don't want to trace.
                    return None

            need_trace_return = False
            if is_call and main_debugger.signature_factory:
                need_trace_return = send_signature_call_trace(main_debugger, frame, filename)
            if is_return and main_debugger.signature_factory:
                send_signature_return_trace(main_debugger, frame, filename, arg)

            stop_frame = info.pydev_step_stop
            step_cmd = info.pydev_step_cmd

            if is_exception_event:
                breakpoints_for_file = None
                # CMD_STEP_OVER = 108
                # A StopIteration/GeneratorExit escaping the stepped-over frame
                # means the generator ended: degrade to step-into so we still
                # break somewhere.
                if stop_frame and stop_frame is not frame and step_cmd == 108 and \
                    arg[0] in (StopIteration, GeneratorExit) and arg[2] is None:
                    info.pydev_step_cmd = 107  # CMD_STEP_INTO = 107
                    info.pydev_step_stop = None
            else:
                # If we are in single step mode and something causes us to exit the current frame, we need to make sure we break
                # eventually.  Force the step mode to step into and the step stop frame to None.
                # I.e.: F6 in the end of a function should stop in the next possible position (instead of forcing the user
                # to make a step in or step over at that location).
                # Note: this is especially troublesome when we're skipping code with the
                # @DontTrace comment.
                if stop_frame is frame and is_return and step_cmd in (109, 108):  # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108
                    if not frame.f_code.co_flags & 0x20:  # CO_GENERATOR = 0x20 (inspect.CO_GENERATOR)
                        info.pydev_step_cmd = 107  # CMD_STEP_INTO = 107
                        info.pydev_step_stop = None

                breakpoints_for_file = main_debugger.breakpoints.get(filename)

                can_skip = False

                if info.pydev_state == 1:  # STATE_RUN = 1
                    #we can skip if:
                    #- we have no stop marked
                    #- we should make a step return/step over and we're not in the current frame
                    # CMD_STEP_RETURN = 109, CMD_STEP_OVER = 108
                    can_skip = (step_cmd == -1 and stop_frame is None)\
                        or (step_cmd in (109, 108) and stop_frame is not frame)

                    if can_skip:
                        if plugin_manager is not None and main_debugger.has_plugin_line_breaks:
                            can_skip = not plugin_manager.can_not_skip(main_debugger, self, frame)

                        # CMD_STEP_OVER = 108
                        if can_skip and is_return and main_debugger.show_return_values and info.pydev_step_cmd == 108 and frame.f_back is info.pydev_step_stop:
                            # trace function for showing return values after step over
                            can_skip = False

                # Let's check to see if we are in a function that has a breakpoint. If we don't have a breakpoint,
                # we will return nothing for the next trace
                # also, after we hit a breakpoint and go to some other debugging state, we have to force the set trace anyway,
                # so, that's why the additional checks are there.
                if not breakpoints_for_file:
                    if can_skip:
                        if has_exception_breakpoints:
                            return self.trace_exception
                        else:
                            if need_trace_return:
                                return self.trace_return
                            else:
                                return None

                else:
                    # When cached, 0 means we don't have a breakpoint and 1 means we have.
                    if can_skip:
                        breakpoints_in_line_cache = frame_skips_cache.get(line_cache_key, -1)
                        if breakpoints_in_line_cache == 0:
                            return self.trace_dispatch

                    breakpoints_in_frame_cache = frame_skips_cache.get(frame_cache_key, -1)
                    if breakpoints_in_frame_cache != -1:
                        # Gotten from cache.
                        has_breakpoint_in_frame = breakpoints_in_frame_cache == 1

                    else:
                        has_breakpoint_in_frame = False
                        # Checks the breakpoint to see if there is a context match in some function
                        curr_func_name = frame.f_code.co_name

                        #global context is set with an empty name
                        if curr_func_name in ('?', '<module>'):
                            curr_func_name = ''

                        for breakpoint in dict_iter_values(breakpoints_for_file):  #jython does not support itervalues()
                            #will match either global or some function
                            if breakpoint.func_name in ('None', curr_func_name):
                                has_breakpoint_in_frame = True
                                break

                        # Cache the value (1 or 0 or -1 for default because of cython).
                        if has_breakpoint_in_frame:
                            frame_skips_cache[frame_cache_key] = 1
                        else:
                            frame_skips_cache[frame_cache_key] = 0

                if can_skip and not has_breakpoint_in_frame:
                    if has_exception_breakpoints:
                        return self.trace_exception
                    else:
                        if need_trace_return:
                            return self.trace_return
                        else:
                            return None

            #We may have hit a breakpoint or we are already in step mode. Either way, let's check what we should do in this frame
            # print('NOT skipped', frame.f_lineno, frame.f_code.co_name, event)

            try:
                flag = False
                #return is not taken into account for breakpoint hit because we'd have a double-hit in this case
                #(one for the line and the other for the return).

                stop_info = {}
                breakpoint = None
                exist_result = False
                stop = False
                bp_type = None
                if not is_return and info.pydev_state != STATE_SUSPEND and breakpoints_for_file is not None and line in breakpoints_for_file:
                    breakpoint = breakpoints_for_file[line]
                    new_frame = frame
                    stop = True
                    if step_cmd == CMD_STEP_OVER and stop_frame is frame and (is_line or is_return):
                        stop = False  #we don't stop on breakpoint if we have to stop by step-over (it will be processed later)
                elif plugin_manager is not None and main_debugger.has_plugin_line_breaks:
                    result = plugin_manager.get_breakpoint(main_debugger, self, frame, event, self._args)
                    if result:
                        exist_result = True
                        flag, breakpoint, new_frame, bp_type = result

                if breakpoint:
                    #ok, hit breakpoint, now, we have to discover if it is a conditional breakpoint
                    # lets do the conditional stuff here
                    if stop or exist_result:
                        condition = breakpoint.condition
                        if condition is not None:
                            result = handle_breakpoint_condition(main_debugger, info, breakpoint, new_frame,
                                                                 self.trace_dispatch)
                            if result is not None:
                                return result

                        if breakpoint.expression is not None:
                            handle_breakpoint_expression(breakpoint, info, new_frame)

                    if not main_debugger.first_breakpoint_reached:
                        if is_call:
                            back = frame.f_back
                            if back is not None:
                                # When we start debug session, we call execfile in pydevd run function. It produces an additional
                                # 'call' event for tracing and we stop on the first line of code twice.
                                _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back)
                                if (base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]) or \
                                        (base == DEBUG_START_PY3K[0] and back.f_code.co_name == DEBUG_START_PY3K[1]):
                                    stop = False
                                    main_debugger.first_breakpoint_reached = True
                else:
                    # if the frame is traced after breakpoint stop,
                    # but the file should be ignored while stepping because of filters
                    if step_cmd != -1:
                        if main_debugger.is_filter_enabled and main_debugger.is_ignored_by_filters(filename):
                            # ignore files matching stepping filters
                            return self.trace_dispatch
                        if main_debugger.is_filter_libraries and main_debugger.not_in_scope(filename):
                            # ignore library files while stepping
                            return self.trace_dispatch

                if main_debugger.show_return_values:
                    if is_return and info.pydev_step_cmd == CMD_STEP_OVER and frame.f_back == info.pydev_step_stop:
                        self.show_return_values(frame, arg)

                elif main_debugger.remove_return_values_flag:
                    try:
                        self.remove_return_values(main_debugger, frame)
                    finally:
                        main_debugger.remove_return_values_flag = False

                if stop:
                    self.set_suspend(thread, CMD_SET_BREAK)
                    if breakpoint and breakpoint.suspend_policy == "ALL":
                        main_debugger.suspend_all_other_threads(thread)
                elif flag and plugin_manager is not None:
                    result = plugin_manager.suspend(main_debugger, thread, frame, bp_type)
                    if result:
                        frame = result

                # if thread has a suspend flag, we suspend with a busy wait
                if info.pydev_state == STATE_SUSPEND:
                    self.do_wait_suspend(thread, frame, event, arg)
                    return self.trace_dispatch
                else:
                    if not breakpoint and not is_return:
                        # No stop from anyone and no breakpoint found in line (cache that).
                        frame_skips_cache[line_cache_key] = 0

            except:
                traceback.print_exc()
                raise

            #step handling. We stop when we hit the right frame
            try:
                should_skip = 0
                if pydevd_dont_trace.should_trace_hook is not None:
                    if self.should_skip == -1:
                        # I.e.: cache the result on self.should_skip (no need to evaluate the same frame multiple times).
                        # Note that on a code reload, we won't re-evaluate this because in practice, the frame.f_code
                        # Which will be handled by this frame is read-only, so, we can cache it safely.
                        if not pydevd_dont_trace.should_trace_hook(frame, filename):
                            # -1, 0, 1 to be Cython-friendly
                            should_skip = self.should_skip = 1
                        else:
                            should_skip = self.should_skip = 0
                    else:
                        should_skip = self.should_skip

                plugin_stop = False
                if should_skip:
                    stop = False

                elif step_cmd == CMD_STEP_INTO:
                    stop = is_line or is_return
                    if plugin_manager is not None:
                        result = plugin_manager.cmd_step_into(main_debugger, frame, event, self._args, stop_info, stop)
                        if result:
                            stop, plugin_stop = result

                elif step_cmd == CMD_STEP_INTO_MY_CODE:
                    if not main_debugger.not_in_scope(frame.f_code.co_filename):
                        stop = is_line

                elif step_cmd == CMD_STEP_OVER:
                    stop = stop_frame is frame and (is_line or is_return)

                    if frame.f_code.co_flags & CO_GENERATOR:
                        # Don't stop on the 'return' of a generator (it would
                        # show up on every yield-resume cycle).
                        if is_return:
                            stop = False

                    if plugin_manager is not None:
                        result = plugin_manager.cmd_step_over(main_debugger, frame, event, self._args, stop_info, stop)
                        if result:
                            stop, plugin_stop = result

                elif step_cmd == CMD_SMART_STEP_INTO:
                    stop = False
                    if info.pydev_smart_step_stop is frame:
                        info.pydev_func_name = '.invalid.'  # Must match the type in cython
                        info.pydev_smart_step_stop = None

                    if is_line or is_exception_event:
                        curr_func_name = frame.f_code.co_name

                        #global context is set with an empty name
                        if curr_func_name in ('?', '<module>') or curr_func_name is None:
                            curr_func_name = ''

                        if curr_func_name == info.pydev_func_name:
                            stop = True

                elif step_cmd == CMD_STEP_RETURN:
                    stop = is_return and stop_frame is frame

                elif step_cmd == CMD_RUN_TO_LINE or step_cmd == CMD_SET_NEXT_STATEMENT:
                    stop = False

                    if is_line or is_exception_event:
                        #Yes, we can only act on line events (weird hum?)
                        #Note: This code is duplicated at pydevd.py
                        #Acting on exception events after debugger breaks with exception
                        curr_func_name = frame.f_code.co_name

                        #global context is set with an empty name
                        if curr_func_name in ('?', '<module>'):
                            curr_func_name = ''

                        if curr_func_name == info.pydev_func_name:
                            line = info.pydev_next_line
                            if frame.f_lineno == line:
                                stop = True
                            else:
                                # Jump the frame to the requested line (setting
                                # f_lineno requires a trace function in place).
                                if frame.f_trace is None:
                                    frame.f_trace = self.trace_dispatch
                                frame.f_lineno = line
                                frame.f_trace = None
                                stop = True

                else:
                    stop = False

                if stop and step_cmd != -1 and is_return and IS_PY3K and hasattr(frame, "f_back"):
                    # Never stop in pydevd's own files when stepping out.
                    f_code = getattr(frame.f_back, 'f_code', None)
                    if f_code is not None:
                        back_filename = os.path.basename(f_code.co_filename)
                        file_type = get_file_type(back_filename)
                        if file_type == PYDEV_FILE:
                            stop = False

                if plugin_stop:
                    stopped_on_plugin = plugin_manager.stop(main_debugger, frame, event, self._args, stop_info, arg, step_cmd)
                elif stop:
                    if is_line:
                        self.set_suspend(thread, step_cmd)
                        self.do_wait_suspend(thread, frame, event, arg)
                    else:  #return event
                        back = frame.f_back
                        if back is not None:
                            #When we get to the pydevd run function, the debugging has actually finished for the main thread
                            #(note that it can still go on for other threads, but for this one, we just make it finish)
                            #So, just setting it to None should be OK
                            _, back_filename, base = get_abs_path_real_path_and_base_from_frame(back)
                            if base == DEBUG_START[0] and back.f_code.co_name == DEBUG_START[1]:
                                back = None

                            elif base == TRACE_PROPERTY:
                                # We dont want to trace the return event of pydevd_traceproperty (custom property for debugging)
                                #if we're in a return, we want it to appear to the user in the previous frame!
                                return None

                            elif pydevd_dont_trace.should_trace_hook is not None:
                                if not pydevd_dont_trace.should_trace_hook(back, back_filename):
                                    # In this case, we'll have to skip the previous one because it shouldn't be traced.
                                    # Also, we have to reset the tracing, because if the parent's parent (or some
                                    # other parent) has to be traced and it's not currently, we wouldn't stop where
                                    # we should anymore (so, a step in/over/return may not stop anywhere if no parent is traced).
                                    # Related test: _debugger_case17a.py
                                    main_debugger.set_trace_for_frame_and_parents(back, overwrite_prev_trace=True)
                                    return None

                        if back is not None:
                            #if we're in a return, we want it to appear to the user in the previous frame!
                            self.set_suspend(thread, step_cmd)
                            self.do_wait_suspend(thread, back, event, arg)
                        else:
                            #in jython we may not have a back frame
                            info.pydev_step_stop = None
                            info.pydev_step_cmd = -1
                            info.pydev_state = STATE_RUN

            except KeyboardInterrupt:
                raise
            except:
                try:
                    traceback.print_exc()
                    info.pydev_step_cmd = -1
                except:
                    return None

            #if we are quitting, let's stop the tracing
            retVal = None
            if not main_debugger.quitting:
                retVal = self.trace_dispatch

            return retVal
        finally:
            info.is_tracing = False

    #end trace_dispatch
|
||||
|
||||
59
ptvsd/pydevd/_pydevd_bundle/pydevd_frame_utils.py
Normal file
59
ptvsd/pydevd/_pydevd_bundle/pydevd_frame_utils.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
from _pydevd_bundle.pydevd_constants import IS_PY3K
|
||||
|
||||
class Frame(object):
    """Lightweight stand-in for a real Python frame object.

    Exposes the frame attributes the debugger reads (f_back, f_lineno,
    f_code, f_locals, f_globals, f_trace) so synthetic frames can be shown
    to the client as if they were real ones.
    """

    def __init__(
        self,
        f_back,
        f_fileno,
        f_code,
        f_locals,
        f_globals=None,
        f_trace=None):
        self.f_back = f_back
        self.f_lineno = f_fileno
        self.f_code = f_code
        self.f_locals = f_locals
        # Real frames always have a globals dict; default to an empty one.
        self.f_globals = {} if f_globals is None else f_globals
        self.f_trace = f_trace
|
||||
|
||||
|
||||
class FCode(object):
    """Lightweight stand-in for a code object: just co_name and co_filename,
    which is all the debugger needs when presenting synthetic frames.
    """

    def __init__(self, name, filename):
        self.co_name = name
        self.co_filename = filename
|
||||
|
||||
|
||||
def add_exception_to_frame(frame, exception_info):
    """Expose *exception_info* (an (etype, value, tb) tuple) in the frame's
    locals under the conventional '__exception__' key so the client can show
    it among the frame's variables.
    """
    frame.f_locals['__exception__'] = exception_info
|
||||
|
||||
FILES_WITH_IMPORT_HOOKS = ['pydev_monkey_qt.py', 'pydev_import_hook.py']
|
||||
|
||||
def just_raised(trace):
    """Return True if *trace* points at the spot the exception was raised.

    On Python 3 an exception whose innermost frame is the frozen importlib
    bootstrap is not considered 'just raised' (inner exceptions during
    import should not stop the debugger).  Exceptions whose next frame sits
    in one of pydevd's import hooks count as raised in user code.
    """
    if trace is None:
        return False
    nxt = trace.tb_next
    if nxt is None:
        # Innermost traceback entry: this is the raise point.
        if not IS_PY3K:
            return True
        # Do not stop on inner exceptions in py3 while importing.
        return trace.tb_frame.f_code.co_filename != '<frozen importlib._bootstrap>'
    raising_filename = nxt.tb_frame.f_code.co_filename
    # ImportError should appear in a user's code, not inside debugger.
    return any(raising_filename.endswith(hook) for hook in FILES_WITH_IMPORT_HOOKS)
|
||||
|
||||
def cached_call(obj, func, *args):
    """Call *func(*args)* once and memoize its result on *obj*.

    The cache key is the function's __name__ only (stored as an attribute
    '_cached_<name>' on obj), so later calls return the first result even if
    *args* differ.
    """
    attr = '_cached_' + func.__name__
    if not hasattr(obj, attr):
        setattr(obj, attr, func(*args))
    return getattr(obj, attr)
|
||||
|
||||
|
||||
68
ptvsd/pydevd/_pydevd_bundle/pydevd_import_class.py
Normal file
68
ptvsd/pydevd/_pydevd_bundle/pydevd_import_class.py
Normal file
|
|
@ -0,0 +1,68 @@
|
|||
#Note: code gotten from _pydev_imports_tipper.
|
||||
|
||||
import sys
|
||||
|
||||
def _imp(name, log=None):
|
||||
try:
|
||||
return __import__(name)
|
||||
except:
|
||||
if '.' in name:
|
||||
sub = name[0:name.rfind('.')]
|
||||
|
||||
if log is not None:
|
||||
log.add_content('Unable to import', name, 'trying with', sub)
|
||||
log.add_exception()
|
||||
|
||||
return _imp(sub, log)
|
||||
else:
|
||||
s = 'Unable to import module: %s - sys.path: %s' % (str(name), sys.path)
|
||||
if log is not None:
|
||||
log.add_content(s)
|
||||
log.add_exception()
|
||||
|
||||
raise ImportError(s)
|
||||
|
||||
|
||||
# IronPython support: on the 'cli' platform, importing a .NET namespace first
# requires registering an assembly reference via clr.AddReference.
IS_IPY = False
if sys.platform == 'cli':
    IS_IPY = True
    _old_imp = _imp
    def _imp(name, log=None):
        #We must add a reference in clr for .Net
        import clr  #@UnresolvedImport
        initial_name = name
        # Try to register the longest matching assembly name, stripping one
        # trailing component at a time until AddReference succeeds.
        while '.' in name:
            try:
                clr.AddReference(name)
                break  #If it worked, that's OK.
            except:
                name = name[0:name.rfind('.')]
        else:
            try:
                clr.AddReference(name)
            except:
                pass  #That's OK (not dot net module).

        return _old_imp(initial_name, log)
|
||||
|
||||
|
||||
def import_name(name, log=None):
    """Import dotted *name* and return the final object it refers to.

    _imp() yields the top-level package; this walks the remaining components
    with getattr.  A repeated component that fails attribute lookup is
    tolerated once (see the mx.DateTime note below).
    """
    mod = _imp(name, log)

    old_comp = None
    for comp in name.split('.')[1:]:
        try:
            mod = getattr(mod, comp)
        except AttributeError:
            #this happens in the following case:
            #we have mx.DateTime.mxDateTime.mxDateTime.pyd
            #but after importing it, mx.DateTime.mxDateTime shadows access to mxDateTime.pyd
            if old_comp != comp:
                raise
        old_comp = comp

    return mod
|
||||
|
||||
101
ptvsd/pydevd/_pydevd_bundle/pydevd_io.py
Normal file
101
ptvsd/pydevd/_pydevd_bundle/pydevd_io.py
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
from _pydevd_bundle import pydevd_constants
|
||||
|
||||
IS_PY3K = pydevd_constants.IS_PY3K
|
||||
|
||||
class IORedirector:
    '''Fan-out stream wrapper: every write/flush is forwarded to all wrapped
    streams.  Write errors on individual targets are deliberately swallowed
    so one broken target cannot break the debugger's output redirection.
    '''

    def __init__(self, *args):
        self._redirectTo = args

    def write(self, s):
        for target in self._redirectTo:
            try:
                target.write(s)
            except:
                pass

    def isatty(self):
        # Never pretend to be a terminal.
        return False

    def flush(self):
        for target in self._redirectTo:
            target.flush()

    def __getattr__(self, name):
        # Delegate any other attribute to the first target that provides it.
        for target in self._redirectTo:
            if hasattr(target, name):
                return getattr(target, name)
        raise AttributeError(name)
|
||||
|
||||
class IOBuf:
|
||||
'''This class works as a replacement for stdio and stderr.
|
||||
It is a buffer and when its contents are requested, it will erase what
|
||||
|
||||
it has so far so that the next return will not return the same contents again.
|
||||
'''
|
||||
def __init__(self):
|
||||
self.buflist = []
|
||||
import os
|
||||
self.encoding = os.environ.get('PYTHONIOENCODING', 'utf-8')
|
||||
|
||||
def getvalue(self):
|
||||
b = self.buflist
|
||||
self.buflist = [] #clear it
|
||||
return ''.join(b)
|
||||
|
||||
def write(self, s):
|
||||
if not IS_PY3K:
|
||||
if isinstance(s, unicode):
|
||||
s = s.encode(self.encoding)
|
||||
self.buflist.append(s)
|
||||
|
||||
def isatty(self):
|
||||
return False
|
||||
|
||||
def flush(self):
|
||||
pass
|
||||
|
||||
def empty(self):
|
||||
return len(self.buflist) == 0
|
||||
|
||||
class _RedirectionsHolder:
|
||||
_stack_stdout = []
|
||||
_stack_stderr = []
|
||||
|
||||
|
||||
def start_redirect(keep_original_redirection=False, std='stdout'):
|
||||
'''
|
||||
@param std: 'stdout', 'stderr', or 'both'
|
||||
'''
|
||||
import sys
|
||||
buf = IOBuf()
|
||||
|
||||
if std == 'both':
|
||||
config_stds = ['stdout', 'stderr']
|
||||
else:
|
||||
config_stds = [std]
|
||||
|
||||
for std in config_stds:
|
||||
original = getattr(sys, std)
|
||||
stack = getattr(_RedirectionsHolder, '_stack_%s' % std)
|
||||
stack.append(original)
|
||||
|
||||
if keep_original_redirection:
|
||||
setattr(sys, std, IORedirector(buf, getattr(sys, std)))
|
||||
else:
|
||||
setattr(sys, std, buf)
|
||||
return buf
|
||||
|
||||
|
||||
def end_redirect(std='stdout'):
|
||||
import sys
|
||||
if std == 'both':
|
||||
config_stds = ['stdout', 'stderr']
|
||||
else:
|
||||
config_stds = [std]
|
||||
for std in config_stds:
|
||||
stack = getattr(_RedirectionsHolder, '_stack_%s' % std)
|
||||
setattr(sys, std, stack.pop())
|
||||
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
from _pydevd_bundle.pydevd_comm import PyDBDaemonThread
|
||||
from _pydevd_bundle.pydevd_constants import dict_keys
|
||||
|
||||
def kill_all_pydev_threads():
|
||||
threads = dict_keys(PyDBDaemonThread.created_pydb_daemon_threads)
|
||||
for t in threads:
|
||||
if hasattr(t, 'do_kill_pydev_thread'):
|
||||
t.do_kill_pydev_thread()
|
||||
91
ptvsd/pydevd/_pydevd_bundle/pydevd_plugin_utils.py
Normal file
91
ptvsd/pydevd/_pydevd_bundle/pydevd_plugin_utils.py
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
import types
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle import pydevd_trace_api
|
||||
|
||||
try:
|
||||
from pydevd_plugins import django_debug
|
||||
except:
|
||||
django_debug = None
|
||||
pydev_log.debug('Unable to load django_debug plugin')
|
||||
|
||||
try:
|
||||
from pydevd_plugins import jinja2_debug
|
||||
except:
|
||||
jinja2_debug = None
|
||||
pydev_log.debug('Unable to load jinja2_debug plugin')
|
||||
|
||||
def load_plugins():
|
||||
plugins = []
|
||||
if django_debug is not None:
|
||||
plugins.append(django_debug)
|
||||
|
||||
if jinja2_debug is not None:
|
||||
plugins.append(jinja2_debug)
|
||||
return plugins
|
||||
|
||||
|
||||
def bind_func_to_method(func, obj, method_name):
|
||||
bound_method = types.MethodType(func, obj)
|
||||
|
||||
setattr(obj, method_name, bound_method)
|
||||
return bound_method
|
||||
|
||||
|
||||
class PluginManager(object):
|
||||
|
||||
def __init__(self, main_debugger):
|
||||
self.plugins = load_plugins()
|
||||
self.active_plugins = []
|
||||
self.main_debugger = main_debugger
|
||||
self.rebind_methods()
|
||||
|
||||
def add_breakpoint(self, func_name, *args, **kwargs):
|
||||
# add breakpoint for plugin and remember which plugin to use in tracing
|
||||
for plugin in self.plugins:
|
||||
if hasattr(plugin, func_name):
|
||||
func = getattr(plugin, func_name)
|
||||
result = func(self, *args, **kwargs)
|
||||
if result:
|
||||
self.activate(plugin)
|
||||
|
||||
return result
|
||||
return None
|
||||
|
||||
def activate(self, plugin):
|
||||
if plugin not in self.active_plugins:
|
||||
self.active_plugins.append(plugin)
|
||||
self.rebind_methods()
|
||||
|
||||
def rebind_methods(self):
|
||||
if len(self.active_plugins) == 0:
|
||||
self.bind_functions(pydevd_trace_api, getattr, pydevd_trace_api)
|
||||
elif len(self.active_plugins) == 1:
|
||||
self.bind_functions(pydevd_trace_api, getattr, self.active_plugins[0])
|
||||
else:
|
||||
self.bind_functions(pydevd_trace_api, create_dispatch, self.active_plugins)
|
||||
|
||||
def bind_functions(self, interface, function_factory, arg):
|
||||
for name in dir(interface):
|
||||
func = function_factory(arg, name)
|
||||
if type(func) == types.FunctionType:
|
||||
bind_func_to_method(func, self, name)
|
||||
|
||||
|
||||
def create_dispatch(obj, name):
|
||||
def dispatch(self, *args, **kwargs):
|
||||
result = None
|
||||
for p in self.active_plugins:
|
||||
r = getattr(p, name)(self, *args, **kwargs)
|
||||
if not result:
|
||||
result = r
|
||||
return result
|
||||
return dispatch
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
709
ptvsd/pydevd/_pydevd_bundle/pydevd_process_net_command.py
Normal file
709
ptvsd/pydevd/_pydevd_bundle/pydevd_process_net_command.py
Normal file
|
|
@ -0,0 +1,709 @@
|
|||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from _pydev_bundle import pydev_log
|
||||
from _pydevd_bundle import pydevd_traceproperty, pydevd_dont_trace
|
||||
import pydevd_tracing
|
||||
import pydevd_file_utils
|
||||
from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, update_exception_hook
|
||||
from _pydevd_bundle.pydevd_comm import CMD_RUN, CMD_VERSION, CMD_LIST_THREADS, CMD_THREAD_KILL, InternalTerminateThread, \
|
||||
CMD_THREAD_SUSPEND, pydevd_find_thread_by_id, CMD_THREAD_RUN, InternalRunThread, CMD_STEP_INTO, CMD_STEP_OVER, \
|
||||
CMD_STEP_RETURN, CMD_STEP_INTO_MY_CODE, InternalStepThread, CMD_RUN_TO_LINE, CMD_SET_NEXT_STATEMENT, \
|
||||
CMD_SMART_STEP_INTO, InternalSetNextStatementThread, CMD_RELOAD_CODE, ReloadCodeCommand, CMD_CHANGE_VARIABLE, \
|
||||
InternalChangeVariable, CMD_GET_VARIABLE, InternalGetVariable, CMD_GET_ARRAY, InternalGetArray, CMD_GET_COMPLETIONS, \
|
||||
InternalGetCompletions, CMD_GET_FRAME, InternalGetFrame, CMD_SET_BREAK, file_system_encoding, CMD_REMOVE_BREAK, \
|
||||
CMD_EVALUATE_EXPRESSION, CMD_EXEC_EXPRESSION, InternalEvaluateExpression, CMD_CONSOLE_EXEC, InternalConsoleExec, \
|
||||
CMD_SET_PY_EXCEPTION, CMD_GET_FILE_CONTENTS, CMD_SET_PROPERTY_TRACE, CMD_ADD_EXCEPTION_BREAK, \
|
||||
CMD_REMOVE_EXCEPTION_BREAK, CMD_LOAD_SOURCE, CMD_ADD_DJANGO_EXCEPTION_BREAK, CMD_REMOVE_DJANGO_EXCEPTION_BREAK, \
|
||||
CMD_EVALUATE_CONSOLE_EXPRESSION, InternalEvaluateConsoleExpression, InternalConsoleGetCompletions, \
|
||||
CMD_RUN_CUSTOM_OPERATION, InternalRunCustomOperation, CMD_IGNORE_THROWN_EXCEPTION_AT, CMD_ENABLE_DONT_TRACE, \
|
||||
CMD_SHOW_RETURN_VALUES, ID_TO_MEANING, CMD_GET_DESCRIPTION, InternalGetDescription
|
||||
from _pydevd_bundle.pydevd_constants import get_thread_id, IS_PY3K, DebugInfoHolder, dict_keys, \
|
||||
STATE_RUN
|
||||
|
||||
|
||||
def process_net_command(py_db, cmd_id, seq, text):
|
||||
'''Processes a command received from the Java side
|
||||
|
||||
@param cmd_id: the id of the command
|
||||
@param seq: the sequence of the command
|
||||
@param text: the text received in the command
|
||||
|
||||
@note: this method is run as a big switch... after doing some tests, it's not clear whether changing it for
|
||||
a dict id --> function call will have better performance result. A simple test with xrange(10000000) showed
|
||||
that the gains from having a fast access to what should be executed are lost because of the function call in
|
||||
a way that if we had 10 elements in the switch the if..elif are better -- but growing the number of choices
|
||||
makes the solution with the dispatch look better -- so, if this gets more than 20-25 choices at some time,
|
||||
it may be worth refactoring it (actually, reordering the ifs so that the ones used mostly come before
|
||||
probably will give better performance).
|
||||
'''
|
||||
# print(ID_TO_MEANING[str(cmd_id)], repr(text))
|
||||
|
||||
py_db._main_lock.acquire()
|
||||
try:
|
||||
try:
|
||||
cmd = None
|
||||
if cmd_id == CMD_RUN:
|
||||
py_db.ready_to_run = True
|
||||
|
||||
elif cmd_id == CMD_VERSION:
|
||||
# response is version number
|
||||
# ide_os should be 'WINDOWS' or 'UNIX'.
|
||||
ide_os = 'WINDOWS'
|
||||
|
||||
# Breakpoints can be grouped by 'LINE' or by 'ID'.
|
||||
breakpoints_by = 'LINE'
|
||||
|
||||
splitted = text.split('\t')
|
||||
if len(splitted) == 1:
|
||||
_local_version = splitted
|
||||
|
||||
elif len(splitted) == 2:
|
||||
_local_version, ide_os = splitted
|
||||
|
||||
elif len(splitted) == 3:
|
||||
_local_version, ide_os, breakpoints_by = splitted
|
||||
|
||||
if breakpoints_by == 'ID':
|
||||
py_db._set_breakpoints_with_id = True
|
||||
else:
|
||||
py_db._set_breakpoints_with_id = False
|
||||
|
||||
pydevd_file_utils.set_ide_os(ide_os)
|
||||
|
||||
cmd = py_db.cmd_factory.make_version_message(seq)
|
||||
|
||||
elif cmd_id == CMD_LIST_THREADS:
|
||||
# response is a list of threads
|
||||
cmd = py_db.cmd_factory.make_list_threads_message(seq)
|
||||
|
||||
elif cmd_id == CMD_THREAD_KILL:
|
||||
int_cmd = InternalTerminateThread(text)
|
||||
py_db.post_internal_command(int_cmd, text)
|
||||
|
||||
elif cmd_id == CMD_THREAD_SUSPEND:
|
||||
# Yes, thread suspend is still done at this point, not through an internal command!
|
||||
t = pydevd_find_thread_by_id(text)
|
||||
if t and not hasattr(t, 'pydev_do_not_trace'):
|
||||
additional_info = None
|
||||
try:
|
||||
additional_info = t.additional_info
|
||||
except AttributeError:
|
||||
pass # that's ok, no info currently set
|
||||
|
||||
if additional_info is not None:
|
||||
for frame in additional_info.iter_frames(t):
|
||||
py_db.set_trace_for_frame_and_parents(frame, overwrite_prev_trace=True)
|
||||
del frame
|
||||
|
||||
py_db.set_suspend(t, CMD_THREAD_SUSPEND)
|
||||
elif text.startswith('__frame__:'):
|
||||
sys.stderr.write("Can't suspend tasklet: %s\n" % (text,))
|
||||
|
||||
elif cmd_id == CMD_THREAD_RUN:
|
||||
t = pydevd_find_thread_by_id(text)
|
||||
if t:
|
||||
t.additional_info.pydev_step_cmd = -1
|
||||
t.additional_info.pydev_step_stop = None
|
||||
t.additional_info.pydev_state = STATE_RUN
|
||||
|
||||
elif text.startswith('__frame__:'):
|
||||
sys.stderr.write("Can't make tasklet run: %s\n" % (text,))
|
||||
|
||||
|
||||
elif cmd_id == CMD_STEP_INTO or cmd_id == CMD_STEP_OVER or cmd_id == CMD_STEP_RETURN or \
|
||||
cmd_id == CMD_STEP_INTO_MY_CODE:
|
||||
# we received some command to make a single step
|
||||
t = pydevd_find_thread_by_id(text)
|
||||
if t:
|
||||
thread_id = get_thread_id(t)
|
||||
int_cmd = InternalStepThread(thread_id, cmd_id)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
elif text.startswith('__frame__:'):
|
||||
sys.stderr.write("Can't make tasklet step command: %s\n" % (text,))
|
||||
|
||||
|
||||
elif cmd_id == CMD_RUN_TO_LINE or cmd_id == CMD_SET_NEXT_STATEMENT or cmd_id == CMD_SMART_STEP_INTO:
|
||||
# we received some command to make a single step
|
||||
thread_id, line, func_name = text.split('\t', 2)
|
||||
t = pydevd_find_thread_by_id(thread_id)
|
||||
if t:
|
||||
int_cmd = InternalSetNextStatementThread(thread_id, cmd_id, line, func_name)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
elif thread_id.startswith('__frame__:'):
|
||||
sys.stderr.write("Can't set next statement in tasklet: %s\n" % (thread_id,))
|
||||
|
||||
|
||||
elif cmd_id == CMD_RELOAD_CODE:
|
||||
# we received some command to make a reload of a module
|
||||
module_name = text.strip()
|
||||
|
||||
thread_id = '*' # Any thread
|
||||
|
||||
# Note: not going for the main thread because in this case it'd only do the load
|
||||
# when we stopped on a breakpoint.
|
||||
# for tid, t in py_db._running_thread_ids.items(): #Iterate in copy
|
||||
# thread_name = t.getName()
|
||||
#
|
||||
# print thread_name, get_thread_id(t)
|
||||
# #Note: if possible, try to reload on the main thread
|
||||
# if thread_name == 'MainThread':
|
||||
# thread_id = tid
|
||||
|
||||
int_cmd = ReloadCodeCommand(module_name, thread_id)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
|
||||
elif cmd_id == CMD_CHANGE_VARIABLE:
|
||||
# the text is: thread\tstackframe\tFRAME|GLOBAL\tattribute_to_change\tvalue_to_change
|
||||
try:
|
||||
thread_id, frame_id, scope, attr_and_value = text.split('\t', 3)
|
||||
|
||||
tab_index = attr_and_value.rindex('\t')
|
||||
attr = attr_and_value[0:tab_index].replace('\t', '.')
|
||||
value = attr_and_value[tab_index + 1:]
|
||||
int_cmd = InternalChangeVariable(seq, thread_id, frame_id, scope, attr, value)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
elif cmd_id == CMD_GET_VARIABLE:
|
||||
# we received some command to get a variable
|
||||
# the text is: thread_id\tframe_id\tFRAME|GLOBAL\tattributes*
|
||||
try:
|
||||
thread_id, frame_id, scopeattrs = text.split('\t', 2)
|
||||
|
||||
if scopeattrs.find('\t') != -1: # there are attributes beyond scope
|
||||
scope, attrs = scopeattrs.split('\t', 1)
|
||||
else:
|
||||
scope, attrs = (scopeattrs, None)
|
||||
|
||||
int_cmd = InternalGetVariable(seq, thread_id, frame_id, scope, attrs)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
elif cmd_id == CMD_GET_ARRAY:
|
||||
# we received some command to get an array variable
|
||||
# the text is: thread_id\tframe_id\tFRAME|GLOBAL\tname\ttemp\troffs\tcoffs\trows\tcols\tformat
|
||||
try:
|
||||
roffset, coffset, rows, cols, format, thread_id, frame_id, scopeattrs = text.split('\t', 7)
|
||||
|
||||
if scopeattrs.find('\t') != -1: # there are attributes beyond scope
|
||||
scope, attrs = scopeattrs.split('\t', 1)
|
||||
else:
|
||||
scope, attrs = (scopeattrs, None)
|
||||
|
||||
int_cmd = InternalGetArray(seq, roffset, coffset, rows, cols, format, thread_id, frame_id, scope, attrs)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
elif cmd_id == CMD_SHOW_RETURN_VALUES:
|
||||
try:
|
||||
show_return_values = text.split('\t')[1]
|
||||
if int(show_return_values) == 1:
|
||||
py_db.show_return_values = True
|
||||
else:
|
||||
if py_db.show_return_values:
|
||||
# We should remove saved return values
|
||||
py_db.remove_return_values_flag = True
|
||||
py_db.show_return_values = False
|
||||
pydev_log.debug("Show return values: %s\n" % py_db.show_return_values)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
elif cmd_id == CMD_GET_COMPLETIONS:
|
||||
# we received some command to get a variable
|
||||
# the text is: thread_id\tframe_id\tactivation token
|
||||
try:
|
||||
thread_id, frame_id, scope, act_tok = text.split('\t', 3)
|
||||
|
||||
int_cmd = InternalGetCompletions(seq, thread_id, frame_id, act_tok)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
except:
|
||||
traceback.print_exc()
|
||||
elif cmd_id == CMD_GET_DESCRIPTION:
|
||||
try:
|
||||
|
||||
thread_id, frame_id, expression = text.split('\t', 2)
|
||||
int_cmd = InternalGetDescription(seq, thread_id, frame_id, expression)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
|
||||
elif cmd_id == CMD_GET_FRAME:
|
||||
thread_id, frame_id, scope = text.split('\t', 2)
|
||||
|
||||
int_cmd = InternalGetFrame(seq, thread_id, frame_id)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
elif cmd_id == CMD_SET_BREAK:
|
||||
# func name: 'None': match anything. Empty: match global, specified: only method context.
|
||||
# command to add some breakpoint.
|
||||
# text is file\tline. Add to breakpoints dictionary
|
||||
suspend_policy = "NONE"
|
||||
if py_db._set_breakpoints_with_id:
|
||||
breakpoint_id, type, file, line, func_name, condition, expression = text.split('\t', 6)
|
||||
|
||||
breakpoint_id = int(breakpoint_id)
|
||||
line = int(line)
|
||||
|
||||
# We must restore new lines and tabs as done in
|
||||
# AbstractDebugTarget.breakpointAdded
|
||||
condition = condition.replace("@_@NEW_LINE_CHAR@_@", '\n').\
|
||||
replace("@_@TAB_CHAR@_@", '\t').strip()
|
||||
|
||||
expression = expression.replace("@_@NEW_LINE_CHAR@_@", '\n').\
|
||||
replace("@_@TAB_CHAR@_@", '\t').strip()
|
||||
else:
|
||||
#Note: this else should be removed after PyCharm migrates to setting
|
||||
#breakpoints by id (and ideally also provides func_name).
|
||||
type, file, line, func_name, suspend_policy, condition, expression = text.split('\t', 6)
|
||||
# If we don't have an id given for each breakpoint, consider
|
||||
# the id to be the line.
|
||||
breakpoint_id = line = int(line)
|
||||
|
||||
condition = condition.replace("@_@NEW_LINE_CHAR@_@", '\n'). \
|
||||
replace("@_@TAB_CHAR@_@", '\t').strip()
|
||||
|
||||
expression = expression.replace("@_@NEW_LINE_CHAR@_@", '\n'). \
|
||||
replace("@_@TAB_CHAR@_@", '\t').strip()
|
||||
|
||||
if not IS_PY3K: # In Python 3, the frame object will have unicode for the file, whereas on python 2 it has a byte-array encoded with the filesystem encoding.
|
||||
file = file.encode(file_system_encoding)
|
||||
|
||||
file = pydevd_file_utils.norm_file_to_server(file)
|
||||
|
||||
if not pydevd_file_utils.exists(file):
|
||||
sys.stderr.write('pydev debugger: warning: trying to add breakpoint'\
|
||||
' to file that does not exist: %s (will have no effect)\n' % (file,))
|
||||
sys.stderr.flush()
|
||||
|
||||
|
||||
if len(condition) <= 0 or condition is None or condition == "None":
|
||||
condition = None
|
||||
|
||||
if len(expression) <= 0 or expression is None or expression == "None":
|
||||
expression = None
|
||||
|
||||
if type == 'python-line':
|
||||
breakpoint = LineBreakpoint(line, condition, func_name, expression, suspend_policy)
|
||||
breakpoints = py_db.breakpoints
|
||||
file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint
|
||||
supported_type = True
|
||||
else:
|
||||
result = None
|
||||
plugin = py_db.get_plugin_lazy_init()
|
||||
if plugin is not None:
|
||||
result = plugin.add_breakpoint('add_line_breakpoint', py_db, type, file, line, condition, expression, func_name)
|
||||
if result is not None:
|
||||
supported_type = True
|
||||
breakpoint, breakpoints = result
|
||||
file_to_id_to_breakpoint = py_db.file_to_id_to_plugin_breakpoint
|
||||
else:
|
||||
supported_type = False
|
||||
|
||||
if not supported_type:
|
||||
raise NameError(type)
|
||||
|
||||
if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
|
||||
pydev_log.debug('Added breakpoint:%s - line:%s - func_name:%s\n' % (file, line, func_name.encode('utf-8')))
|
||||
sys.stderr.flush()
|
||||
|
||||
if file in file_to_id_to_breakpoint:
|
||||
id_to_pybreakpoint = file_to_id_to_breakpoint[file]
|
||||
else:
|
||||
id_to_pybreakpoint = file_to_id_to_breakpoint[file] = {}
|
||||
|
||||
id_to_pybreakpoint[breakpoint_id] = breakpoint
|
||||
py_db.consolidate_breakpoints(file, id_to_pybreakpoint, breakpoints)
|
||||
if py_db.plugin is not None:
|
||||
py_db.has_plugin_line_breaks = py_db.plugin.has_line_breaks()
|
||||
|
||||
py_db.set_tracing_for_untraced_contexts_if_not_frame_eval(overwrite_prev_trace=True)
|
||||
py_db.enable_tracing_in_frames_while_running_if_frame_eval()
|
||||
|
||||
elif cmd_id == CMD_REMOVE_BREAK:
|
||||
#command to remove some breakpoint
|
||||
#text is type\file\tid. Remove from breakpoints dictionary
|
||||
breakpoint_type, file, breakpoint_id = text.split('\t', 2)
|
||||
|
||||
if not IS_PY3K: # In Python 3, the frame object will have unicode for the file, whereas on python 2 it has a byte-array encoded with the filesystem encoding.
|
||||
file = file.encode(file_system_encoding)
|
||||
|
||||
file = pydevd_file_utils.norm_file_to_server(file)
|
||||
|
||||
try:
|
||||
breakpoint_id = int(breakpoint_id)
|
||||
except ValueError:
|
||||
pydev_log.error('Error removing breakpoint. Expected breakpoint_id to be an int. Found: %s' % (breakpoint_id,))
|
||||
|
||||
else:
|
||||
file_to_id_to_breakpoint = None
|
||||
if breakpoint_type == 'python-line':
|
||||
breakpoints = py_db.breakpoints
|
||||
file_to_id_to_breakpoint = py_db.file_to_id_to_line_breakpoint
|
||||
elif py_db.get_plugin_lazy_init() is not None:
|
||||
result = py_db.plugin.get_breakpoints(py_db, breakpoint_type)
|
||||
if result is not None:
|
||||
file_to_id_to_breakpoint = py_db.file_to_id_to_plugin_breakpoint
|
||||
breakpoints = result
|
||||
|
||||
if file_to_id_to_breakpoint is None:
|
||||
pydev_log.error('Error removing breakpoint. Cant handle breakpoint of type %s' % breakpoint_type)
|
||||
else:
|
||||
try:
|
||||
id_to_pybreakpoint = file_to_id_to_breakpoint.get(file, {})
|
||||
if DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS > 0:
|
||||
existing = id_to_pybreakpoint[breakpoint_id]
|
||||
sys.stderr.write('Removed breakpoint:%s - line:%s - func_name:%s (id: %s)\n' % (
|
||||
file, existing.line, existing.func_name.encode('utf-8'), breakpoint_id))
|
||||
|
||||
del id_to_pybreakpoint[breakpoint_id]
|
||||
py_db.consolidate_breakpoints(file, id_to_pybreakpoint, breakpoints)
|
||||
if py_db.plugin is not None:
|
||||
py_db.has_plugin_line_breaks = py_db.plugin.has_line_breaks()
|
||||
|
||||
except KeyError:
|
||||
pydev_log.error("Error removing breakpoint: Breakpoint id not found: %s id: %s. Available ids: %s\n" % (
|
||||
file, breakpoint_id, dict_keys(id_to_pybreakpoint)))
|
||||
|
||||
|
||||
elif cmd_id == CMD_EVALUATE_EXPRESSION or cmd_id == CMD_EXEC_EXPRESSION:
|
||||
#command to evaluate the given expression
|
||||
#text is: thread\tstackframe\tLOCAL\texpression
|
||||
temp_name = ""
|
||||
try:
|
||||
thread_id, frame_id, scope, expression, trim, temp_name = text.split('\t', 5)
|
||||
except ValueError:
|
||||
thread_id, frame_id, scope, expression, trim = text.split('\t', 4)
|
||||
int_cmd = InternalEvaluateExpression(seq, thread_id, frame_id, expression,
|
||||
cmd_id == CMD_EXEC_EXPRESSION, int(trim) == 1, temp_name)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
elif cmd_id == CMD_CONSOLE_EXEC:
|
||||
#command to exec expression in console, in case expression is only partially valid 'False' is returned
|
||||
#text is: thread\tstackframe\tLOCAL\texpression
|
||||
|
||||
thread_id, frame_id, scope, expression = text.split('\t', 3)
|
||||
|
||||
int_cmd = InternalConsoleExec(seq, thread_id, frame_id, expression)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
elif cmd_id == CMD_SET_PY_EXCEPTION:
|
||||
# Command which receives set of exceptions on which user wants to break the debugger
|
||||
# text is: break_on_uncaught;break_on_caught;TypeError;ImportError;zipimport.ZipImportError;
|
||||
# This API is optional and works 'in bulk' -- it's possible
|
||||
# to get finer-grained control with CMD_ADD_EXCEPTION_BREAK/CMD_REMOVE_EXCEPTION_BREAK
|
||||
# which allows setting caught/uncaught per exception.
|
||||
#
|
||||
splitted = text.split(';')
|
||||
py_db.break_on_uncaught_exceptions = {}
|
||||
py_db.break_on_caught_exceptions = {}
|
||||
added = []
|
||||
if len(splitted) >= 4:
|
||||
if splitted[0] == 'true':
|
||||
break_on_uncaught = True
|
||||
else:
|
||||
break_on_uncaught = False
|
||||
|
||||
if splitted[1] == 'true':
|
||||
break_on_caught = True
|
||||
else:
|
||||
break_on_caught = False
|
||||
|
||||
if splitted[2] == 'true':
|
||||
py_db.break_on_exceptions_thrown_in_same_context = True
|
||||
else:
|
||||
py_db.break_on_exceptions_thrown_in_same_context = False
|
||||
|
||||
if splitted[3] == 'true':
|
||||
py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = True
|
||||
else:
|
||||
py_db.ignore_exceptions_thrown_in_lines_with_ignore_exception = False
|
||||
|
||||
for exception_type in splitted[4:]:
|
||||
exception_type = exception_type.strip()
|
||||
if not exception_type:
|
||||
continue
|
||||
|
||||
exception_breakpoint = py_db.add_break_on_exception(
|
||||
exception_type,
|
||||
notify_always=break_on_caught,
|
||||
notify_on_terminate=break_on_uncaught,
|
||||
notify_on_first_raise_only=False,
|
||||
)
|
||||
if exception_breakpoint is None:
|
||||
continue
|
||||
added.append(exception_breakpoint)
|
||||
|
||||
py_db.update_after_exceptions_added(added)
|
||||
if break_on_caught:
|
||||
py_db.enable_tracing_in_frames_while_running_if_frame_eval()
|
||||
|
||||
else:
|
||||
sys.stderr.write("Error when setting exception list. Received: %s\n" % (text,))
|
||||
|
||||
elif cmd_id == CMD_GET_FILE_CONTENTS:
|
||||
|
||||
if not IS_PY3K: # In Python 3, the frame object will have unicode for the file, whereas on python 2 it has a byte-array encoded with the filesystem encoding.
|
||||
text = text.encode(file_system_encoding)
|
||||
|
||||
if os.path.exists(text):
|
||||
f = open(text, 'r')
|
||||
try:
|
||||
source = f.read()
|
||||
finally:
|
||||
f.close()
|
||||
cmd = py_db.cmd_factory.make_get_file_contents(seq, source)
|
||||
|
||||
elif cmd_id == CMD_SET_PROPERTY_TRACE:
|
||||
# Command which receives whether to trace property getter/setter/deleter
|
||||
# text is feature_state(true/false);disable_getter/disable_setter/disable_deleter
|
||||
if text != "":
|
||||
splitted = text.split(';')
|
||||
if len(splitted) >= 3:
|
||||
if py_db.disable_property_trace is False and splitted[0] == 'true':
|
||||
# Replacing property by custom property only when the debugger starts
|
||||
pydevd_traceproperty.replace_builtin_property()
|
||||
py_db.disable_property_trace = True
|
||||
# Enable/Disable tracing of the property getter
|
||||
if splitted[1] == 'true':
|
||||
py_db.disable_property_getter_trace = True
|
||||
else:
|
||||
py_db.disable_property_getter_trace = False
|
||||
# Enable/Disable tracing of the property setter
|
||||
if splitted[2] == 'true':
|
||||
py_db.disable_property_setter_trace = True
|
||||
else:
|
||||
py_db.disable_property_setter_trace = False
|
||||
# Enable/Disable tracing of the property deleter
|
||||
if splitted[3] == 'true':
|
||||
py_db.disable_property_deleter_trace = True
|
||||
else:
|
||||
py_db.disable_property_deleter_trace = False
|
||||
else:
|
||||
# User hasn't configured any settings for property tracing
|
||||
pass
|
||||
|
||||
elif cmd_id == CMD_ADD_EXCEPTION_BREAK:
|
||||
if text.find('\t') != -1:
|
||||
exception, notify_always, notify_on_terminate, ignore_libraries = text.split('\t', 3)
|
||||
else:
|
||||
exception, notify_always, notify_on_terminate, ignore_libraries = text, 0, 0, 0
|
||||
|
||||
if exception.find('-') != -1:
|
||||
breakpoint_type, exception = exception.split('-')
|
||||
else:
|
||||
breakpoint_type = 'python'
|
||||
|
||||
if breakpoint_type == 'python':
|
||||
if int(notify_always) == 1:
|
||||
pydev_log.warn("Deprecated parameter: 'notify always' policy removed in PyCharm\n")
|
||||
exception_breakpoint = py_db.add_break_on_exception(
|
||||
exception,
|
||||
notify_always=int(notify_always) > 0,
|
||||
notify_on_terminate = int(notify_on_terminate) == 1,
|
||||
notify_on_first_raise_only=int(notify_always) == 2,
|
||||
ignore_libraries=int(ignore_libraries) > 0
|
||||
)
|
||||
|
||||
if exception_breakpoint is not None:
|
||||
py_db.update_after_exceptions_added([exception_breakpoint])
|
||||
if notify_always:
|
||||
py_db.enable_tracing_in_frames_while_running_if_frame_eval()
|
||||
else:
|
||||
supported_type = False
|
||||
plugin = py_db.get_plugin_lazy_init()
|
||||
if plugin is not None:
|
||||
supported_type = plugin.add_breakpoint('add_exception_breakpoint', py_db, breakpoint_type, exception)
|
||||
|
||||
if supported_type:
|
||||
py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks()
|
||||
py_db.enable_tracing_in_frames_while_running_if_frame_eval()
|
||||
else:
|
||||
raise NameError(breakpoint_type)
|
||||
|
||||
|
||||
|
||||
elif cmd_id == CMD_REMOVE_EXCEPTION_BREAK:
|
||||
exception = text
|
||||
if exception.find('-') != -1:
|
||||
exception_type, exception = exception.split('-')
|
||||
else:
|
||||
exception_type = 'python'
|
||||
|
||||
if exception_type == 'python':
|
||||
try:
|
||||
cp = py_db.break_on_uncaught_exceptions.copy()
|
||||
cp.pop(exception, None)
|
||||
py_db.break_on_uncaught_exceptions = cp
|
||||
|
||||
cp = py_db.break_on_caught_exceptions.copy()
|
||||
cp.pop(exception, None)
|
||||
py_db.break_on_caught_exceptions = cp
|
||||
except:
|
||||
pydev_log.debug("Error while removing exception %s"%sys.exc_info()[0])
|
||||
update_exception_hook(py_db)
|
||||
else:
|
||||
supported_type = False
|
||||
|
||||
# I.e.: no need to initialize lazy (if we didn't have it in the first place, we can't remove
|
||||
# anything from it anyways).
|
||||
plugin = py_db.plugin
|
||||
if plugin is not None:
|
||||
supported_type = plugin.remove_exception_breakpoint(py_db, exception_type, exception)
|
||||
|
||||
if supported_type:
|
||||
py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks()
|
||||
else:
|
||||
raise NameError(exception_type)
|
||||
if len(py_db.break_on_caught_exceptions) == 0 and not py_db.has_plugin_exception_breaks:
|
||||
py_db.disable_tracing_while_running_if_frame_eval()
|
||||
|
||||
elif cmd_id == CMD_LOAD_SOURCE:
|
||||
path = text
|
||||
try:
|
||||
f = open(path, 'r')
|
||||
source = f.read()
|
||||
py_db.cmd_factory.make_load_source_message(seq, source, py_db)
|
||||
except:
|
||||
return py_db.cmd_factory.make_error_message(seq, pydevd_tracing.get_exception_traceback_str())
|
||||
|
||||
elif cmd_id == CMD_ADD_DJANGO_EXCEPTION_BREAK:
|
||||
exception = text
|
||||
plugin = py_db.get_plugin_lazy_init()
|
||||
if plugin is not None:
|
||||
plugin.add_breakpoint('add_exception_breakpoint', py_db, 'django', exception)
|
||||
py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks()
|
||||
py_db.enable_tracing_in_frames_while_running_if_frame_eval()
|
||||
|
||||
elif cmd_id == CMD_REMOVE_DJANGO_EXCEPTION_BREAK:
|
||||
exception = text
|
||||
|
||||
# I.e.: no need to initialize lazy (if we didn't have it in the first place, we can't remove
|
||||
# anything from it anyways).
|
||||
plugin = py_db.plugin
|
||||
if plugin is not None:
|
||||
plugin.remove_exception_breakpoint(py_db, 'django', exception)
|
||||
py_db.has_plugin_exception_breaks = py_db.plugin.has_exception_breaks()
|
||||
if len(py_db.break_on_caught_exceptions) == 0 and not py_db.has_plugin_exception_breaks:
|
||||
py_db.disable_tracing_while_running_if_frame_eval()
|
||||
|
||||
elif cmd_id == CMD_EVALUATE_CONSOLE_EXPRESSION:
|
||||
# Command which takes care for the debug console communication
|
||||
if text != "":
|
||||
thread_id, frame_id, console_command = text.split('\t', 2)
|
||||
console_command, line = console_command.split('\t')
|
||||
|
||||
if console_command == 'EVALUATE':
|
||||
int_cmd = InternalEvaluateConsoleExpression(
|
||||
seq, thread_id, frame_id, line, buffer_output=True)
|
||||
|
||||
elif console_command == 'EVALUATE_UNBUFFERED':
|
||||
int_cmd = InternalEvaluateConsoleExpression(
|
||||
seq, thread_id, frame_id, line, buffer_output=False)
|
||||
|
||||
elif console_command == 'GET_COMPLETIONS':
|
||||
int_cmd = InternalConsoleGetCompletions(seq, thread_id, frame_id, line)
|
||||
|
||||
else:
|
||||
raise ValueError('Unrecognized command: %s' % (console_command,))
|
||||
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
elif cmd_id == CMD_RUN_CUSTOM_OPERATION:
|
||||
# Command which runs a custom operation
|
||||
if text != "":
|
||||
try:
|
||||
location, custom = text.split('||', 1)
|
||||
except:
|
||||
sys.stderr.write('Custom operation now needs a || separator. Found: %s\n' % (text,))
|
||||
raise
|
||||
|
||||
thread_id, frame_id, scopeattrs = location.split('\t', 2)
|
||||
|
||||
if scopeattrs.find('\t') != -1: # there are attributes beyond scope
|
||||
scope, attrs = scopeattrs.split('\t', 1)
|
||||
else:
|
||||
scope, attrs = (scopeattrs, None)
|
||||
|
||||
# : style: EXECFILE or EXEC
|
||||
# : encoded_code_or_file: file to execute or code
|
||||
# : fname: name of function to be executed in the resulting namespace
|
||||
style, encoded_code_or_file, fnname = custom.split('\t', 3)
|
||||
int_cmd = InternalRunCustomOperation(seq, thread_id, frame_id, scope, attrs,
|
||||
style, encoded_code_or_file, fnname)
|
||||
py_db.post_internal_command(int_cmd, thread_id)
|
||||
|
||||
elif cmd_id == CMD_IGNORE_THROWN_EXCEPTION_AT:
|
||||
if text:
|
||||
replace = 'REPLACE:' # Not all 3.x versions support u'REPLACE:', so, doing workaround.
|
||||
if not IS_PY3K:
|
||||
replace = unicode(replace)
|
||||
|
||||
if text.startswith(replace):
|
||||
text = text[8:]
|
||||
py_db.filename_to_lines_where_exceptions_are_ignored.clear()
|
||||
|
||||
if text:
|
||||
for line in text.split('||'): # Can be bulk-created (one in each line)
|
||||
filename, line_number = line.split('|')
|
||||
if not IS_PY3K:
|
||||
filename = filename.encode(file_system_encoding)
|
||||
|
||||
filename = pydevd_file_utils.norm_file_to_server(filename)
|
||||
|
||||
if os.path.exists(filename):
|
||||
lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored.get(filename)
|
||||
if lines_ignored is None:
|
||||
lines_ignored = py_db.filename_to_lines_where_exceptions_are_ignored[filename] = {}
|
||||
lines_ignored[int(line_number)] = 1
|
||||
else:
|
||||
sys.stderr.write('pydev debugger: warning: trying to ignore exception thrown'\
|
||||
' on file that does not exist: %s (will have no effect)\n' % (filename,))
|
||||
|
||||
elif cmd_id == CMD_ENABLE_DONT_TRACE:
|
||||
if text:
|
||||
true_str = 'true' # Not all 3.x versions support u'str', so, doing workaround.
|
||||
if not IS_PY3K:
|
||||
true_str = unicode(true_str)
|
||||
|
||||
mode = text.strip() == true_str
|
||||
pydevd_dont_trace.trace_filter(mode)
|
||||
|
||||
else:
|
||||
#I have no idea what this is all about
|
||||
cmd = py_db.cmd_factory.make_error_message(seq, "unexpected command " + str(cmd_id))
|
||||
|
||||
if cmd is not None:
|
||||
py_db.writer.add_command(cmd)
|
||||
del cmd
|
||||
|
||||
except Exception:
|
||||
traceback.print_exc()
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
stream = StringIO()
|
||||
traceback.print_exc(file=stream)
|
||||
cmd = py_db.cmd_factory.make_error_message(
|
||||
seq,
|
||||
"Unexpected exception in process_net_command.\nInitial params: %s. Exception: %s" % (
|
||||
((cmd_id, seq, text), stream.getvalue())
|
||||
)
|
||||
)
|
||||
|
||||
py_db.writer.add_command(cmd)
|
||||
finally:
|
||||
py_db._main_lock.release()
|
||||
|
||||
|
||||
236
ptvsd/pydevd/_pydevd_bundle/pydevd_referrers.py
Normal file
236
ptvsd/pydevd/_pydevd_bundle/pydevd_referrers.py
Normal file
|
|
@ -0,0 +1,236 @@
|
|||
import sys
|
||||
from _pydevd_bundle import pydevd_xml
|
||||
from os.path import basename
|
||||
import traceback
|
||||
try:
|
||||
from urllib import quote, quote_plus, unquote, unquote_plus
|
||||
except:
|
||||
from urllib.parse import quote, quote_plus, unquote, unquote_plus #@Reimport @UnresolvedImport
|
||||
|
||||
#===================================================================================================
|
||||
# print_var_node
|
||||
#===================================================================================================
|
||||
def print_var_node(xml_node, stream):
    """Write a one-line, human-readable description of a <var> XML node to *stream*."""
    # Attribute values arrive url-quoted in the wire format; decode before printing.
    attr = xml_node.getAttribute
    pieces = [
        'Name: ', unquote_plus(attr('name')),
        ', Value: ', unquote_plus(attr('value')),
        ', Type: ', unquote_plus(attr('type')),
    ]

    found_as = attr('found_as')
    if found_as:
        pieces.append(', Found as: %s' % (unquote_plus(found_as),))
    pieces.append('\n')

    for piece in pieces:
        stream.write(piece)
|
||||
|
||||
#===================================================================================================
|
||||
# print_referrers
|
||||
#===================================================================================================
|
||||
def print_referrers(obj, stream=None):
    """Compute referrer info for *obj*, pretty-print it to *stream* (stdout by
    default) and return the raw XML string produced by get_referrer_info()."""
    if stream is None:
        stream = sys.stdout
    result = get_referrer_info(obj)

    from xml.dom.minidom import parseString
    root = parseString(result).getElementsByTagName('xml')[0]

    for node in root.childNodes:
        if node.nodeType == node.TEXT_NODE:
            continue  # whitespace between elements

        tag = node.localName
        if tag == 'for':
            # The <for> element wraps the object whose referrers were searched.
            stream.write('Searching references for: ')
            element_children = [c for c in node.childNodes if c.nodeType != node.TEXT_NODE]
            for child in element_children:
                print_var_node(child, stream)

        elif tag == 'var':
            stream.write('Referrer found: ')
            print_var_node(node, stream)

        else:
            sys.stderr.write('Unhandled node: %s\n' % (node,))

    return result
|
||||
|
||||
|
||||
#===================================================================================================
|
||||
# get_referrer_info
|
||||
#===================================================================================================
|
||||
def get_referrer_info(searched_obj):
    """Return an XML string describing the objects which refer to *searched_obj*.

    The result has the form ``<xml><for>...</for><var/>...</xml>``: the
    ``<for>`` element describes the searched object itself and each following
    ``<var>`` element describes one referrer, with a ``found_as`` attribute
    telling where the reference was found when that could be determined.
    Errors never propagate: on any failure an XML document describing the
    error is returned instead.
    """
    DEBUG = 0  # flip to 1 locally to trace the search on stderr
    if DEBUG:
        sys.stderr.write('Getting referrers info.\n')
    try:
        try:
            if searched_obj is None:
                # gc.get_referrers(None) would return a huge, useless list.
                ret = ['<xml>\n']

                ret.append('<for>\n')
                ret.append(pydevd_xml.var_to_xml(
                    searched_obj,
                    'Skipping getting referrers for None',
                    additional_in_xml=' id="%s"' % (id(searched_obj),)))
                ret.append('</for>\n')
                ret.append('</xml>')
                ret = ''.join(ret)
                return ret

            obj_id = id(searched_obj)

            try:
                if DEBUG:
                    sys.stderr.write('Getting referrers...\n')
                import gc
                referrers = gc.get_referrers(searched_obj)
            except:
                # get_referrers may be unavailable or may fail; report instead of crashing.
                traceback.print_exc()
                ret = ['<xml>\n']

                ret.append('<for>\n')
                ret.append(pydevd_xml.var_to_xml(
                    searched_obj,
                    'Exception raised while trying to get_referrers.',
                    additional_in_xml=' id="%s"' % (id(searched_obj),)))
                ret.append('</for>\n')
                ret.append('</xml>')
                ret = ''.join(ret)
                return ret

            if DEBUG:
                sys.stderr.write('Found %s referrers.\n' % (len(referrers),))

            curr_frame = sys._getframe()
            frame_type = type(curr_frame)

            # Ignore this frame and any caller frame of this frame

            ignore_frames = {}  # Should be a set, but it's not available on all python versions.
            while curr_frame is not None:
                if basename(curr_frame.f_code.co_filename).startswith('pydev'):
                    ignore_frames[curr_frame] = 1
                curr_frame = curr_frame.f_back

            ret = ['<xml>\n']

            ret.append('<for>\n')
            if DEBUG:
                sys.stderr.write('Searching Referrers of obj with id="%s"\n' % (obj_id,))

            ret.append(pydevd_xml.var_to_xml(
                searched_obj,
                'Referrers of obj with id="%s"' % (obj_id,)))
            ret.append('</for>\n')

            # Lazily filled with gc.get_objects() (expensive) only when needed.
            all_objects = None

            for r in referrers:
                try:
                    if r in ignore_frames:
                        continue  # Skip the references we may add ourselves
                except:
                    pass  # Ok: unhashable type checked...

                # The list returned by gc.get_referrers itself holds a reference.
                if r is referrers:
                    continue

                r_type = type(r)
                r_id = str(id(r))

                representation = str(r_type)

                found_as = ''
                if r_type == frame_type:
                    # Frame referrer: report under which local variable it was found.
                    if DEBUG:
                        sys.stderr.write('Found frame referrer: %r\n' % (r,))
                    for key, val in r.f_locals.items():
                        if val is searched_obj:
                            found_as = key
                            break

                elif r_type == dict:
                    if DEBUG:
                        sys.stderr.write('Found dict referrer: %r\n' % (r,))

                    # Try to check if it's a value in the dict (and under which key it was found)
                    for key, val in r.items():
                        if val is searched_obj:
                            found_as = key
                            if DEBUG:
                                sys.stderr.write('    Found as %r in dict\n' % (found_as,))
                            break

                    # Ok, there's one annoying thing: many times we find it in a dict from an instance,
                    # but with this we don't directly have the class, only the dict, so, to workaround that
                    # we iterate over all reachable objects ad check if one of those has the given dict.
                    if all_objects is None:
                        all_objects = gc.get_objects()

                    for x in all_objects:
                        try:
                            if getattr(x, '__dict__', None) is r:
                                # Report the owning instance instead of its __dict__.
                                r = x
                                r_type = type(x)
                                r_id = str(id(r))
                                representation = str(r_type)
                                break
                        except:
                            pass  # Just ignore any error here (i.e.: ReferenceError, etc.)

                elif r_type in (tuple, list):
                    if DEBUG:
                        sys.stderr.write('Found tuple referrer: %r\n' % (r,))

                    for i, x in enumerate(r):
                        if x is searched_obj:
                            found_as = '%s[%s]' % (r_type.__name__, i)
                            if DEBUG:
                                sys.stderr.write('    Found as %s in tuple: \n' % (found_as,))
                            break

                if found_as:
                    if not isinstance(found_as, str):
                        found_as = str(found_as)
                    found_as = ' found_as="%s"' % (pydevd_xml.make_valid_xml_value(found_as),)

                ret.append(pydevd_xml.var_to_xml(
                    r,
                    representation,
                    additional_in_xml=' id="%s"%s' % (r_id, found_as)))
        finally:
            if DEBUG:
                sys.stderr.write('Done searching for references.\n')

            # If we have any exceptions, don't keep dangling references from this frame to any of our objects.
            all_objects = None
            referrers = None
            searched_obj = None
            r = None
            x = None
            key = None
            val = None
            curr_frame = None
            ignore_frames = None
    except:
        traceback.print_exc()
        ret = ['<xml>\n']

        ret.append('<for>\n')
        ret.append(pydevd_xml.var_to_xml(
            searched_obj,
            'Error getting referrers for:',
            additional_in_xml=' id="%s"' % (id(searched_obj),)))
        ret.append('</for>\n')
        ret.append('</xml>')
        ret = ''.join(ret)
        return ret

    ret.append('</xml>')
    ret = ''.join(ret)
    return ret
|
||||
|
||||
453
ptvsd/pydevd/_pydevd_bundle/pydevd_reload.py
Normal file
453
ptvsd/pydevd/_pydevd_bundle/pydevd_reload.py
Normal file
|
|
@ -0,0 +1,453 @@
|
|||
"""
|
||||
Based on the python xreload.
|
||||
|
||||
Changes
|
||||
======================
|
||||
|
||||
1. we don't recreate the old namespace from new classes. Rather, we keep the existing namespace,
|
||||
load a new version of it and update only some of the things we can inplace. That way, we don't break
|
||||
things such as singletons or end up with a second representation of the same class in memory.
|
||||
|
||||
2. If we find it to be a __metaclass__, we try to update it as a regular class.
|
||||
|
||||
3. We don't remove old attributes (and leave them lying around even if they're no longer used).
|
||||
|
||||
4. Reload hooks were changed
|
||||
|
||||
These changes make it more stable, especially in the common case (where in a debug session only the
|
||||
contents of a function are changed), besides providing flexibility for users that want to extend
|
||||
on it.
|
||||
|
||||
|
||||
|
||||
Hooks
|
||||
======================
|
||||
|
||||
Classes/modules can be specially crafted to work with the reload (so that it can, for instance,
|
||||
update some constant which was changed).
|
||||
|
||||
1. To participate in the change of some attribute:
|
||||
|
||||
In a module:
|
||||
|
||||
__xreload_old_new__(namespace, name, old, new)
|
||||
|
||||
in a class:
|
||||
|
||||
@classmethod
|
||||
__xreload_old_new__(cls, name, old, new)
|
||||
|
||||
A class or module may include a method called '__xreload_old_new__' which is called when we're
|
||||
unable to reload a given attribute.
|
||||
|
||||
|
||||
|
||||
2. To do something after the whole reload is finished:
|
||||
|
||||
In a module:
|
||||
|
||||
__xreload_after_reload_update__(namespace):
|
||||
|
||||
In a class:
|
||||
|
||||
@classmethod
|
||||
__xreload_after_reload_update__(cls):
|
||||
|
||||
|
||||
A class or module may include a method called '__xreload_after_reload_update__' which is called
|
||||
after the reload finishes.
|
||||
|
||||
|
||||
Important: when providing a hook, always use the namespace or cls provided and not anything in the global
|
||||
namespace, as the global namespace are only temporarily created during the reload and may not reflect the
|
||||
actual application state (while the cls and namespace passed are).
|
||||
|
||||
|
||||
Current limitations
|
||||
======================
|
||||
|
||||
|
||||
- Attributes/constants are added, but not changed (so singletons and the application state is not
|
||||
broken -- use provided hooks to workaround it).
|
||||
|
||||
- Code using metaclasses may not always work.
|
||||
|
||||
- Functions and methods using decorators (other than classmethod and staticmethod) are not handled
|
||||
correctly.
|
||||
|
||||
- Renamings are not handled correctly.
|
||||
|
||||
- Dependent modules are not reloaded.
|
||||
|
||||
- New __slots__ can't be added to existing classes.
|
||||
|
||||
|
||||
Info
|
||||
======================
|
||||
|
||||
Original: http://svn.python.org/projects/sandbox/trunk/xreload/xreload.py
|
||||
Note: it seems https://github.com/plone/plone.reload/blob/master/plone/reload/xreload.py enhances it (to check later)
|
||||
|
||||
Interesting alternative: https://code.google.com/p/reimport/
|
||||
|
||||
Alternative to reload().
|
||||
|
||||
This works by executing the module in a scratch namespace, and then patching classes, methods and
|
||||
functions in place. This avoids the need to patch instances. New objects are copied into the
|
||||
target namespace.
|
||||
|
||||
"""
|
||||
|
||||
import imp
|
||||
from _pydev_bundle.pydev_imports import Exec
|
||||
from _pydevd_bundle import pydevd_dont_trace
|
||||
import sys
|
||||
import traceback
|
||||
import types
|
||||
|
||||
# Verbosity levels used by the notify_* helpers below.
NO_DEBUG = 0
LEVEL1 = 1
LEVEL2 = 2

# Current verbosity: raise to LEVEL1/LEVEL2 to get reload tracing on stdout.
DEBUG = NO_DEBUG
|
||||
|
||||
def write(*args):
    """Print the given arguments to stdout, space-separated, with a trailing newline."""
    msg = ' '.join(str(arg) for arg in args)
    sys.stdout.write('%s\n' % (msg,))
|
||||
|
||||
def write_err(*args):
    """Print the given arguments to stderr, space-separated, with the
    'pydev debugger: ' prefix and a trailing newline."""
    msg = ' '.join(str(arg) for arg in args)
    sys.stderr.write('pydev debugger: %s\n' % (msg,))
|
||||
|
||||
def notify_info0(*args):
    # Level-0 notification: always shown to the user (goes to stderr).
    write_err(*args)
|
||||
|
||||
def notify_info(*args):
    # Shown only when the module-level DEBUG verbosity is at least LEVEL1.
    if DEBUG >= LEVEL1:
        write(*args)
|
||||
|
||||
def notify_info2(*args):
    # Shown only when the module-level DEBUG verbosity is at least LEVEL2.
    if DEBUG >= LEVEL2:
        write(*args)
|
||||
|
||||
def notify_error(*args):
    # Errors are always reported (on stderr).
    write_err(*args)
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# code_objects_equal
|
||||
#=======================================================================================================================
|
||||
def code_objects_equal(code0, code1):
    """Compare two code objects attribute by attribute, skipping private
    attributes and anything line-number related (so that a pure shift in
    line numbers does not count as a change)."""
    relevant = (name for name in dir(code0)
                if not name.startswith('_') and 'lineno' not in name)
    return all(getattr(code0, name) == getattr(code1, name) for name in relevant)
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# xreload
|
||||
#=======================================================================================================================
|
||||
def xreload(mod):
    """Reload a module in place, updating classes, methods and functions.

    mod: a module object

    Returns a boolean indicating whether a change was done.
    """
    reloader = Reload(mod)
    reloader.apply()
    found_change = reloader.found_change
    # Drop the reloader (and anything it still references) before clearing
    # the dont-trace cache, which may need to re-evaluate reloaded code.
    reloader = None
    pydevd_dont_trace.clear_trace_filter_cache()
    return found_change
|
||||
|
||||
|
||||
# This isn't actually used... Initially I planned to reload variables which are immutable on the
|
||||
# namespace, but this can destroy places where we're saving state, which may not be what we want,
|
||||
# so, we're being conservative and giving the user hooks if he wants to do a reload.
|
||||
#
|
||||
# immutable_types = [int, str, float, tuple] #That should be common to all Python versions
|
||||
#
|
||||
# for name in 'long basestr unicode frozenset'.split():
|
||||
# try:
|
||||
# immutable_types.append(__builtins__[name])
|
||||
# except:
|
||||
# pass #Just ignore: not all python versions are created equal.
|
||||
# immutable_types = tuple(immutable_types)
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# Reload
|
||||
#=======================================================================================================================
|
||||
class Reload:
    """Reloads a single module in place (see the module docstring for the
    strategy, the supported hooks and the known limitations)."""

    def __init__(self, mod):
        # Module object to be reloaded.
        self.mod = mod
        # Set to True whenever anything was effectively updated during apply().
        self.found_change = False

    def apply(self):
        """Re-import self.mod from its source and patch the live module in place.

        Never raises: any failure is printed and the reload is abandoned.
        """
        mod = self.mod
        self._on_finish_callbacks = []
        try:
            # Get the module name, e.g. 'foo.bar.whatever'
            modname = mod.__name__
            # Get the module namespace (dict) early; this is part of the type check
            modns = mod.__dict__
            # Parse it into package name and module name, e.g. 'foo.bar' and 'whatever'
            i = modname.rfind(".")
            if i >= 0:
                pkgname, modname = modname[:i], modname[i + 1:]
            else:
                pkgname = None
            # Compute the search path
            if pkgname:
                # We're not reloading the package, only the module in it
                pkg = sys.modules[pkgname]
                path = pkg.__path__  # Search inside the package
            else:
                # Search the top-level module path
                pkg = None
                path = None  # Make find_module() uses the default search path
            # Find the module; may raise ImportError
            # NOTE(review): uses the legacy `imp` module (removed in Python 3.12) — verify target runtime.
            (stream, filename, (suffix, mode, kind)) = imp.find_module(modname, path)
            # Turn it into a code object
            try:
                # Is it Python source code or byte code read from a file?
                if kind not in (imp.PY_COMPILED, imp.PY_SOURCE):
                    # Fall back to built-in reload()
                    notify_error('Could not find source to reload (mod: %s)' % (modname,))
                    return
                if kind == imp.PY_SOURCE:
                    source = stream.read()
                    code = compile(source, filename, "exec")
                else:
                    import marshal
                    code = marshal.load(stream)
            finally:
                if stream:
                    stream.close()
            # Execute the code. We copy the module dict to a temporary; then
            # clear the module dict; then execute the new code in the module
            # dict; then swap things back and around. This trick (due to
            # Glyph Lefkowitz) ensures that the (readonly) __globals__
            # attribute of methods and functions is set to the correct dict
            # object.
            new_namespace = modns.copy()
            new_namespace.clear()
            new_namespace["__name__"] = modns["__name__"]
            Exec(code, new_namespace)
            # Now we get to the hard part
            oldnames = set(modns)
            newnames = set(new_namespace)

            # Create new tokens (note: not deleting existing)
            for name in newnames - oldnames:
                notify_info0('Added:', name, 'to namespace')
                self.found_change = True
                modns[name] = new_namespace[name]

            # Update in-place what we can
            for name in oldnames & newnames:
                self._update(modns, name, modns[name], new_namespace[name])

            self._handle_namespace(modns)

            # Only now that everything was patched, run the after-reload hooks.
            for c in self._on_finish_callbacks:
                c()
            del self._on_finish_callbacks[:]
        except:
            traceback.print_exc()

    def _handle_namespace(self, namespace, is_class_namespace=False):
        # Collect the __xreload_after_reload_update__ hook (if defined on the
        # module dict or class) to be invoked once the whole reload finishes.
        on_finish = None
        if is_class_namespace:
            xreload_after_update = getattr(namespace, '__xreload_after_reload_update__', None)
            if xreload_after_update is not None:
                self.found_change = True
                on_finish = lambda: xreload_after_update()

        elif '__xreload_after_reload_update__' in namespace:
            xreload_after_update = namespace['__xreload_after_reload_update__']
            self.found_change = True
            on_finish = lambda: xreload_after_update(namespace)

        if on_finish is not None:
            # If a client wants to know about it, give him a chance.
            self._on_finish_callbacks.append(on_finish)

    def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False):
        """Update oldobj, if possible in place, with newobj.

        If oldobj is immutable, this simply returns newobj.

        Args:
          namespace: dict or class holding the attribute (may be None)
          name: attribute name being updated (may be None)
          oldobj: the object to be updated
          newobj: the object used as the source for the update
          is_class_namespace: True when namespace is a class rather than a dict
        """
        try:
            notify_info2('Updating: ', oldobj)
            if oldobj is newobj:
                # Probably something imported
                return

            if type(oldobj) is not type(newobj):
                # Cop-out: if the type changed, give up
                notify_error('Type of: %s changed... Skipping.' % (oldobj,))
                return

            if isinstance(newobj, types.FunctionType):
                self._update_function(oldobj, newobj)
                return

            if isinstance(newobj, types.MethodType):
                self._update_method(oldobj, newobj)
                return

            if isinstance(newobj, classmethod):
                self._update_classmethod(oldobj, newobj)
                return

            if isinstance(newobj, staticmethod):
                self._update_staticmethod(oldobj, newobj)
                return

            if hasattr(types, 'ClassType'):
                # Python 2: old-style classes exist alongside new-style ones.
                classtype = (types.ClassType, type)  # object is not instance of types.ClassType.
            else:
                classtype = type

            if isinstance(newobj, classtype):
                self._update_class(oldobj, newobj)
                return

            # New: dealing with metaclasses.
            if hasattr(newobj, '__metaclass__') and hasattr(newobj, '__class__') and newobj.__metaclass__ == newobj.__class__:
                self._update_class(oldobj, newobj)
                return

            if namespace is not None:

                # Only call the user hook when the value really looks different.
                if oldobj != newobj and str(oldobj) != str(newobj) and repr(oldobj) != repr(newobj):
                    xreload_old_new = None
                    if is_class_namespace:
                        xreload_old_new = getattr(namespace, '__xreload_old_new__', None)
                        if xreload_old_new is not None:
                            self.found_change = True
                            xreload_old_new(name, oldobj, newobj)

                    elif '__xreload_old_new__' in namespace:
                        xreload_old_new = namespace['__xreload_old_new__']
                        xreload_old_new(namespace, name, oldobj, newobj)
                        self.found_change = True

                    # Too much information to the user...
                    # else:
                    #     notify_info0('%s NOT updated. Create __xreload_old_new__(name, old, new) for custom reload' % (name,))

        except:
            notify_error('Exception found when updating %s. Proceeding for other items.' % (name,))
            traceback.print_exc()

    # All of the following functions have the same signature as _update()

    def _update_function(self, oldfunc, newfunc):
        """Update a function object."""
        oldfunc.__doc__ = newfunc.__doc__
        oldfunc.__dict__.update(newfunc.__dict__)

        # Pick the code attribute name for this Python version (py3 vs py2).
        try:
            newfunc.__code__
            attr_name = '__code__'
        except AttributeError:
            newfunc.func_code
            attr_name = 'func_code'

        old_code = getattr(oldfunc, attr_name)
        new_code = getattr(newfunc, attr_name)
        if not code_objects_equal(old_code, new_code):
            notify_info0('Updated function code:', oldfunc)
            setattr(oldfunc, attr_name, new_code)
            self.found_change = True

        try:
            oldfunc.__defaults__ = newfunc.__defaults__
        except AttributeError:
            oldfunc.func_defaults = newfunc.func_defaults

        return oldfunc

    def _update_method(self, oldmeth, newmeth):
        """Update a method object."""
        # XXX What if im_func is not a function?
        if hasattr(oldmeth, 'im_func') and hasattr(newmeth, 'im_func'):
            self._update(None, None, oldmeth.im_func, newmeth.im_func)
        elif hasattr(oldmeth, '__func__') and hasattr(newmeth, '__func__'):
            self._update(None, None, oldmeth.__func__, newmeth.__func__)
        return oldmeth

    def _update_class(self, oldclass, newclass):
        """Update a class object."""
        olddict = oldclass.__dict__
        newdict = newclass.__dict__

        oldnames = set(olddict)
        newnames = set(newdict)

        for name in newnames - oldnames:
            setattr(oldclass, name, newdict[name])
            notify_info0('Added:', name, 'to', oldclass)
            self.found_change = True

        # Note: not removing old things...
        # for name in oldnames - newnames:
        #    notify_info('Removed:', name, 'from', oldclass)
        #    delattr(oldclass, name)

        for name in (oldnames & newnames) - set(['__dict__', '__doc__']):
            self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True)

        old_bases = getattr(oldclass, '__bases__', None)
        new_bases = getattr(newclass, '__bases__', None)
        if str(old_bases) != str(new_bases):
            notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,))

        self._handle_namespace(oldclass, is_class_namespace=True)

    def _update_classmethod(self, oldcm, newcm):
        """Update a classmethod update."""
        # While we can't modify the classmethod object itself (it has no
        # mutable attributes), we *can* extract the underlying function
        # (by calling __get__(), which returns a method object) and update
        # it in-place. We don't have the class available to pass to
        # __get__() but any object except None will do.
        self._update(None, None, oldcm.__get__(0), newcm.__get__(0))

    def _update_staticmethod(self, oldsm, newsm):
        """Update a staticmethod update."""
        # While we can't modify the staticmethod object itself (it has no
        # mutable attributes), we *can* extract the underlying function
        # (by calling __get__(), which returns it) and update it in-place.
        # We don't have the class available to pass to __get__() but any
        # object except None will do.
        self._update(None, None, oldsm.__get__(0), newsm.__get__(0))
|
||||
488
ptvsd/pydevd/_pydevd_bundle/pydevd_resolver.py
Normal file
488
ptvsd/pydevd/_pydevd_bundle/pydevd_resolver.py
Normal file
|
|
@ -0,0 +1,488 @@
|
|||
try:
|
||||
import StringIO
|
||||
except:
|
||||
import io as StringIO
|
||||
import traceback
|
||||
from os.path import basename
|
||||
|
||||
from _pydevd_bundle import pydevd_constants
|
||||
from _pydevd_bundle.pydevd_constants import dict_iter_items, dict_keys, xrange
|
||||
|
||||
|
||||
# Note: 300 is already a lot to see in the outline (after that the user should really use the shell to get things)
|
||||
# and this also means we'll pass less information to the client side (which makes debugging faster).
|
||||
MAX_ITEMS_TO_HANDLE = 300
|
||||
|
||||
TOO_LARGE_MSG = 'Too large to show contents. Max items to show: ' + str(MAX_ITEMS_TO_HANDLE)
|
||||
TOO_LARGE_ATTR = 'Unable to handle:'
|
||||
|
||||
#=======================================================================================================================
|
||||
# UnableToResolveVariableException
|
||||
#=======================================================================================================================
|
||||
class UnableToResolveVariableException(Exception):
    """Raised when a resolver cannot map an attribute key back to a value."""
    pass
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# InspectStub
|
||||
#=======================================================================================================================
|
||||
class InspectStub:
    """Fallback used when the real ``inspect`` module cannot be imported
    (e.g. on some restricted interpreters): it reports nothing as a builtin
    or routine, so no attribute is filtered out on that account."""

    def isbuiltin(self, _args):
        # Mirrors inspect.isbuiltin(); the stub never filters anything.
        return False

    def isroutine(self, _object):
        # Mirrors inspect.isroutine(); renamed the parameter from `object`
        # so it no longer shadows the builtin. Callers use it positionally.
        return False
|
||||
|
||||
try:
|
||||
import inspect
|
||||
except:
|
||||
inspect = InspectStub()
|
||||
|
||||
try:
|
||||
import java.lang #@UnresolvedImport
|
||||
except:
|
||||
pass
|
||||
|
||||
#types does not include a MethodWrapperType
|
||||
try:
|
||||
MethodWrapperType = type([].__str__)
|
||||
except:
|
||||
MethodWrapperType = None
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# See: pydevd_extension_api module for resolver interface
|
||||
#=======================================================================================================================
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# DefaultResolver
|
||||
#=======================================================================================================================
|
||||
class DefaultResolver:
    '''
    DefaultResolver is the class that'll actually resolve how to show some variable.
    '''

    def resolve(self, var, attribute):
        # Plain attribute access: the display key is the attribute name itself.
        return getattr(var, attribute)

    def get_dictionary(self, var, names=None):
        # MethodWrapperType could only be computed on CPython-like VMs; when it
        # is None we are presumably on Jython and use the reflection-based path.
        if MethodWrapperType:
            return self._getPyDictionary(var, names)
        else:
            return self._getJyDictionary(var)

    def _getJyDictionary(self, obj):
        # Jython-only: build a name -> value dict via java reflection so that
        # methods/fields inherited from superclasses and interfaces appear too.
        ret = {}
        found = java.util.HashMap()

        original = obj
        if hasattr(obj, '__class__') and obj.__class__ == java.lang.Class:

            # get info about superclasses
            classes = []
            classes.append(obj)
            c = obj.getSuperclass()
            while c != None:
                classes.append(c)
                c = c.getSuperclass()

            # get info about interfaces
            interfs = []
            for obj in classes:
                interfs.extend(obj.getInterfaces())
            classes.extend(interfs)

            # now is the time when we actually get info on the declared methods and fields
            for obj in classes:

                declaredMethods = obj.getDeclaredMethods()
                declaredFields = obj.getDeclaredFields()
                for i in xrange(len(declaredMethods)):
                    name = declaredMethods[i].getName()
                    ret[name] = declaredMethods[i].toString()
                    found.put(name, 1)

                for i in xrange(len(declaredFields)):
                    name = declaredFields[i].getName()
                    found.put(name, 1)
                    # if declaredFields[i].isAccessible():
                    declaredFields[i].setAccessible(True)
                    # ret[name] = declaredFields[i].get( declaredFields[i] )
                    try:
                        ret[name] = declaredFields[i].get(original)
                    except:
                        ret[name] = declaredFields[i].toString()

        # this simple dir does not always get all the info, that's why we have the part before
        # (e.g.: if we do a dir on String, some methods that are from other interfaces such as
        # charAt don't appear)
        try:
            d = dir(original)
            for name in d:
                # NOTE(review): `is not 1` is an identity check on an int
                # literal; it works via small-int caching but `!= 1` is the
                # intended comparison — confirm before changing.
                if found.get(name) is not 1:
                    ret[name] = getattr(original, name)
        except:
            # sometimes we're unable to do a dir
            pass

        return ret

    def get_names(self, var):
        # dir() covers most objects; some old extension types only expose __members__.
        names = dir(var)
        if not names and hasattr(var, '__members__'):
            names = var.__members__
        return names

    def _getPyDictionary(self, var, names=None):
        # Fixed filter policy: hide dunder, function and builtin members;
        # keep private ("_x") ones.
        filterPrivate = False
        filterSpecial = True
        filterFunction = True
        filterBuiltIn = True

        if not names:
            names = self.get_names(var)
        d = {}

        # Be aware that the order in which the filters are applied attempts to
        # optimize the operation by removing as many items as possible in the
        # first filters, leaving fewer items for later filters

        if filterBuiltIn or filterFunction:
            for n in names:
                if filterSpecial:
                    if n.startswith('__') and n.endswith('__'):
                        continue

                if filterPrivate:
                    if n.startswith('_') or n.endswith('__'):
                        continue

                try:
                    attr = getattr(var, n)

                    # filter builtins?
                    if filterBuiltIn:
                        if inspect.isbuiltin(attr):
                            continue

                    # filter functions?
                    if filterFunction:
                        if inspect.isroutine(attr) or isinstance(attr, MethodWrapperType):
                            continue
                except:
                    # if some error occurs getting it, let's put it to the user.
                    strIO = StringIO.StringIO()
                    traceback.print_exc(file=strIO)
                    attr = strIO.getvalue()

                d[ n ] = attr

        return d
|
||||
|
||||
|
||||
#=======================================================================================================================
|
||||
# DictResolver
|
||||
#=======================================================================================================================
|
||||
class DictResolver:
    """Resolver for dict contents. Also used directly for the global/local
    scopes, which already hold their items under plain names."""

    def resolve(self, dict, key):
        # *key* is either a plain name (scope lookup) or the display string
        # produced by get_dictionary(), i.e. 'repr_of_key (id_of_key)'.
        if key in ('__len__', TOO_LARGE_ATTR):
            # Synthetic entries added by get_dictionary(); nothing to resolve.
            return None

        if '(' not in key:
            # we have to treat that because the dict resolver is also used to directly resolve the global and local
            # scopes (which already have the items directly)
            try:
                return dict[key]
            except:
                return getattr(dict, key)

        # ok, we have to iterate over the items to find the one that matches the id, because that's the only way
        # to actually find the reference from the string we have before.
        expected_id = int(key.split('(')[-1][:-1])
        for key, val in dict_iter_items(dict):
            if id(key) == expected_id:
                return val

        raise UnableToResolveVariableException()

    def key_to_str(self, key):
        # str keys keep their repr; unicode on py2 gets an explicit u'' prefix;
        # any other key type is returned unchanged (it is %s-formatted later).
        if isinstance(key, str):
            return '%r' % key
        else:
            if not pydevd_constants.IS_PY3K:
                if isinstance(key, unicode):
                    return "u'%s'" % key
            return key

    def get_dictionary(self, dict):
        # Builds the display dict: each real key becomes 'repr (id)' so that
        # resolve() can find the original key object back; capped at
        # MAX_ITEMS_TO_HANDLE entries plus the synthetic '__len__' entry.
        ret = {}

        i = 0
        for key, val in dict_iter_items(dict):
            i += 1
            # we need to add the id because otherwise we cannot find the real object to get its contents later on.
            key = '%s (%s)' % (self.key_to_str(key), id(key))
            ret[key] = val
            if i > MAX_ITEMS_TO_HANDLE:
                ret[TOO_LARGE_ATTR] = TOO_LARGE_MSG
                break

        ret['__len__'] = len(dict)
        # in case if the class extends built-in type and has some additional fields
        additional_fields = defaultResolver.get_dictionary(dict)
        ret.update(additional_fields)
        return ret
|
||||
|
||||
|
||||
#=======================================================================================================================
# TupleResolver
#=======================================================================================================================
class TupleResolver:  #to enumerate tuples and lists
    """Presents a tuple or list as a dict of zero-padded index -> item."""

    def resolve(self, var, attribute):
        '''
        @param var: that's the original attribute
        @param attribute: that's the key passed in the dict (as a string)
        '''
        if attribute in ('__len__', TOO_LARGE_ATTR):
            return None
        try:
            return var[int(attribute)]
        except:
            # Not a valid index: fall back to a real attribute of the object.
            return getattr(var, attribute)

    def get_dictionary(self, var):
        length = len(var)
        contents = {}

        # Zero-pad the index keys so they sort correctly in the variables view.
        format_str = '%0' + str(int(len(str(length)))) + 'd'

        for position, item in enumerate(var):
            contents[format_str % position] = item

            if position + 1 > MAX_ITEMS_TO_HANDLE:
                contents[TOO_LARGE_ATTR] = TOO_LARGE_MSG
                break

        contents['__len__'] = len(var)
        # in case if the class extends built-in type and has some additional fields
        contents.update(defaultResolver.get_dictionary(var))
        return contents
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# SetResolver
#=======================================================================================================================
class SetResolver:
    '''
    Resolves a set as dict id(object)->object
    '''

    def resolve(self, var, attribute):
        # *attribute* is the id() of the element (as a string), as produced
        # by get_dictionary below.
        if attribute in ('__len__', TOO_LARGE_ATTR):
            return None

        try:
            attribute = int(attribute)
        except:
            # Not an id: fall back to a real attribute of the set object.
            return getattr(var, attribute)

        for v in var:
            if id(v) == attribute:
                return v

        raise UnableToResolveVariableException('Unable to resolve %s in %s' % (attribute, var))

    def get_dictionary(self, var):
        # Keys are element ids (sets have no stable indices); capped at
        # MAX_ITEMS_TO_HANDLE entries, plus __len__ and any extra attributes
        # declared by set subclasses.
        d = {}
        i = 0
        for item in var:
            i+= 1
            d[id(item)] = item

            if i > MAX_ITEMS_TO_HANDLE:
                d[TOO_LARGE_ATTR] = TOO_LARGE_MSG
                break


        d['__len__'] = len(var)
        # in case if the class extends built-in type and has some additional fields
        additional_fields = defaultResolver.get_dictionary(var)
        d.update(additional_fields)
        return d
|
||||
|
||||
|
||||
#=======================================================================================================================
# InstanceResolver
#=======================================================================================================================
class InstanceResolver:
    # Jython-only: resolves a Java object's fields through Java reflection
    # (getDeclaredField/getDeclaredFields).

    def resolve(self, var, attribute):
        # setAccessible(True) lets us read private fields too.
        field = var.__class__.getDeclaredField(attribute)
        field.setAccessible(True)
        return field.get(var)

    def get_dictionary(self, obj):
        ret = {}

        declaredFields = obj.__class__.getDeclaredFields()
        for i in xrange(len(declaredFields)):
            name = declaredFields[i].getName()
            try:
                declaredFields[i].setAccessible(True)
                ret[name] = declaredFields[i].get(obj)
            except:
                # Reflection may fail for some fields; report and keep going.
                traceback.print_exc()

        return ret
|
||||
|
||||
|
||||
#=======================================================================================================================
# JyArrayResolver
#=======================================================================================================================
class JyArrayResolver:
    '''
    This resolves a regular Object[] array from java
    '''

    def resolve(self, var, attribute):
        # *attribute* is the element index as a string.
        if attribute == '__len__':
            return None
        return var[int(attribute)]

    def get_dictionary(self, obj):
        # Maps index -> element, plus the array length under __len__.
        ret = {}

        for i in xrange(len(obj)):
            ret[ i ] = obj[i]

        ret['__len__'] = len(obj)
        return ret
|
||||
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# MultiValueDictResolver
#=======================================================================================================================
class MultiValueDictResolver(DictResolver):
    """Resolver for Django's MultiValueDict: a key resolves to the full list
    of values stored under it (via getlist), not just the last one."""

    def resolve(self, dict, key):
        """Resolve a '<key repr> (<id>)' display key back to dict.getlist(key)."""
        if key in ('__len__', TOO_LARGE_ATTR):
            return None

        #ok, we have to iterate over the items to find the one that matches the id, because that's the only way
        #to actually find the reference from the string we have before.
        expected_id = int(key.split('(')[-1][:-1])
        for dict_key in dict_keys(dict):
            if id(dict_key) == expected_id:
                # Only fetch the value list for the matching key; the original
                # code shadowed the 'key' parameter with the loop variable and
                # called getlist() for every key before checking the id.
                return dict.getlist(dict_key)

        raise UnableToResolveVariableException()
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# DjangoFormResolver
#=======================================================================================================================
class DjangoFormResolver(DefaultResolver):
    """Resolver for Django forms: hides the 'errors' property (evaluating it
    has side effects) and exposes the underlying '_errors' attribute instead."""

    has_errors_attr = False

    def get_names(self, var):
        # Reset the flag on every call: this resolver is used as a shared
        # module-level singleton, so a stale True from a previously inspected
        # form would otherwise make get_dictionary report 'errors' for forms
        # that do not have it.
        self.has_errors_attr = False

        names = dir(var)
        if not names and hasattr(var, '__members__'):
            names = var.__members__

        if "errors" in names:
            self.has_errors_attr = True
            names.remove("errors")
        return names

    def get_dictionary(self, var, names=None):
        # Do not call self.errors because it is property and has side effects
        d = defaultResolver.get_dictionary(var, self.get_names(var))
        if self.has_errors_attr:
            try:
                errors_attr = getattr(var, "_errors")
            except:
                errors_attr = None
            d["errors"] = errors_attr
        return d
|
||||
|
||||
|
||||
#=======================================================================================================================
# DequeResolver
#=======================================================================================================================
class DequeResolver(TupleResolver):
    """Resolves collections.deque like a tuple/list, additionally exposing
    its 'maxlen' attribute."""

    def get_dictionary(self, var):
        contents = TupleResolver.get_dictionary(self, var)
        contents['maxlen'] = getattr(var, 'maxlen', None)
        return contents
|
||||
|
||||
|
||||
#=======================================================================================================================
# FrameResolver
#=======================================================================================================================
class FrameResolver:
    """Resolves a Python frame object, exposing its internals, its call
    stack and its local variables."""

    def resolve(self, obj, attribute):
        if attribute == '__internals__':
            return defaultResolver.get_dictionary(obj)

        if attribute == 'stack':
            return self.get_frame_stack(obj)

        if attribute == 'f_locals':
            return obj.f_locals

        return None

    def get_dictionary(self, obj):
        return {
            '__internals__': defaultResolver.get_dictionary(obj),
            'stack': self.get_frame_stack(obj),
            'f_locals': obj.f_locals,
        }

    def get_frame_stack(self, frame):
        # Walk the f_back links, innermost frame first.
        names = []
        current = frame
        if current is not None:
            names.append(self.get_frame_name(current))
            while current.f_back:
                current = current.f_back
                names.append(self.get_frame_name(current))
        return names

    def get_frame_name(self, frame):
        if frame is None:
            return 'None'
        try:
            filename = basename(frame.f_code.co_filename)
            return 'frame: %s [%s:%s] id:%s' % (frame.f_code.co_name, filename, frame.f_lineno, id(frame))
        except:
            return 'frame object'
|
||||
|
||||
|
||||
# Module-level singleton resolver instances shared by the variable-inspection
# machinery (note: being singletons, resolvers must not keep per-object state).
defaultResolver = DefaultResolver()
dictResolver = DictResolver()
tupleResolver = TupleResolver()
instanceResolver = InstanceResolver()
jyArrayResolver = JyArrayResolver()
setResolver = SetResolver()
multiValueDictResolver = MultiValueDictResolver()
djangoFormResolver = DjangoFormResolver()
dequeResolver = DequeResolver()
frameResolver = FrameResolver()
|
||||
69
ptvsd/pydevd/_pydevd_bundle/pydevd_save_locals.py
Normal file
69
ptvsd/pydevd/_pydevd_bundle/pydevd_save_locals.py
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
"""
|
||||
Utility for saving locals.
|
||||
"""
|
||||
import sys
|
||||
|
||||
try:
|
||||
import types
|
||||
|
||||
frame_type = types.FrameType
|
||||
except:
|
||||
frame_type = type(sys._getframe())
|
||||
|
||||
|
||||
def is_save_locals_available():
    """Return True if this interpreter provides a way to write a frame's
    f_locals back into its fast-locals storage."""
    return save_locals_impl is not None
|
||||
|
||||
|
||||
def save_locals(frame):
    """
    Copy values from locals_dict into the fast stack slots in the given frame.

    Note: the 'save_locals' branch had a different approach wrapping the frame (much more code, but it gives ideas
    on how to save things partially, not the 'whole' locals).
    """
    if not isinstance(frame, frame_type):
        # Fix exception when changing Django variable (receiving DjangoTemplateFrame)
        return

    if save_locals_impl is not None:
        try:
            save_locals_impl(frame)
        except:
            # Best-effort: failing to write locals back must never break the debugger.
            pass
|
||||
|
||||
|
||||
def make_save_locals_impl():
    """
    Factory for the 'save_locals_impl' method. This may seem like a complicated pattern but it is essential that the method is created at
    module load time. Inner imports after module load time would cause an occasional debugger deadlock due to the importer lock and debugger
    lock being taken in different order in different threads.
    """
    try:
        if '__pypy__' in sys.builtin_module_names:
            import __pypy__  # @UnresolvedImport
            save_locals = __pypy__.locals_to_fast
    except:
        pass
    else:
        if '__pypy__' in sys.builtin_module_names:
            def save_locals_pypy_impl(frame):
                save_locals(frame)

            return save_locals_pypy_impl

    try:
        # ctypes.pythonapi is CPython-only; on other interpreters this raises
        # and we fall through to returning None.
        import ctypes
        locals_to_fast = ctypes.pythonapi.PyFrame_LocalsToFast
    except:
        pass
    else:
        def save_locals_ctypes_impl(frame):
            # Second argument is PyFrame_LocalsToFast's 'clear' flag;
            # 0 presumably keeps slots not present in f_locals -- confirm
            # against the CPython source if changing.
            locals_to_fast(ctypes.py_object(frame), ctypes.c_int(0))

        return save_locals_ctypes_impl

    return None


# Resolved once at import time (see docstring above for why).
save_locals_impl = make_save_locals_impl()
|
||||
206
ptvsd/pydevd/_pydevd_bundle/pydevd_signature.py
Normal file
206
ptvsd/pydevd/_pydevd_bundle/pydevd_signature.py
Normal file
|
|
@ -0,0 +1,206 @@
|
|||
|
||||
# Captures call/return signatures of functions in the project scope and
# serializes them as CMD_SIGNATURE_CALL_TRACE commands.
try:
    import trace
except ImportError:
    pass
else:
    trace._warn = lambda *args: None # workaround for http://bugs.python.org/issue17143 (PY-8706)

import os
from _pydevd_bundle.pydevd_comm import CMD_SIGNATURE_CALL_TRACE, NetCommand
from _pydevd_bundle import pydevd_xml
from _pydevd_bundle.pydevd_constants import xrange, dict_iter_items
from _pydevd_bundle import pydevd_utils
from _pydevd_bundle.pydevd_utils import get_clsname_for_code
|
||||
|
||||
|
||||
class Signature(object):
    """Holds one captured call signature: file, function name, typed
    arguments and (optionally) the return type."""

    def __init__(self, file, name):
        self.file = file
        self.name = name
        self.args = []          # list of (arg_name, type_name) tuples
        self.args_str = []      # "name:type" strings, kept in sync with self.args
        self.return_type = None

    def add_arg(self, name, type):
        """Append one argument and its type name."""
        self.args.append((name, type))
        self.args_str.append("%s:%s"%(name, type))

    def set_args(self, frame, recursive=False):
        """(Re)build the argument list from the frame's declared arguments.

        Fix: also reset args_str -- previously only self.args was cleared, so
        a second call (or a call after add_arg) left stale entries in the
        __str__ output.
        """
        self.args = []
        self.args_str = []

        code = frame.f_code
        locals = frame.f_locals

        for i in xrange(0, code.co_argcount):
            name = code.co_varnames[i]
            class_name = get_type_of_value(locals[name], recursive=recursive)

            self.add_arg(name, class_name)

    def __str__(self):
        return "%s %s(%s)"%(self.file, self.name, ", ".join(self.args_str))
|
||||
|
||||
|
||||
def get_type_of_value(value, ignore_module_name=('__main__', '__builtin__', 'builtins'), recursive=False):
    """Return a display name for the type of *value*.

    Builtin containers are rendered as List/Dict/Tuple, optionally annotated
    with the type(s) of their first element(s) when recursive=True. Types
    from modules not listed in *ignore_module_name* are qualified as
    'module.Class'.
    """
    value_type = type(value)
    class_name = value_type.__name__
    if class_name == 'instance':  # old-style classes
        value_type = value.__class__
        class_name = value_type.__name__

    module_name = getattr(value_type, '__module__', None)
    if module_name and module_name not in ignore_module_name:
        class_name = "%s.%s" % (module_name, class_name)

    if class_name == 'list':
        class_name = 'List'
        if recursive and len(value) > 0:
            # Sample only the first element.
            class_name += '[%s]' % get_type_of_value(value[0], recursive=recursive)
        return class_name

    if class_name == 'dict':
        class_name = 'Dict'
        if recursive and len(value) > 0:
            # Sample a single key/value pair.
            for (k, v) in dict_iter_items(value):
                class_name += '[%s, %s]' % (get_type_of_value(k, recursive=recursive),
                                            get_type_of_value(v, recursive=recursive))
                break
        return class_name

    if class_name == 'tuple':
        class_name = 'Tuple'
        if recursive and len(value) > 0:
            element_names = ', '.join(get_type_of_value(v, recursive=recursive) for v in value)
            class_name = class_name + '[' + element_names + ']'

    return class_name
|
||||
|
||||
|
||||
def _modname(path):
|
||||
"""Return a plausible module name for the path"""
|
||||
base = os.path.basename(path)
|
||||
filename, ext = os.path.splitext(base)
|
||||
return filename
|
||||
|
||||
|
||||
class SignatureFactory(object):
    """Creates Signature objects for frames and tracks which signatures were
    already sent (via self.cache)."""

    def __init__(self):
        # code object -> class name (or None when the code is not a method).
        self._caller_cache = {}
        self.cache = CallSignatureCache()

    def is_in_scope(self, filename):
        # Only collect signatures for files inside the project roots.
        return not pydevd_utils.not_in_project_roots(filename)

    def create_signature(self, frame, filename, with_args=True):
        """Build a Signature for *frame*; returns None (after printing the
        traceback) if anything goes wrong."""
        try:
            _, modulename, funcname = self.file_module_function_of(frame)
            signature = Signature(filename, funcname)
            if with_args:
                signature.set_args(frame, recursive=True)
            return signature
        except:
            import traceback
            traceback.print_exc()

    def file_module_function_of(self, frame): #this code is take from trace module and fixed to work with new-style classes
        # Returns (filename, modulename, funcname), where funcname is
        # qualified as 'Class.func' when the code object is a method.
        code = frame.f_code
        filename = code.co_filename
        if filename:
            modulename = _modname(filename)
        else:
            modulename = None

        funcname = code.co_name
        clsname = None
        if code in self._caller_cache:
            if self._caller_cache[code] is not None:
                clsname = self._caller_cache[code]
        else:
            self._caller_cache[code] = None
            clsname = get_clsname_for_code(code, frame)
            if clsname is not None:
                # cache the result - assumption is that new.* is
                # not called later to disturb this relationship
                # _caller_cache could be flushed if functions in
                # the new module get called.
                self._caller_cache[code] = clsname

        if clsname is not None:
            funcname = "%s.%s" % (clsname, funcname)

        return filename, modulename, funcname
|
||||
|
||||
|
||||
def get_signature_info(signature):
    """Return (file, name, space-joined argument types) for *signature*."""
    arg_types = ' '.join(arg_type for _, arg_type in signature.args)
    return signature.file, signature.name, arg_types
|
||||
|
||||
|
||||
def get_frame_info(frame):
    """Return (function name, current line number, filename) for *frame*."""
    code_obj = frame.f_code
    return code_obj.co_name, frame.f_lineno, code_obj.co_filename
|
||||
|
||||
|
||||
class CallSignatureCache(object):
    """Remembers which (file, function, argument-types) combinations were
    already seen, so the same signature is not sent twice."""

    def __init__(self):
        self.cache = {}

    def add(self, signature):
        """Record *signature* as seen."""
        filename, name, args_type = get_signature_info(signature)
        self.cache.setdefault(filename, {}).setdefault(name, {})[args_type] = None

    def is_in_cache(self, signature):
        """Return True if *signature* was recorded before."""
        filename, name, args_type = get_signature_info(signature)
        return args_type in self.cache.get(filename, {}).get(name, {})
|
||||
|
||||
|
||||
def create_signature_message(signature):
    """Serialize *signature* into a CMD_SIGNATURE_CALL_TRACE NetCommand with
    an XML payload (<call_signature> with nested <arg>/<return> elements)."""
    cmdTextList = ["<xml>"]

    cmdTextList.append('<call_signature file="%s" name="%s">' % (pydevd_xml.make_valid_xml_value(signature.file), pydevd_xml.make_valid_xml_value(signature.name)))

    for arg in signature.args:
        cmdTextList.append('<arg name="%s" type="%s"></arg>' % (pydevd_xml.make_valid_xml_value(arg[0]), pydevd_xml.make_valid_xml_value(arg[1])))

    if signature.return_type is not None:
        cmdTextList.append('<return type="%s"></return>' % (pydevd_xml.make_valid_xml_value(signature.return_type)))

    cmdTextList.append("</call_signature></xml>")
    cmdText = ''.join(cmdTextList)
    return NetCommand(CMD_SIGNATURE_CALL_TRACE, 0, cmdText)
|
||||
|
||||
|
||||
def send_signature_call_trace(dbg, frame, filename):
    """Send a call-signature message for *frame* if *filename* is in scope
    and the signature was not already sent; return True when a message was
    actually written."""
    if dbg.signature_factory and dbg.signature_factory.is_in_scope(filename):
        signature = dbg.signature_factory.create_signature(frame, filename)
        if signature is not None:
            if dbg.signature_factory.cache is not None:
                if not dbg.signature_factory.cache.is_in_cache(signature):
                    dbg.signature_factory.cache.add(signature)
                    dbg.writer.add_command(create_signature_message(signature))
                    return True
                else:
                    # we don't send signature if it is cached
                    return False
            else:
                # No cache configured: always send.
                dbg.writer.add_command(create_signature_message(signature))
                return True
    return False
|
||||
|
||||
|
||||
def send_signature_return_trace(dbg, frame, filename, return_value):
    """Send a return-type signature message for *frame* if *filename* is in
    scope; return True when a message was written.

    Fix: create_signature swallows exceptions and returns None, so guard
    against None instead of crashing on signature.return_type.
    """
    if dbg.signature_factory and dbg.signature_factory.is_in_scope(filename):
        signature = dbg.signature_factory.create_signature(frame, filename, with_args=False)
        if signature is None:
            return False
        signature.return_type = get_type_of_value(return_value, recursive=True)
        dbg.writer.add_command(create_signature_message(signature))
        return True

    return False
|
||||
|
||||
|
||||
|
||||
416
ptvsd/pydevd/_pydevd_bundle/pydevd_stackless.py
Normal file
416
ptvsd/pydevd/_pydevd_bundle/pydevd_stackless.py
Normal file
|
|
@ -0,0 +1,416 @@
|
|||
from __future__ import nested_scopes
|
||||
|
||||
import weakref
|
||||
import sys
|
||||
|
||||
from _pydevd_bundle.pydevd_comm import get_global_debugger
|
||||
from _pydevd_bundle.pydevd_constants import threading, call_only_once
|
||||
from _pydevd_bundle.pydevd_constants import dict_items
|
||||
from _pydevd_bundle.pydevd_custom_frames import update_custom_frame, remove_custom_frame, add_custom_frame
|
||||
from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE
|
||||
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame
|
||||
from pydevd_tracing import SetTrace
|
||||
import stackless # @UnresolvedImport
|
||||
|
||||
|
||||
# Used so that we don't loose the id (because we'll remove when it's not alive and would generate a new id for the
# same tasklet).
class TaskletToLastId:
    '''
    So, why not a WeakKeyDictionary?
    The problem is that removals from the WeakKeyDictionary will create a new tasklet (as it adds a callback to
    remove the key when it's garbage-collected), so, we can get into a recursion.
    '''

    def __init__(self):
        self.tasklet_ref_to_last_id = {}
        self._i = 0

    def get(self, tasklet):
        """Return the last id recorded for *tasklet*, or None."""
        return self.tasklet_ref_to_last_id.get(weakref.ref(tasklet))

    def __setitem__(self, tasklet, last_id):
        mapping = self.tasklet_ref_to_last_id
        mapping[weakref.ref(tasklet)] = last_id
        self._i += 1
        if self._i % 100 == 0:  # Collect at each 100 additions to the dict (no need to rush).
            dead_refs = [ref for ref in list(mapping.keys()) if ref() is None]
            for ref in dead_refs:
                del mapping[ref]


_tasklet_to_last_id = TaskletToLastId()
|
||||
|
||||
#=======================================================================================================================
# _TaskletInfo
#=======================================================================================================================
class _TaskletInfo:
    # Bookkeeping for one stackless tasklet: its debugger frame id, a weakref
    # to the tasklet and a human-readable display name.

    _last_id = 0  # class-wide counter for generating tasklet ids

    def __init__(self, tasklet_weakref, tasklet):
        self.frame_id = None
        self.tasklet_weakref = tasklet_weakref

        # Reuse the previous id for this tasklet if we saw it before.
        last_id = _tasklet_to_last_id.get(tasklet)
        if last_id is None:
            _TaskletInfo._last_id += 1
            last_id = _TaskletInfo._last_id
            _tasklet_to_last_id[tasklet] = last_id

        self._tasklet_id = last_id

        self.update_name()

    def update_name(self):
        # Recompute self.tasklet_name from the tasklet's current state,
        # name and owning thread.
        tasklet = self.tasklet_weakref()
        if tasklet:
            if tasklet.blocked:
                state = 'blocked'
            elif tasklet.paused:
                state = 'paused'
            elif tasklet.scheduled:
                state = 'scheduled'
            else:
                state = '<UNEXPECTED>'

            try:
                name = tasklet.name
            except AttributeError:
                if tasklet.is_main:
                    name = 'MainTasklet'
                else:
                    name = 'Tasklet-%s' % (self._tasklet_id,)

            thread_id = tasklet.thread_id
            if thread_id != -1:
                for thread in threading.enumerate():
                    if thread.ident == thread_id:
                        if thread.name:
                            thread_name = "of %s" % (thread.name,)
                        else:
                            thread_name = "of Thread-%s" % (thread.name or str(thread_id),)
                        break
                else:
                    # should not happen.
                    thread_name = "of Thread-%s" % (str(thread_id),)
                    thread = None
            else:
                # tasklet is no longer bound to a thread, because its thread ended
                thread_name = "without thread"

            tid = id(tasklet)
            tasklet = None
        else:
            state = 'dead'
            name = 'Tasklet-%s' % (self._tasklet_id,)
            thread_name = ""
            tid = '-'
        self.tasklet_name = '%s %s %s (%s)' % (state, name, thread_name, tid)

    if not hasattr(stackless.tasklet, "trace_function"):
        # bug https://bitbucket.org/stackless-dev/stackless/issue/42
        # is not fixed. Stackless releases before 2014
        # (the state attributes are unreliable there, so the name omits them)
        def update_name(self):
            tasklet = self.tasklet_weakref()
            if tasklet:
                try:
                    name = tasklet.name
                except AttributeError:
                    if tasklet.is_main:
                        name = 'MainTasklet'
                    else:
                        name = 'Tasklet-%s' % (self._tasklet_id,)

                thread_id = tasklet.thread_id
                for thread in threading.enumerate():
                    if thread.ident == thread_id:
                        if thread.name:
                            thread_name = "of %s" % (thread.name,)
                        else:
                            thread_name = "of Thread-%s" % (thread.name or str(thread_id),)
                        break
                else:
                    # should not happen.
                    thread_name = "of Thread-%s" % (str(thread_id),)
                    thread = None

                tid = id(tasklet)
                tasklet = None
            else:
                name = 'Tasklet-%s' % (self._tasklet_id,)
                thread_name = ""
                tid = '-'
            self.tasklet_name = '%s %s (%s)' % (name, thread_name, tid)
|
||||
|
||||
# weakref(tasklet) -> _TaskletInfo for every tasklet we have seen.
_weak_tasklet_registered_to_info = {}

#=======================================================================================================================
# get_tasklet_info
#=======================================================================================================================
def get_tasklet_info(tasklet):
    # Alias: getting the info registers the tasklet when needed.
    return register_tasklet_info(tasklet)


#=======================================================================================================================
# register_tasklet_info
#=======================================================================================================================
def register_tasklet_info(tasklet):
    # Return the _TaskletInfo for *tasklet*, creating (and registering) it
    # on first sight.
    r = weakref.ref(tasklet)
    info = _weak_tasklet_registered_to_info.get(r)
    if info is None:
        info = _weak_tasklet_registered_to_info[r] = _TaskletInfo(r, tasklet)

    return info
|
||||
|
||||
|
||||
# User-installed schedule callback (chained after ours); see set_schedule_callback.
_application_set_schedule_callback = None

#=======================================================================================================================
# _schedule_callback
#=======================================================================================================================
def _schedule_callback(prev, next):
    '''
    Called when a context is stopped or a new context is made runnable.

    Installs the debugger trace function in the tasklet about to run and
    maintains "custom frames" for suspended tasklets so they show up in the
    debugger's thread/stack views.
    '''
    try:
        if not prev and not next:
            return

        current_frame = sys._getframe()

        if next:
            register_tasklet_info(next)

            # Ok, making next runnable: set the tracing facility in it.
            debugger = get_global_debugger()
            if debugger is not None:
                next.trace_function = debugger.trace_dispatch
                frame = next.frame
                if frame is current_frame:
                    frame = frame.f_back
                if hasattr(frame, 'f_trace'):  # Note: can be None (but hasattr should cover for that too).
                    frame.f_trace = debugger.trace_dispatch

            debugger = None

        if prev:
            register_tasklet_info(prev)

        try:
            for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info):  # Make sure it's a copy!
                tasklet = tasklet_ref()
                if tasklet is None or not tasklet.alive:
                    # Garbage-collected already!
                    try:
                        del _weak_tasklet_registered_to_info[tasklet_ref]
                    except KeyError:
                        pass
                    if tasklet_info.frame_id is not None:
                        remove_custom_frame(tasklet_info.frame_id)
                else:
                    is_running = stackless.get_thread_info(tasklet.thread_id)[1] is tasklet
                    if tasklet is prev or (tasklet is not next and not is_running):
                        # the tasklet won't run after this scheduler action:
                        # - the tasklet is the previous tasklet
                        # - it is not the next tasklet and it is not an already running tasklet
                        frame = tasklet.frame
                        if frame is current_frame:
                            frame = frame.f_back
                        if frame is not None:
                            base = get_abs_path_real_path_and_base_from_frame(frame)[-1]
                            # print >>sys.stderr, "SchedCB: %r, %d, '%s', '%s'" % (tasklet, frame.f_lineno, _filename, base)
                            is_file_to_ignore = base in DONT_TRACE
                            if not is_file_to_ignore:
                                tasklet_info.update_name()
                                if tasklet_info.frame_id is None:
                                    tasklet_info.frame_id = add_custom_frame(frame, tasklet_info.tasklet_name, tasklet.thread_id)
                                else:
                                    update_custom_frame(tasklet_info.frame_id, frame, tasklet.thread_id, name=tasklet_info.tasklet_name)

                    elif tasklet is next or is_running:
                        if tasklet_info.frame_id is not None:
                            # Remove info about stackless suspended when it starts to run.
                            remove_custom_frame(tasklet_info.frame_id)
                            tasklet_info.frame_id = None

        finally:
            # Drop local references so we don't keep tasklets/frames alive.
            tasklet = None
            tasklet_info = None
            frame = None

    except:
        import traceback;traceback.print_exc()

    if _application_set_schedule_callback is not None:
        return _application_set_schedule_callback(prev, next)
|
||||
|
||||
if not hasattr(stackless.tasklet, "trace_function"):
    # Older versions of Stackless, released before 2014
    # This code does not work reliable! It is affected by several
    # stackless bugs: Stackless issues #44, #42, #40
    def _schedule_callback(prev, next):
        '''
        Called when a context is stopped or a new context is made runnable.

        Fallback for old Stackless without tasklet.trace_function: sets
        f_trace on the next tasklet's frame directly.
        '''
        try:
            if not prev and not next:
                return

            if next:
                register_tasklet_info(next)

                # Ok, making next runnable: set the tracing facility in it.
                debugger = get_global_debugger()
                if debugger is not None and next.frame:
                    if hasattr(next.frame, 'f_trace'):
                        next.frame.f_trace = debugger.trace_dispatch
                debugger = None

            if prev:
                register_tasklet_info(prev)

            try:
                for tasklet_ref, tasklet_info in dict_items(_weak_tasklet_registered_to_info):  # Make sure it's a copy!
                    tasklet = tasklet_ref()
                    if tasklet is None or not tasklet.alive:
                        # Garbage-collected already!
                        try:
                            del _weak_tasklet_registered_to_info[tasklet_ref]
                        except KeyError:
                            pass
                        if tasklet_info.frame_id is not None:
                            remove_custom_frame(tasklet_info.frame_id)
                    else:
                        if tasklet.paused or tasklet.blocked or tasklet.scheduled:
                            if tasklet.frame and tasklet.frame.f_back:
                                f_back = tasklet.frame.f_back
                                base = get_abs_path_real_path_and_base_from_frame(f_back)[-1]
                                is_file_to_ignore = base in DONT_TRACE
                                if not is_file_to_ignore:
                                    if tasklet_info.frame_id is None:
                                        tasklet_info.frame_id = add_custom_frame(f_back, tasklet_info.tasklet_name, tasklet.thread_id)
                                    else:
                                        update_custom_frame(tasklet_info.frame_id, f_back, tasklet.thread_id)

                        elif tasklet.is_current:
                            if tasklet_info.frame_id is not None:
                                # Remove info about stackless suspended when it starts to run.
                                remove_custom_frame(tasklet_info.frame_id)
                                tasklet_info.frame_id = None

            finally:
                # Drop local references so we don't keep tasklets/frames alive.
                tasklet = None
                tasklet_info = None
                f_back = None

        except:
            import traceback;traceback.print_exc()

        if _application_set_schedule_callback is not None:
            return _application_set_schedule_callback(prev, next)
|
||||
|
||||
|
||||
# Keep the original so our wrapper can delegate to it.
_original_setup = stackless.tasklet.setup

#=======================================================================================================================
# setup
#=======================================================================================================================
def setup(self, *args, **kwargs):
    '''
    Called to run a new tasklet: rebind the creation so that we can trace it.
    '''

    f = self.tempval
    def new_f(old_f, args, kwargs):

        debugger = get_global_debugger()
        if debugger is not None:
            SetTrace(debugger.trace_dispatch)

        debugger = None

        # Remove our own traces :)
        self.tempval = old_f
        register_tasklet_info(self)

        # Hover old_f to see the stackless being created and *args and **kwargs to see its parameters.
        return old_f(*args, **kwargs)

    # This is the way to tell stackless that the function it should execute is our function, not the original one. Note:
    # setting tempval is the same as calling bind(new_f), but it seems that there's no other way to get the currently
    # bound function, so, keeping on using tempval instead of calling bind (which is actually the same thing in a better
    # API).

    self.tempval = new_f

    return _original_setup(self, f, args, kwargs)
|
||||
|
||||
#=======================================================================================================================
# __call__
#=======================================================================================================================
def __call__(self, *args, **kwargs):
    '''
    Called to run a new tasklet: rebind the creation so that we can trace it.

    Delegates to the patched setup() above.
    '''

    return setup(self, *args, **kwargs)
|
||||
|
||||
|
||||
_original_run = stackless.run


#=======================================================================================================================
# run
#=======================================================================================================================
def run(*args, **kwargs):
    # Make sure the thread entering stackless.run() has the debugger trace function set
    # before delegating to the original scheduler entry point.
    debugger = get_global_debugger()
    if debugger is not None:
        SetTrace(debugger.trace_dispatch)
    debugger = None

    return _original_run(*args, **kwargs)
|
||||
|
||||
|
||||
|
||||
#=======================================================================================================================
# patch_stackless
#=======================================================================================================================
def patch_stackless():
    '''
    This function should be called to patch the stackless module so that new tasklets are properly tracked in the
    debugger.
    '''
    global _application_set_schedule_callback
    # Install our scheduler hook, keeping any callback the application had already registered
    # (it is chained from _schedule_callback instead of being replaced).
    _application_set_schedule_callback = stackless.set_schedule_callback(_schedule_callback)

    def set_schedule_callback(callable):
        # Replacement for stackless.set_schedule_callback: stores the application's callback
        # so our own hook stays installed; returns the previous application callback.
        global _application_set_schedule_callback
        old = _application_set_schedule_callback
        _application_set_schedule_callback = callable
        return old

    def get_schedule_callback():
        # Replacement for stackless.get_schedule_callback: reports the application's callback,
        # not our internal _schedule_callback.
        global _application_set_schedule_callback
        return _application_set_schedule_callback

    # Preserve the original docstrings on the replacements.
    set_schedule_callback.__doc__ = stackless.set_schedule_callback.__doc__
    if hasattr(stackless, "get_schedule_callback"):
        get_schedule_callback.__doc__ = stackless.get_schedule_callback.__doc__
    stackless.set_schedule_callback = set_schedule_callback
    stackless.get_schedule_callback = get_schedule_callback

    if not hasattr(stackless.tasklet, "trace_function"):
        # Older versions of Stackless, released before 2014
        __call__.__doc__ = stackless.tasklet.__call__.__doc__
        stackless.tasklet.__call__ = __call__

        setup.__doc__ = stackless.tasklet.setup.__doc__
        stackless.tasklet.setup = setup

        run.__doc__ = stackless.run.__doc__
        stackless.run = run


patch_stackless = call_only_once(patch_stackless)  # make repeated patching a no-op
|
||||
41
ptvsd/pydevd/_pydevd_bundle/pydevd_trace_api.py
Normal file
41
ptvsd/pydevd/_pydevd_bundle/pydevd_trace_api.py
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
# Default no-op implementations of pydevd's plugin hook API: every hook reports
# "not handled" (None/False).  Presumably real plugin modules provide functions
# with these same signatures and pydevd falls back to these stubs when no plugin
# is active -- TODO confirm against the plugin loading code.

def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name):
    # No plugin: no breakpoint object is created for this line.
    return None

def add_exception_breakpoint(plugin, pydb, type, exception):
    return False

def remove_exception_breakpoint(plugin, pydb, type, exception):
    return False

def get_breakpoints(plugin, pydb):
    return None

def can_not_skip(plugin, pydb, pydb_frame, frame):
    # False -> the tracing fast path may skip this frame.
    return False

def has_exception_breaks(plugin):
    return False

def has_line_breaks(plugin):
    return False

def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop):
    return False

def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop):
    return False

def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd):
    return False

def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args):
    return None

def suspend(plugin, pydb, thread, frame):
    return None

def exception_break(plugin, pydb, pydb_frame, frame, args, arg):
    return None

def change_variable(plugin, frame, attr, expression):
    return False
|
||||
74
ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py
Normal file
74
ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch.py
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
# Defines which version of the trace_dispatch we'll use.
|
||||
# Should give warning only here if cython is not available but supported.
|
||||
|
||||
import os
|
||||
import sys
|
||||
from _pydevd_bundle.pydevd_constants import CYTHON_SUPPORTED
|
||||
|
||||
|
||||
# PYDEVD_USE_CYTHON: 'YES' forces the cython speedups, 'NO' forces the pure-python
# version, unset (None) means: try cython and fall back to pure python with a warning.
use_cython = os.getenv('PYDEVD_USE_CYTHON', None)
dirname = os.path.dirname(os.path.dirname(__file__))
# Do not show incorrect warning for .egg files for Remote debugger
if not CYTHON_SUPPORTED or dirname.endswith('.egg'):
    # Do not try to import cython extensions if cython isn't supported
    use_cython = 'NO'
|
||||
|
||||
|
||||
def delete_old_compiled_extensions():
    """Remove stale compiled cython speedups (pydevd*.so files and the build dir).

    Called when the compiled extension's version number no longer matches the
    pure-python sources, so the outdated binaries are not picked up again.
    On failure, a one-time warning tells the user to delete the files manually.
    """
    pydev_dir = os.path.dirname(os.path.dirname(__file__))
    _pydevd_bundle_dir = os.path.dirname(__file__)
    _pydevd_frame_eval_dir = os.path.join(pydev_dir, '_pydevd_frame_eval')
    try:
        import shutil
        # Same order as before: bundle dir first, then the frame-eval dir.
        for compiled_dir in (_pydevd_bundle_dir, _pydevd_frame_eval_dir):
            for entry in os.listdir(compiled_dir):
                if entry.startswith("pydevd") and entry.endswith(".so"):
                    os.remove(os.path.join(compiled_dir, entry))
        build_dir = os.path.join(pydev_dir, "build")
        if os.path.exists(build_dir):
            shutil.rmtree(os.path.join(pydev_dir, "build"))
    except OSError:
        from _pydev_bundle.pydev_monkey import log_error_once
        log_error_once("warning: failed to delete old cython speedups. Please delete all *.so files from the directories "
                       "\"%s\" and \"%s\"" % (_pydevd_bundle_dir, _pydevd_frame_eval_dir))
|
||||
|
||||
|
||||
# Select which trace_dispatch implementation this module re-exports,
# based on the PYDEVD_USE_CYTHON value computed above.
if use_cython == 'YES':
    # We must import the cython version if forcing cython
    from _pydevd_bundle.pydevd_cython_wrapper import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips
    def trace_dispatch(py_db, frame, event, arg):
        return _trace_dispatch(py_db, frame, event, arg)

elif use_cython == 'NO':
    # Use the regular version if not forcing cython
    from _pydevd_bundle.pydevd_trace_dispatch_regular import trace_dispatch, global_cache_skips, global_cache_frame_skips  # @UnusedImport

elif use_cython is None:
    # Regular: use fallback if not found and give message to user
    try:
        from _pydevd_bundle.pydevd_cython_wrapper import trace_dispatch as _trace_dispatch, global_cache_skips, global_cache_frame_skips
        def trace_dispatch(py_db, frame, event, arg):
            return _trace_dispatch(py_db, frame, event, arg)

        # This version number is always available
        from _pydevd_bundle.pydevd_additional_thread_info_regular import version as regular_version
        # This version number from the already compiled cython extension
        from _pydevd_bundle.pydevd_cython_wrapper import version as cython_version
        if cython_version != regular_version:
            # Compiled extension is out of date with the sources: drop it and fall back.
            delete_old_compiled_extensions()
            raise ImportError()

    except ImportError:
        from _pydevd_bundle.pydevd_additional_thread_info_regular import PyDBAdditionalThreadInfo  # @UnusedImport
        from _pydevd_bundle.pydevd_trace_dispatch_regular import trace_dispatch, global_cache_skips, global_cache_frame_skips  # @UnusedImport
        from _pydev_bundle.pydev_monkey import log_error_once

        log_error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build." % (
            sys.executable, os.path.join(dirname, 'setup_cython.py')))

else:
    raise RuntimeError('Unexpected value for PYDEVD_USE_CYTHON: %s (accepted: YES, NO)' % (use_cython,))
|
||||
|
||||
|
||||
229
ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py
Normal file
229
ptvsd/pydevd/_pydevd_bundle/pydevd_trace_dispatch_regular.py
Normal file
|
|
@ -0,0 +1,229 @@
|
|||
import traceback
|
||||
|
||||
from _pydev_bundle.pydev_is_thread_alive import is_thread_alive
|
||||
from _pydev_imps._pydev_saved_modules import threading
|
||||
from _pydevd_bundle.pydevd_constants import get_thread_id, IS_IRONPYTHON
|
||||
from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE
|
||||
from _pydevd_bundle.pydevd_kill_all_pydevd_threads import kill_all_pydev_threads
|
||||
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER
|
||||
from pydevd_tracing import SetTrace
|
||||
# IFDEF CYTHON
|
||||
# # In Cython, PyDBAdditionalThreadInfo is bundled in the file.
|
||||
# from cpython.object cimport PyObject
|
||||
# from cpython.ref cimport Py_INCREF, Py_XDECREF
|
||||
# ELSE
|
||||
from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo
|
||||
from _pydevd_bundle.pydevd_frame import PyDBFrame
|
||||
# ENDIF
|
||||
|
||||
try:
    from _pydevd_bundle.pydevd_signature import send_signature_call_trace
except ImportError:
    # Signature support is optional: degrade to a no-op when unavailable.
    def send_signature_call_trace(*args, **kwargs):
        pass

# Hoisted to module level: attribute lookups are costly in the tracing hot path.
threadingCurrentThread = threading.currentThread
get_file_type = DONT_TRACE.get  # basename -> file type code (or None for traceable files)
|
||||
|
||||
# IFDEF CYTHON -- DONT EDIT THIS FILE (it is automatically generated)
|
||||
# cdef dict global_cache_skips
|
||||
# cdef dict global_cache_frame_skips
|
||||
# ELSE
|
||||
# ENDIF
|
||||
|
||||
|
||||
# Cache where we should keep that we completely skipped entering some context.
# It needs to be invalidated when:
# - Breakpoints are changed
# It can be used when running regularly (without step over/step in/step return)
global_cache_skips = {}
global_cache_frame_skips = {}


def trace_dispatch(py_db, frame, event, arg):
    # Entry point installed via sys.settrace: ensures the current thread has its
    # PyDBAdditionalThreadInfo, then delegates all further tracing of this thread
    # to a ThreadTracer instance.
    t = threadingCurrentThread()

    if getattr(t, 'pydev_do_not_trace', None):
        # Threads flagged by pydevd itself (its own helper threads) are never traced.
        return None

    try:
        additional_info = t.additional_info
        if additional_info is None:
            raise AttributeError()
    except:
        additional_info = t.additional_info = PyDBAdditionalThreadInfo()

    thread_tracer = ThreadTracer((py_db, t, additional_info, global_cache_skips, global_cache_frame_skips))
    # IFDEF CYTHON
    # t._tracer = thread_tracer # Hack for cython to keep it alive while the thread is alive (just the method in the SetTrace is not enough).
    # ELSE
    # ENDIF
    SetTrace(thread_tracer.__call__)
    return thread_tracer.__call__(frame, event, arg)
|
||||
|
||||
# IFDEF CYTHON
|
||||
# cdef class SafeCallWrapper:
|
||||
# cdef method_object
|
||||
# def __init__(self, method_object):
|
||||
# self.method_object = method_object
|
||||
# def __call__(self, *args):
|
||||
# #Cannot use 'self' once inside the delegate call since we are borrowing the self reference f_trace field
|
||||
# #in the frame, and that reference might get destroyed by set trace on frame and parents
|
||||
# cdef PyObject* method_obj = <PyObject*> self.method_object
|
||||
# Py_INCREF(<object>method_obj)
|
||||
# ret = (<object>method_obj)(*args)
|
||||
# Py_XDECREF (method_obj)
|
||||
# return SafeCallWrapper(ret) if ret is not None else None
|
||||
# cdef class ThreadTracer:
|
||||
# cdef public tuple _args;
|
||||
# def __init__(self, tuple args):
|
||||
# self._args = args
|
||||
# ELSE
|
||||
class ThreadTracer:
    """Per-thread trace callback: decides, per frame, whether tracing can be skipped
    entirely or must be handed to a PyDBFrame.  (Cython variant declared above.)"""
    def __init__(self, args):
        # args = (py_db, thread, additional_info, cache_skips, frame_skips_cache)
        self._args = args
# ENDIF


    def __call__(self, frame, event, arg):
        ''' This is the callback used when we enter some context in the debugger.

        We also decorate the thread we are in with info about the debugging.
        The attributes added are:
            pydev_state
            pydev_step_stop
            pydev_step_cmd
            pydev_notify_kill

        :param PyDB py_db:
            This is the global debugger (this method should actually be added as a method to it).
        '''
        # IFDEF CYTHON
        # cdef str filename;
        # cdef str base;
        # cdef int pydev_step_cmd;
        # cdef tuple cache_key;
        # cdef dict cache_skips;
        # cdef bint is_stepping;
        # cdef tuple abs_path_real_path_and_base;
        # cdef PyDBAdditionalThreadInfo additional_info;
        # ENDIF
        # print('ENTER: trace_dispatch', frame.f_code.co_filename, frame.f_lineno, event, frame.f_code.co_name)
        py_db, t, additional_info, cache_skips, frame_skips_cache = self._args
        pydev_step_cmd = additional_info.pydev_step_cmd
        is_stepping = pydev_step_cmd != -1

        try:
            if py_db._finish_debugging_session:
                if not py_db._termination_event_set:
                    #that was not working very well because jython gave some socket errors
                    try:
                        if py_db.output_checker is None:
                            kill_all_pydev_threads()
                    except:
                        traceback.print_exc()
                    py_db._termination_event_set = True
                return None

            # if thread is not alive, cancel trace_dispatch processing
            if not is_thread_alive(t):
                py_db._process_thread_not_alive(get_thread_id(t))
                return None  # suspend tracing

            try:
                # Make fast path faster!
                abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename]
            except:
                abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame)

            if py_db.thread_analyser is not None:
                py_db.thread_analyser.log_event(frame)

            if py_db.asyncio_analyser is not None:
                py_db.asyncio_analyser.log_event(frame)

            filename = abs_path_real_path_and_base[1]
            # Note: it's important that the context name is also given because we may hit something once
            # in the global context and another in the local context.
            cache_key = (frame.f_lineno, frame.f_code.co_name, filename)
            if not is_stepping and cache_key in cache_skips:
                # print('skipped: trace_dispatch (cache hit)', cache_key, frame.f_lineno, event, frame.f_code.co_name)
                return None

            file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug threading or anything related to pydevd

            if file_type is not None:
                if file_type == 1: # inlining LIB_FILE = 1
                    if py_db.not_in_scope(filename):
                        # print('skipped: trace_dispatch (not in scope)', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type)
                        cache_skips[cache_key] = 1
                        return None
                else:
                    # print('skipped: trace_dispatch', abs_path_real_path_and_base[-1], frame.f_lineno, event, frame.f_code.co_name, file_type)
                    cache_skips[cache_key] = 1
                    return None

            if is_stepping:
                if py_db.is_filter_enabled and py_db.is_ignored_by_filters(filename):
                    # ignore files matching stepping filters
                    return None
                if py_db.is_filter_libraries and py_db.not_in_scope(filename):
                    # ignore library files while stepping
                    return None

            # print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name, file_type)
            if additional_info.is_tracing:
                return None #we don't wan't to trace code invoked from pydevd_frame.trace_dispatch

            if event == 'call' and py_db.signature_factory:
                # We can only have a call when entering a context, so, check at this level, not at the PyDBFrame.
                send_signature_call_trace(py_db, frame, filename)

            # Just create PyDBFrame directly (removed support for Python versions < 2.5, which required keeping a weak
            # reference to the frame).
            ret = PyDBFrame((py_db, filename, additional_info, t, frame_skips_cache, (frame.f_code.co_name, frame.f_code.co_firstlineno, filename))).trace_dispatch(frame, event, arg)
            if ret is None:
                # PyDBFrame declined to keep tracing this context: remember that so the
                # next hit on the same (line, name, file) is skipped without any work.
                cache_skips[cache_key] = 1
                return None

            # IFDEF CYTHON
            # return SafeCallWrapper(ret)
            # ELSE
            return ret
            # ENDIF

        except SystemExit:
            return None

        except Exception:
            if py_db._finish_debugging_session:
                return None # Don't log errors when we're shutting down.
            # Log it
            try:
                if traceback is not None:
                    # This can actually happen during the interpreter shutdown in Python 2.7
                    traceback.print_exc()
            except:
                # Error logging? We're really in the interpreter shutdown...
                # (https://github.com/fabioz/PyDev.Debugger/issues/8)
                pass
            return None
|
||||
|
||||
|
||||
if IS_IRONPYTHON:
    # This is far from ideal, as we'll leak frames (we'll always have the last created frame, not really
    # the last topmost frame saved -- this should be Ok for our usage, but it may leak frames and things
    # may live longer... as IronPython is garbage-collected, things should live longer anyways, so, it
    # shouldn't be an issue as big as it's in CPython -- it may still be annoying, but this should
    # be a reasonable workaround until IronPython itself is able to provide that functionality).
    #
    # See: https://github.com/IronLanguages/main/issues/1630
    from _pydevd_bundle.pydevd_additional_thread_info_regular import _tid_to_last_frame

    _original_call = ThreadTracer.__call__

    def __call__(self, frame, event, arg):
        # Record the latest frame seen per thread id so pydevd can locate a
        # thread's stack on IronPython, then delegate to the original tracer.
        _tid_to_last_frame[self._args[1].ident] = frame
        return _original_call(self, frame, event, arg)

    ThreadTracer.__call__ = __call__
|
||||
|
||||
108
ptvsd/pydevd/_pydevd_bundle/pydevd_traceproperty.py
Normal file
108
ptvsd/pydevd/_pydevd_bundle/pydevd_traceproperty.py
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
'''For debug purpose we are replacing actual builtin property by the debug property
|
||||
'''
|
||||
from _pydevd_bundle.pydevd_comm import get_global_debugger
|
||||
from _pydevd_bundle.pydevd_constants import DebugInfoHolder, IS_PY2
|
||||
import pydevd_tracing
|
||||
|
||||
#=======================================================================================================================
# replace_builtin_property
#=======================================================================================================================
def replace_builtin_property(new_property=None):
    # Swap the builtin `property` for a debugger-aware implementation (DebugProperty
    # by default) and return the original so the caller can restore it later.
    # Failures are only logged (when debug tracing is on) -- this is best-effort.
    if new_property is None:
        new_property = DebugProperty
    original = property
    if IS_PY2:
        try:
            import __builtin__
            __builtin__.__dict__['property'] = new_property
        except:
            if DebugInfoHolder.DEBUG_TRACE_LEVEL:
                import traceback;traceback.print_exc() #@Reimport
    else:
        try:
            import builtins #Python 3.0 does not have the __builtin__ module @UnresolvedImport
            builtins.__dict__['property'] = new_property
        except:
            if DebugInfoHolder.DEBUG_TRACE_LEVEL:
                import traceback;traceback.print_exc() #@Reimport
    return original
|
||||
|
||||
|
||||
#=======================================================================================================================
# DebugProperty
#=======================================================================================================================
class DebugProperty(object):
    """A custom property which allows python property to get
    controlled by the debugger and selectively disable/re-enable
    the tracing.
    """


    def __init__(self, fget=None, fset=None, fdel=None, doc=None):
        # Same constructor signature as the builtin property().
        self.fget = fget
        self.fset = fset
        self.fdel = fdel
        self.__doc__ = doc


    def __get__(self, obj, objtype=None):
        if obj is None:
            # Accessed on the class, not an instance: behave like builtin property.
            return self
        global_debugger = get_global_debugger()
        try:
            # Optionally suspend tracing while the user's getter runs.
            if global_debugger is not None and global_debugger.disable_property_getter_trace:
                pydevd_tracing.SetTrace(None)
            if self.fget is None:
                raise AttributeError("unreadable attribute")
            return self.fget(obj)
        finally:
            # Always restore the debugger trace function, even if the getter raised.
            if global_debugger is not None:
                pydevd_tracing.SetTrace(global_debugger.trace_dispatch)


    def __set__(self, obj, value):
        global_debugger = get_global_debugger()
        try:
            # Optionally suspend tracing while the user's setter runs.
            if global_debugger is not None and global_debugger.disable_property_setter_trace:
                pydevd_tracing.SetTrace(None)
            if self.fset is None:
                raise AttributeError("can't set attribute")
            self.fset(obj, value)
        finally:
            if global_debugger is not None:
                pydevd_tracing.SetTrace(global_debugger.trace_dispatch)


    def __delete__(self, obj):
        global_debugger = get_global_debugger()
        try:
            # Optionally suspend tracing while the user's deleter runs.
            if global_debugger is not None and global_debugger.disable_property_deleter_trace:
                pydevd_tracing.SetTrace(None)
            if self.fdel is None:
                raise AttributeError("can't delete attribute")
            self.fdel(obj)
        finally:
            if global_debugger is not None:
                pydevd_tracing.SetTrace(global_debugger.trace_dispatch)


    def getter(self, fget):
        """Overriding getter decorator for the property
        """
        self.fget = fget
        return self


    def setter(self, fset):
        """Overriding setter decorator for the property
        """
        self.fset = fset
        return self


    def deleter(self, fdel):
        """Overriding deleter decorator for the property
        """
        self.fdel = fdel
        return self
|
||||
|
||||
205
ptvsd/pydevd/_pydevd_bundle/pydevd_utils.py
Normal file
205
ptvsd/pydevd/_pydevd_bundle/pydevd_utils.py
Normal file
|
|
@ -0,0 +1,205 @@
|
|||
from __future__ import nested_scopes
|
||||
import traceback
|
||||
import os
|
||||
|
||||
try:
|
||||
from urllib import quote
|
||||
except:
|
||||
from urllib.parse import quote # @UnresolvedImport
|
||||
|
||||
import inspect
|
||||
from _pydevd_bundle.pydevd_constants import IS_PY3K
|
||||
import sys
|
||||
from _pydev_bundle import pydev_log
|
||||
|
||||
def save_main_module(file, module_name):
    """Swap the current `__main__` out so a debugged script can run as `__main__`.

    The module currently registered as `__main__` (i.e. pydevd itself) is
    re-registered in sys.modules under *module_name*, and a fresh, empty module
    is installed as `__main__` with `__file__` set to *file* (and `__loader__`
    copied over when present).

    :param file: path of the script about to be executed as `__main__`.
    :param module_name: name under which to keep the previous `__main__`.
    :return: the newly created `__main__` module.
    """
    # patch provided by: Scott Schlesier - when script is run, it does not
    # use globals from pydevd:
    # This will prevent the pydevd script from contaminating the namespace for the script to be debugged
    # pretend pydevd is not the main module, and
    # convince the file to be debugged that it was loaded as main
    sys.modules[module_name] = sys.modules['__main__']
    sys.modules[module_name].__name__ = module_name

    # `imp.new_module` was deprecated in Python 3.4 and removed in 3.12;
    # `types.ModuleType` is the direct equivalent on all supported versions.
    import types
    m = types.ModuleType('__main__')
    sys.modules['__main__'] = m
    if hasattr(sys.modules[module_name], '__loader__'):
        m.__loader__ = getattr(sys.modules[module_name], '__loader__')
    m.__file__ = file

    return m
|
||||
|
||||
|
||||
def to_number(x):
    """Parse *x* as a float when it is a string; return None when it cannot be parsed.

    Handles plain numeric strings ("1.5") as well as the "10 (...)" form, where
    only the part before the parenthesis (minus the separating character) is used.
    """
    if not is_string(x):
        return None

    try:
        return float(x)
    except ValueError:
        pass

    paren = x.find('(')
    if paren != -1:
        # Same slicing as always used here: the character just before '(' -- a
        # separating space in practice -- is dropped along with the parenthesis.
        head = x[0:paren - 1]
        try:
            return float(head)
        except ValueError:
            pass
    return None
|
||||
|
||||
def compare_object_attrs_key(x):
    # Sort key for ordering an object's attribute names: '__len__' is forced after
    # every regular attribute; everything else sorts by its string representation.
    if '__len__' == x:
        # NOTE(review): to_number('__len__') is always None, so as_number ends up
        # 99999999 -- kept as-is to preserve the original structure.
        as_number = to_number(x)
        if as_number is None:
            as_number = 99999999
        # __len__ should appear after other attributes in a list.
        return (1, as_number)
    else:
        return (-1, to_string(x))
|
||||
|
||||
if IS_PY3K:
    def is_string(x):
        # Python 3: only `str` counts as a string.
        return isinstance(x, str)

else:
    def is_string(x):
        # Python 2: accept both `str` and `unicode` via `basestring`.
        return isinstance(x, basestring)
|
||||
|
||||
def to_string(x):
    """Return *x* unchanged when it is already a string, otherwise its str() form."""
    return x if is_string(x) else str(x)
|
||||
|
||||
def print_exc():
    """Best-effort wrapper around traceback.print_exc.

    Guards against interpreter shutdown, where the module-level `traceback`
    reference may already have been cleared to None.
    """
    tb = traceback
    if tb is not None:
        tb.print_exc()
|
||||
|
||||
if IS_PY3K:
    def quote_smart(s, safe='/'):
        # Python 3: urllib.parse.quote handles str directly.
        return quote(s, safe)
else:
    def quote_smart(s, safe='/'):
        # Python 2: quote() cannot deal with unicode; encode to utf-8 first.
        if isinstance(s, unicode):
            s = s.encode('utf-8')

        return quote(s, safe)
|
||||
|
||||
|
||||
def get_clsname_for_code(code, frame):
    """Best-effort lookup of the class that owns *code*.

    Inspects the first argument of the executing function (`self` for instance
    methods, `cls` for classmethods) found in *frame*'s locals, and returns the
    class name when that class has a method whose code object is *code*;
    otherwise returns None.
    """
    if not code.co_varnames:
        return None

    # First positional is the receiver (`self`/`cls`) for methods.
    receiver_name = code.co_varnames[0]
    if receiver_name not in frame.f_locals:
        return None

    receiver = frame.f_locals[receiver_name]
    klass = receiver if inspect.isclass(receiver) else receiver.__class__

    candidate = getattr(klass, code.co_name, None)
    if candidate is None:
        return None

    # func_code on Python 2, __code__ on Python 3.
    method_code = getattr(candidate, 'func_code', None)
    if method_code is None:
        method_code = getattr(candidate, '__code__', None)
    if method_code and method_code == code:
        return klass.__name__

    return None
|
||||
|
||||
|
||||
def _get_project_roots(project_roots_cache=[]):
    # Note: the project_roots_cache is the same instance among the many calls to the method
    # (an intentional mutable default used as a cache: IDE_PROJECT_ROOTS is parsed once).
    if not project_roots_cache:
        roots = os.getenv('IDE_PROJECT_ROOTS', '').split(os.pathsep)
        pydev_log.debug("IDE_PROJECT_ROOTS %s\n" % roots)
        new_roots = []
        for root in roots:
            new_roots.append(os.path.normcase(root))
        project_roots_cache.append(new_roots)
    return project_roots_cache[-1] # returns the project roots with case normalized


def _get_library_roots(library_roots_cache=[]):
    # Note: the library_roots_cache is the same instance among the many calls to the method
    # (an intentional mutable default used as a cache: LIBRARY_ROOTS is parsed once).
    if not library_roots_cache:
        roots = os.getenv('LIBRARY_ROOTS', '').split(os.pathsep)
        pydev_log.debug("LIBRARY_ROOTS %s\n" % roots)
        new_roots = []
        for root in roots:
            new_roots.append(os.path.normcase(root))
        library_roots_cache.append(new_roots)
    return library_roots_cache[-1] # returns the project roots with case normalized
|
||||
|
||||
|
||||
def not_in_project_roots(filename, filename_to_not_in_scope_cache={}):
    # Note: the filename_to_not_in_scope_cache is the same instance among the many calls to the method
    # Returns True when *filename* is outside the IDE project roots (or lies under a
    # library root even though it matched a project root); results are memoized.
    try:
        return filename_to_not_in_scope_cache[filename]
    except:
        project_roots = _get_project_roots()
        original_filename = filename
        # Relative paths (except pseudo-files like '<string>') are made absolute
        # before the case-normalized prefix comparison.
        if not os.path.isabs(filename) and not filename.startswith('<'):
            filename = os.path.abspath(filename)
        filename = os.path.normcase(filename)
        for root in project_roots:
            if len(root) > 0 and filename.startswith(root):
                filename_to_not_in_scope_cache[original_filename] = False
                break
        else: # for else (only called if the break wasn't reached).
            filename_to_not_in_scope_cache[original_filename] = True

        if not filename_to_not_in_scope_cache[original_filename]:
            # additional check if interpreter is situated in a project directory
            library_roots = _get_library_roots()
            for root in library_roots:
                if root != '' and filename.startswith(root):
                    filename_to_not_in_scope_cache[original_filename] = True

        # at this point it must be loaded.
        return filename_to_not_in_scope_cache[original_filename]
|
||||
|
||||
|
||||
def is_filter_enabled():
    """True when stepping filters are configured via the PYDEVD_FILTERS env var."""
    return 'PYDEVD_FILTERS' in os.environ
|
||||
|
||||
|
||||
def is_filter_libraries():
    # True when stepping should skip library (non-project) files,
    # controlled by the PYDEVD_FILTER_LIBRARIES environment variable.
    is_filter = os.getenv('PYDEVD_FILTER_LIBRARIES') is not None
    pydev_log.debug("PYDEVD_FILTER_LIBRARIES %s\n" % is_filter)
    return is_filter


def _get_stepping_filters(filters_cache=[]):
    # Parse PYDEVD_FILTERS (';'-separated fnmatch patterns);
    # cached in the intentionally shared mutable default.
    if not filters_cache:
        filters = os.getenv('PYDEVD_FILTERS', '').split(';')
        pydev_log.debug("PYDEVD_FILTERS %s\n" % filters)
        new_filters = []
        for new_filter in filters:
            new_filters.append(new_filter)
        filters_cache.append(new_filters)
    return filters_cache[-1]
|
||||
|
||||
|
||||
def is_ignored_by_filter(filename, filename_to_ignored_by_filters_cache={}):
    """Check whether *filename* matches any PYDEVD_FILTERS fnmatch pattern.

    Results are memoized in the (intentionally shared) mutable-default dict, so
    each filename is matched against the filters only once.

    :param filename: file name to test against the stepping filters.
    :return: True when a stepping filter matches the file.
    """
    try:
        return filename_to_ignored_by_filters_cache[filename]
    except KeyError:  # narrowed from bare `except:` -- only a cache miss is expected here
        import fnmatch
        for stepping_filter in _get_stepping_filters():
            if fnmatch.fnmatch(filename, stepping_filter):
                pydev_log.debug("File %s ignored by filter %s" % (filename, stepping_filter))
                filename_to_ignored_by_filters_cache[filename] = True
                break
        else:
            filename_to_ignored_by_filters_cache[filename] = False

        return filename_to_ignored_by_filters_cache[filename]
|
||||
|
||||
608
ptvsd/pydevd/_pydevd_bundle/pydevd_vars.py
Normal file
608
ptvsd/pydevd/_pydevd_bundle/pydevd_vars.py
Normal file
|
|
@ -0,0 +1,608 @@
|
|||
""" pydevd_vars deals with variables:
|
||||
resolution/conversion to XML.
|
||||
"""
|
||||
import pickle
|
||||
from _pydevd_bundle.pydevd_constants import get_frame, get_thread_id, xrange
|
||||
|
||||
from _pydevd_bundle.pydevd_custom_frames import get_custom_frame
|
||||
from _pydevd_bundle.pydevd_xml import ExceptionOnEvaluate, get_type, var_to_xml
|
||||
from _pydev_imps._pydev_saved_modules import thread
|
||||
|
||||
try:
|
||||
from StringIO import StringIO
|
||||
except ImportError:
|
||||
from io import StringIO
|
||||
import sys # @Reimport
|
||||
|
||||
from _pydev_imps._pydev_saved_modules import threading
|
||||
import traceback
|
||||
from _pydevd_bundle import pydevd_save_locals
|
||||
from _pydev_bundle.pydev_imports import Exec, quote, execfile
|
||||
from _pydevd_bundle.pydevd_utils import to_string
|
||||
|
||||
SENTINEL_VALUE = []
|
||||
|
||||
# ------------------------------------------------------------------------------------------------------ class for errors

class VariableError(RuntimeError):
    """Raised when a variable or frame lookup cannot be satisfied."""


class FrameNotFoundError(RuntimeError):
    """Raised when a requested frame cannot be located."""
|
||||
|
||||
|
||||
def _iter_frames(initialFrame):
|
||||
'''NO-YIELD VERSION: Iterates through all the frames starting at the specified frame (which will be the first returned item)'''
|
||||
# cannot use yield
|
||||
frames = []
|
||||
|
||||
while initialFrame is not None:
|
||||
frames.append(initialFrame)
|
||||
initialFrame = initialFrame.f_back
|
||||
|
||||
return frames
|
||||
|
||||
|
||||
def dump_frames(thread_id):
    # Debug helper: writes the current thread's whole frame stack to stdout.
    # Must be called from the thread identified by *thread_id*.
    # NOTE(review): pickle.dumps on a frame object raises TypeError on CPython --
    # presumably legacy debugging code; verify before relying on it.
    sys.stdout.write('dumping frames\n')
    if thread_id != get_thread_id(threading.currentThread()):
        raise VariableError("find_frame: must execute on same thread")

    curFrame = get_frame()
    for frame in _iter_frames(curFrame):
        sys.stdout.write('%s\n' % pickle.dumps(frame))
|
||||
|
||||
|
||||
# ===============================================================================
# AdditionalFramesContainer
# ===============================================================================
class AdditionalFramesContainer:
    # Namespace holding custom frames registered per thread (e.g. stackless tasklets).
    lock = thread.allocate_lock()  # NOTE(review): not acquired in this module -- presumably guards additional_frames elsewhere; verify.
    additional_frames = {} # dict of dicts: thread_id -> {frame_id: frame}
|
||||
|
||||
|
||||
def add_additional_frame_by_id(thread_id, frames_by_id):
    # Register a {frame_id: frame} dict of custom frames for the given thread.
    AdditionalFramesContainer.additional_frames[thread_id] = frames_by_id


addAdditionalFrameById = add_additional_frame_by_id # Backward compatibility


def remove_additional_frame_by_id(thread_id):
    # Drop all custom frames registered for the thread (KeyError if none were added).
    del AdditionalFramesContainer.additional_frames[thread_id]


removeAdditionalFrameById = remove_additional_frame_by_id # Backward compatibility


def has_additional_frames_by_id(thread_id):
    # True when custom frames were registered for the thread.
    return thread_id in AdditionalFramesContainer.additional_frames


def get_additional_frames_by_id(thread_id):
    # Returns the {frame_id: frame} dict for the thread, or None when absent.
    return AdditionalFramesContainer.additional_frames.get(thread_id)
|
||||
|
||||
|
||||
def find_frame(thread_id, frame_id):
    """ returns a frame on the thread that has a given frame_id

    frame_id is matched against id(frame) for the frames of the current
    thread (after checking registered additional/custom frames).  Returns
    None (after logging to stderr) when no frame matches.
    """
    try:
        curr_thread_id = get_thread_id(threading.currentThread())
        if thread_id != curr_thread_id:
            try:
                return get_custom_frame(thread_id, frame_id)  # I.e.: thread_id could be a stackless frame id + thread_id.
            except:
                pass

            raise VariableError("find_frame: must execute on same thread (%s != %s)" % (thread_id, curr_thread_id))

        lookingFor = int(frame_id)

        # Registered additional (custom/virtual) frames take precedence over
        # the real stack.
        if AdditionalFramesContainer.additional_frames:
            if thread_id in AdditionalFramesContainer.additional_frames:
                frame = AdditionalFramesContainer.additional_frames[thread_id].get(lookingFor)

                if frame is not None:
                    return frame

        curFrame = get_frame()
        if frame_id == "*":
            return curFrame  # any frame is specified with "*"

        frameFound = None

        for frame in _iter_frames(curFrame):
            if lookingFor == id(frame):
                frameFound = frame
                del frame
                break

        del frame

        # Important: python can hold a reference to the frame from the current context
        # if an exception is raised, so, if we don't explicitly add those deletes
        # we might have those variables living much more than we'd want to.

        # I.e.: sys.exc_info holding reference to frame that raises exception (so, other places
        # need to call sys.exc_clear())
        del curFrame

        if frameFound is None:
            # Build a diagnostic listing of the available frame ids (5 per line).
            msgFrames = ''
            i = 0

            for frame in _iter_frames(get_frame()):
                i += 1
                msgFrames += str(id(frame))
                if i % 5 == 0:
                    msgFrames += '\n'
                else:
                    msgFrames += ' - '

            errMsg = '''find_frame: frame not found.
Looking for thread_id:%s, frame_id:%s
Current thread_id:%s, available frames:
%s\n
''' % (thread_id, lookingFor, curr_thread_id, msgFrames)

            sys.stderr.write(errMsg)
            return None

        return frameFound
    except:
        import traceback
        traceback.print_exc()
        return None
|
||||
|
||||
|
||||
def getVariable(thread_id, frame_id, scope, attrs):
    """
    returns the value of a variable

    :scope: can be BY_ID, EXPRESSION, GLOBAL, LOCAL, FRAME

    BY_ID means we'll traverse the list of all objects alive to get the object.

    :attrs: after reaching the proper scope, we have to get the attributes until we find
            the proper location (i.e.: obj\tattr1\tattr2)

    :note: when BY_ID is used, the frame_id is considered the id of the object to find and
           not the frame (as we don't care about the frame in this case).
    """
    if scope == 'BY_ID':
        if thread_id != get_thread_id(threading.currentThread()):
            raise VariableError("getVariable: must execute on same thread")

        try:
            import gc
            objects = gc.get_objects()
        except:
            pass  # Not all python variants have it.
        else:
            frame_id = int(frame_id)
            for var in objects:
                if id(var) == frame_id:
                    if attrs is not None:
                        attrList = attrs.split('\t')
                        for k in attrList:
                            _type, _typeName, resolver = get_type(var)
                            var = resolver.resolve(var, k)

                    return var

        # If it didn't return previously, we couldn't find it by id (i.e.: already garbage collected).
        sys.stderr.write('Unable to find object with id: %s\n' % (frame_id,))
        return None

    frame = find_frame(thread_id, frame_id)
    if frame is None:
        return {}

    if attrs is not None:
        attrList = attrs.split('\t')
    else:
        attrList = []

    # Bugfix: str.replace returns a new string -- the original code discarded
    # the result, so the tab placeholder was never actually unescaped in the
    # attribute names.  Store the replaced names back into the list.
    attrList = [attr.replace("@_@TAB_CHAR@_@", '\t') for attr in attrList]

    if scope == 'EXPRESSION':
        for count in xrange(len(attrList)):
            if count == 0:
                # An Expression can be in any scope (globals/locals), therefore it needs to evaluated as an expression
                var = evaluate_expression(thread_id, frame_id, attrList[count], False)
            else:
                _type, _typeName, resolver = get_type(var)
                var = resolver.resolve(var, attrList[count])
    else:
        if scope == "GLOBAL":
            var = frame.f_globals
            del attrList[0]  # globals are special, and they get a single dummy unused attribute
        else:
            # in a frame access both locals and globals as Python does
            var = {}
            var.update(frame.f_globals)
            var.update(frame.f_locals)

        for k in attrList:
            _type, _typeName, resolver = get_type(var)
            var = resolver.resolve(var, k)

    return var
|
||||
|
||||
|
||||
def resolve_compound_variable(thread_id, frame_id, scope, attrs):
    """ returns the value of the compound variable as a dictionary"""
    compound = getVariable(thread_id, frame_id, scope, attrs)

    try:
        resolver = get_type(compound)[2]
        return resolver.get_dictionary(compound)
    except:
        sys.stderr.write('Error evaluating: thread_id: %s\nframe_id: %s\nscope: %s\nattrs: %s\n' % (
            thread_id, frame_id, scope, attrs,))
        traceback.print_exc()
|
||||
|
||||
|
||||
def resolve_var(var, attrs):
    """Walk the tab-separated *attrs* path down from *var* and return the
    final object's children as a dictionary (None on failure)."""
    for attr_name in attrs.split('\t'):
        resolver = get_type(var)[2]
        var = resolver.resolve(var, attr_name)

    try:
        return get_type(var)[2].get_dictionary(var)
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
def custom_operation(thread_id, frame_id, scope, attrs, style, code_or_file, operation_fn_name):
    """
    We'll execute the code_or_file and then search in the namespace the operation_fn_name to execute with the given var.

    code_or_file: either some code (i.e.: from pprint import pprint) or a file to be executed.
    operation_fn_name: the name of the operation to execute after the exec (i.e.: pprint)

    Returns the str() of the operation's result, or None if anything failed
    (the traceback is printed).
    """
    # NOTE: executes arbitrary user-supplied code by design (debugger feature).
    expressionValue = getVariable(thread_id, frame_id, scope, attrs)

    try:
        # Fresh namespace used both as globals and locals for the user code.
        namespace = {'__name__': '<custom_operation>'}
        if style == "EXECFILE":
            namespace['__file__'] = code_or_file
            execfile(code_or_file, namespace, namespace)
        else:  # style == EXEC
            namespace['__file__'] = '<customOperationCode>'
            Exec(code_or_file, namespace, namespace)

        return str(namespace[operation_fn_name](expressionValue))
    except:
        traceback.print_exc()
|
||||
|
||||
|
||||
def eval_in_context(expression, globals, locals):
    """Evaluate *expression* in the given namespaces.

    On failure, returns an ExceptionOnEvaluate wrapping the exception value
    (or the formatted traceback when no exception info is available), after
    also attempting to work around '__' private-name mangling.
    """
    result = None
    try:
        result = eval(expression, globals, locals)
    except Exception:
        s = StringIO()
        traceback.print_exc(file=s)
        result = s.getvalue()

        try:
            try:
                etype, value, tb = sys.exc_info()
                result = value
            finally:
                # Clear the locals so the traceback/frames are not kept alive.
                etype = value = tb = None
        except:
            pass

        result = ExceptionOnEvaluate(result)

        # Ok, we have the initial error message, but let's see if we're dealing with a name mangling error...
        try:
            if '__' in expression:
                # Try to handle '__' name mangling...
                split = expression.split('.')
                curr = locals.get(split[0])
                for entry in split[1:]:
                    if entry.startswith('__') and not hasattr(curr, entry):
                        # Rewrite obj.__attr as obj._ClassName__attr.
                        entry = '_%s%s' % (curr.__class__.__name__, entry)
                    curr = getattr(curr, entry)

                result = curr
        except:
            pass
    return result
|
||||
|
||||
|
||||
def evaluate_expression(thread_id, frame_id, expression, doExec):
    '''returns the result of the evaluated expression
    @param doExec: determines if we should do an exec or an eval

    In exec mode nothing is returned: eval-able expressions have their
    result printed to stdout (mimicking the interactive interpreter);
    otherwise the code is exec'd in the frame.
    '''
    frame = find_frame(thread_id, frame_id)
    if frame is None:
        return

    # Not using frame.f_globals because of https://sourceforge.net/tracker2/?func=detail&aid=2541355&group_id=85796&atid=577329
    # (Names not resolved in generator expression in method)
    # See message: http://mail.python.org/pipermail/python-list/2009-January/526522.html
    updated_globals = {}
    updated_globals.update(frame.f_globals)
    updated_globals.update(frame.f_locals)  # locals later because it has precedence over the actual globals

    try:
        # The protocol encodes newlines as @LINE@.
        expression = str(expression.replace('@LINE@', '\n'))

        if doExec:
            try:
                # try to make it an eval (if it is an eval we can print it, otherwise we'll exec it and
                # it will have whatever the user actually did)
                compiled = compile(expression, '<string>', 'eval')
            except:
                Exec(expression, updated_globals, frame.f_locals)
                pydevd_save_locals.save_locals(frame)
            else:
                result = eval(compiled, updated_globals, frame.f_locals)
                if result is not None:  # Only print if it's not None (as python does)
                    sys.stdout.write('%s\n' % (result,))
            return

        else:
            return eval_in_context(expression, updated_globals, frame.f_locals)
    finally:
        # Should not be kept alive if an exception happens and this frame is kept in the stack.
        del updated_globals
        del frame
|
||||
|
||||
|
||||
def change_attr_expression(thread_id, frame_id, attr, expression, dbg, value=SENTINEL_VALUE):
    '''Changes some attribute in a given frame.

    When *value* is the SENTINEL_VALUE default, *expression* is evaluated in
    the frame to produce the new value; otherwise *value* is used directly.
    Returns the new value (or a plugin-provided result), None on failure.
    '''
    frame = find_frame(thread_id, frame_id)
    if frame is None:
        return

    try:
        # The protocol encodes newlines as @LINE@.
        expression = expression.replace('@LINE@', '\n')

        # Give plugins (e.g. django/jinja support) a chance to handle it first.
        if dbg.plugin and value is SENTINEL_VALUE:
            result = dbg.plugin.change_variable(frame, attr, expression)
            if result:
                return result

        if attr[:7] == "Globals":
            # Attribute is addressed as 'Globals/<name>' -- strip the prefix.
            attr = attr[8:]
            if attr in frame.f_globals:
                if value is SENTINEL_VALUE:
                    value = eval(expression, frame.f_globals, frame.f_locals)
                frame.f_globals[attr] = value
                return frame.f_globals[attr]
        else:
            if '.' not in attr:  # i.e.: if we have a '.', we're changing some attribute of a local var.
                if pydevd_save_locals.is_save_locals_available():
                    if value is SENTINEL_VALUE:
                        value = eval(expression, frame.f_globals, frame.f_locals)
                    frame.f_locals[attr] = value
                    pydevd_save_locals.save_locals(frame)
                    return frame.f_locals[attr]

            # default way (only works for changing it in the topmost frame)
            if value is SENTINEL_VALUE:
                value = eval(expression, frame.f_globals, frame.f_locals)
            result = value
            Exec('%s=%s' % (attr, expression), frame.f_globals, frame.f_locals)
            return result

    except Exception:
        traceback.print_exc()
|
||||
|
||||
|
||||
# Maximum number of rows/cols rendered per request when converting arrays and
# dataframes to XML.
MAXIMUM_ARRAY_SIZE = 100
# Maximum extent of the slice taken from the original container.
MAX_SLICE_SIZE = 1000
|
||||
|
||||
|
||||
def table_like_struct_to_xml(array, name, roffset, coffset, rows, cols, format):
    """Render a numpy ndarray or pandas DataFrame region as the debugger's
    table XML (raises VariableError for unsupported types)."""
    type_name = get_type(array)[1]

    if type_name == 'ndarray':
        array, meta_xml, total_rows, total_cols, fmt = array_to_meta_xml(array, name, format)
        format = '%' + fmt
        if rows == -1 and cols == -1:
            rows, cols = total_rows, total_cols
        inner = meta_xml + array_to_xml(array, roffset, coffset, rows, cols, format)
    elif type_name == 'DataFrame':
        inner = dataframe_to_xml(array, name, roffset, coffset, rows, cols, format)
    else:
        raise VariableError("Do not know how to convert type %s to table" % (type_name))

    return "<xml>%s</xml>" % inner
|
||||
|
||||
|
||||
def array_to_xml(array, roffset, coffset, rows, cols, format):
    """Render a region of a numpy array (dispatched from
    table_like_struct_to_xml) as <arraydata>/<row> XML, formatting each
    element with the '%'-style *format*."""
    xml = ""
    # Clamp the requested view to the protocol maximum.
    rows = min(rows, MAXIMUM_ARRAY_SIZE)
    cols = min(cols, MAXIMUM_ARRAY_SIZE)

    # there is no obvious rule for slicing (at least 5 choices)
    if len(array) == 1 and (rows > 1 or cols > 1):
        array = array[0]
    if array.size > len(array):
        # 2D case: apply the row/col offsets, then re-clamp to what is left.
        array = array[roffset:, coffset:]
        rows = min(rows, len(array))
        cols = min(cols, len(array[0]))
        if len(array) == 1:
            array = array[0]
    elif array.size == len(array):
        # 1D case: only one of the offsets applies.
        if roffset == 0 and rows == 1:
            array = array[coffset:]
            cols = min(cols, len(array))
        elif coffset == 0 and cols == 1:
            array = array[roffset:]
            rows = min(rows, len(array))

    xml += "<arraydata rows=\"%s\" cols=\"%s\"/>" % (rows, cols)
    for row in xrange(rows):
        xml += "<row index=\"%s\"/>" % to_string(row)
        for col in xrange(cols):
            value = array
            if rows == 1 or cols == 1:
                if rows == 1 and cols == 1:
                    value = array[0]
                else:
                    # Single row -> index by column; single column -> by row.
                    if rows == 1:
                        dim = col
                    else:
                        dim = row
                    value = array[dim]
                    if "ndarray" in str(type(value)):
                        value = value[0]
            else:
                value = array[row][col]
            value = format % value
            xml += var_to_xml(value, '')
    return xml
|
||||
|
||||
|
||||
def array_to_meta_xml(array, name, format):
    """Compute the slice/format metadata for an ndarray.

    Returns (sliced_array, meta_xml, rows, cols, format_without_percent).

    NOTE(review): for a 0-d array (len(array.shape) == 0) neither branch
    assigns rows/cols, which would raise NameError below -- TODO confirm
    callers never pass 0-d arrays.
    """
    type = array.dtype.kind       # numpy dtype kind character ('f', 'i', ...)
    slice = name                  # textual description of the slice taken
    l = len(array.shape)

    # initial load, compute slice
    if format == '%':
        # No explicit format yet: reduce >2D arrays to their first 2D plane
        # and pick a default format from the dtype kind.
        if l > 2:
            slice += '[0]' * (l - 2)
            for r in xrange(l - 2):
                array = array[0]
        if type == 'f':
            format = '.5f'
        elif type == 'i' or type == 'u':
            format = 'd'
        else:
            format = 's'
    else:
        format = format.replace('%', '')

    l = len(array.shape)
    reslice = ""
    if l > 2:
        raise Exception("%s has more than 2 dimensions." % slice)
    elif l == 1:
        # special case with 1D arrays arr[i, :] - row, but arr[:, i] - column with equal shape and ndim
        # http://stackoverflow.com/questions/16837946/numpy-a-2-rows-1-column-file-loadtxt-returns-1row-2-columns
        # explanation: http://stackoverflow.com/questions/15165170/how-do-i-maintain-row-column-orientation-of-vectors-in-numpy?rq=1
        # we use kind of a hack - get information about memory from C_CONTIGUOUS
        is_row = array.flags['C_CONTIGUOUS']

        if is_row:
            rows = 1
            cols = min(len(array), MAX_SLICE_SIZE)
            if cols < len(array):
                reslice = '[0:%s]' % (cols)
            array = array[0:cols]
        else:
            cols = 1
            rows = min(len(array), MAX_SLICE_SIZE)
            if rows < len(array):
                reslice = '[0:%s]' % (rows)
            array = array[0:rows]
    elif l == 2:
        rows = min(array.shape[-2], MAX_SLICE_SIZE)
        cols = min(array.shape[-1], MAX_SLICE_SIZE)
        if cols < array.shape[-1] or rows < array.shape[-2]:
            reslice = '[0:%s, 0:%s]' % (rows, cols)
        array = array[0:rows, 0:cols]

    # avoid slice duplication
    if not slice.endswith(reslice):
        slice += reslice

    bounds = (0, 0)
    # Only numeric kinds (bool/int/uint/float/complex) get real min/max bounds.
    if type in "biufc":
        bounds = (array.min(), array.max())
    xml = '<array slice=\"%s\" rows=\"%s\" cols=\"%s\" format=\"%s\" type=\"%s\" max=\"%s\" min=\"%s\"/>' % \
          (slice, rows, cols, format, type, bounds[1], bounds[0])
    return array, xml, rows, cols, format
|
||||
|
||||
|
||||
|
||||
def dataframe_to_xml(df, name, roffset, coffset, rows, cols, format):
    """
    Render a pandas DataFrame region as the debugger's table XML
    (array header, column/row headers and cell data).

    :type df: pandas.core.frame.DataFrame
    :type name: str
    :type coffset: int
    :type roffset: int
    :type rows: int
    :type cols: int
    :type format: str
    """
    # First clamp the whole frame to the maximum slice size.
    num_rows = min(df.shape[0], MAX_SLICE_SIZE)
    num_cols = min(df.shape[1], MAX_SLICE_SIZE)
    if (num_rows, num_cols) != df.shape:
        df = df.iloc[0:num_rows, 0: num_cols]
        slice = '.iloc[0:%s, 0:%s]' % (num_rows, num_cols)
    else:
        slice = ''
    slice = name + slice
    xml = '<array slice=\"%s\" rows=\"%s\" cols=\"%s\" format=\"\" type=\"\" max=\"0\" min=\"0\"/>\n' % \
          (slice, num_rows, num_cols)

    # (-1, -1) means "whole (clamped) frame".
    if (rows, cols) == (-1, -1):
        rows, cols = num_rows, num_cols

    rows = min(rows, MAXIMUM_ARRAY_SIZE)
    cols = min(min(cols, MAXIMUM_ARRAY_SIZE), num_cols)
    # need to precompute column bounds here before slicing!
    col_bounds = [None] * cols
    for col in xrange(cols):
        dtype = df.dtypes.iloc[coffset + col].kind
        if dtype in "biufc":
            # Numeric column: bounds over the full (unsliced) column.
            cvalues = df.iloc[:, coffset + col]
            bounds = (cvalues.min(), cvalues.max())
        else:
            bounds = (0, 0)
        col_bounds[col] = bounds

    # Now take the requested window.
    df = df.iloc[roffset: roffset + rows, coffset: coffset + cols]
    rows, cols = df.shape

    xml += "<headerdata rows=\"%s\" cols=\"%s\">\n" % (rows, cols)
    format = format.replace('%', '')
    col_formats = []

    # MultiIndex labels are tuples -- join them with '/'.
    get_label = lambda label: str(label) if not isinstance(label, tuple) else '/'.join(map(str, label))

    for col in xrange(cols):
        dtype = df.dtypes.iloc[col].kind
        # Per-column format: explicit format for floats, else dtype default.
        if dtype == 'f' and format:
            fmt = format
        elif dtype == 'f':
            fmt = '.5f'
        elif dtype == 'i' or dtype == 'u':
            fmt = 'd'
        else:
            fmt = 's'
        col_formats.append('%' + fmt)
        bounds = col_bounds[col]

        xml += '<colheader index=\"%s\" label=\"%s\" type=\"%s\" format=\"%s\" max=\"%s\" min=\"%s\" />\n' % \
               (str(col), get_label(df.axes[1].values[col]), dtype, fmt, bounds[1], bounds[0])
    for row, label in enumerate(iter(df.axes[0])):
        xml += "<rowheader index=\"%s\" label = \"%s\"/>\n" % \
               (str(row), get_label(label))
    xml += "</headerdata>\n"
    xml += "<arraydata rows=\"%s\" cols=\"%s\"/>\n" % (rows, cols)
    for row in xrange(rows):
        xml += "<row index=\"%s\"/>\n" % str(row)
        for col in xrange(cols):
            value = df.iat[row, col]
            value = col_formats[col] % value
            xml += var_to_xml(value, '')
    return xml
|
||||
41
ptvsd/pydevd/_pydevd_bundle/pydevd_vm_type.py
Normal file
41
ptvsd/pydevd/_pydevd_bundle/pydevd_vm_type.py
Normal file
|
|
@ -0,0 +1,41 @@
|
|||
import sys
|
||||
|
||||
#=======================================================================================================================
# PydevdVmType
#=======================================================================================================================
class PydevdVmType:
    # Known VM type constants.
    PYTHON = 'python'
    JYTHON = 'jython'
    # Cached detected/forced VM type (one of the constants above);
    # None until set_vm_type()/setup_type() runs.
    vm_type = None
|
||||
|
||||
|
||||
#=======================================================================================================================
# set_vm_type
#=======================================================================================================================
def set_vm_type(vm_type):
    # Force the cached VM type (expected: PydevdVmType.PYTHON / JYTHON).
    PydevdVmType.vm_type = vm_type
|
||||
|
||||
|
||||
#=======================================================================================================================
# get_vm_type
#=======================================================================================================================
def get_vm_type():
    # Lazily auto-detect the VM type on first access.
    if PydevdVmType.vm_type is None:
        setup_type()
    return PydevdVmType.vm_type
|
||||
|
||||
|
||||
#=======================================================================================================================
# setup_type
#=======================================================================================================================
def setup_type(str=None):
    # NOTE(review): the parameter shadows the builtin 'str'; kept as-is since
    # the name is part of the callable interface.
    if str is not None:
        # An explicit type was given -- cache it and skip detection.
        PydevdVmType.vm_type = str
        return

    # Auto-detect: Jython reports a 'java...' sys.platform.
    if sys.platform.startswith("java"):
        PydevdVmType.vm_type = PydevdVmType.JYTHON
    else:
        PydevdVmType.vm_type = PydevdVmType.PYTHON
|
||||
|
||||
362
ptvsd/pydevd/_pydevd_bundle/pydevd_xml.py
Normal file
362
ptvsd/pydevd/_pydevd_bundle/pydevd_xml.py
Normal file
|
|
@ -0,0 +1,362 @@
|
|||
from _pydev_bundle import pydev_log
|
||||
import traceback
|
||||
from _pydevd_bundle import pydevd_extension_utils
|
||||
from _pydevd_bundle import pydevd_resolver
|
||||
import sys
|
||||
from _pydevd_bundle.pydevd_constants import dict_iter_items, dict_keys, IS_PY3K, \
|
||||
MAXIMUM_VARIABLE_REPRESENTATION_SIZE, RETURN_VALUES_DICT
|
||||
from _pydev_bundle.pydev_imports import quote
|
||||
from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider, StrPresentationProvider
|
||||
|
||||
# Resolve the concrete frame type once; fall back to None where the types
# module does not expose it.
try:
    import types
    frame_type = types.FrameType
except:
    frame_type = None
|
||||
|
||||
try:
    from xml.sax.saxutils import escape

    def make_valid_xml_value(s):
        """Escape *s* for safe embedding in a double-quoted XML attribute."""
        # Bugfix: the entity strings were mangled (HTML-unescaped) into
        # identity replacements; restore the real XML entities.  Also escape
        # double-quotes since values are emitted inside "..." attributes.
        return escape(s, {'"': '&quot;'})
except:
    # Simple replacement if xml.sax is not there.
    def make_valid_xml_value(s):
        """Escape *s* for safe embedding in a double-quoted XML attribute."""
        return s.replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;')
|
||||
|
||||
|
||||
class ExceptionOnEvaluate:
    """Marker wrapping the error produced while evaluating an expression."""

    def __init__(self, result):
        # The exception value (or formatted traceback text) to present.
        self.result = result
|
||||
|
||||
|
||||
# True when running on Jython (sys.platform reports 'java...').
_IS_JYTHON = sys.platform.startswith("java")
|
||||
|
||||
|
||||
def _create_default_type_map():
    """Build the ordered (type, resolver) list used to classify values.

    A None resolver means the value is simple (not a compound variable);
    otherwise the resolver expands it as a container.  Order matters: more
    specific types (django MultiValueDict/BaseForm) are inserted first.
    """
    if not _IS_JYTHON:
        default_type_map = [
            # None means that it should not be treated as a compound variable

            # isinstance does not accept a tuple on some versions of python, so, we must declare it expanded
            (type(None), None,),
            (int, None),
            (float, None),
            (complex, None),
            (str, None),
            (tuple, pydevd_resolver.tupleResolver),
            (list, pydevd_resolver.tupleResolver),
            (dict, pydevd_resolver.dictResolver),
        ]
        try:
            default_type_map.append((long, None))  # @UndefinedVariable
        except:
            pass  # not available on all python versions

        try:
            default_type_map.append((unicode, None))  # @UndefinedVariable
        except:
            pass  # not available on all python versions

        try:
            default_type_map.append((set, pydevd_resolver.setResolver))
        except:
            pass  # not available on all python versions

        try:
            default_type_map.append((frozenset, pydevd_resolver.setResolver))
        except:
            pass  # not available on all python versions

        try:
            from django.utils.datastructures import MultiValueDict
            default_type_map.insert(0, (MultiValueDict, pydevd_resolver.multiValueDictResolver))
            # we should put it before dict
        except:
            pass  # django may not be installed

        try:
            from django.forms import BaseForm
            default_type_map.insert(0, (BaseForm, pydevd_resolver.djangoFormResolver))
            # we should put it before instance resolver
        except:
            pass  # django may not be installed

        try:
            from collections import deque
            default_type_map.append((deque, pydevd_resolver.dequeResolver))
        except:
            pass

        if frame_type is not None:
            default_type_map.append((frame_type, pydevd_resolver.frameResolver))

    else:
        # Jython: map the org.python.core Py* types instead.
        from org.python import core  # @UnresolvedImport
        default_type_map = [
            (core.PyNone, None),
            (core.PyInteger, None),
            (core.PyLong, None),
            (core.PyFloat, None),
            (core.PyComplex, None),
            (core.PyString, None),
            (core.PyTuple, pydevd_resolver.tupleResolver),
            (core.PyList, pydevd_resolver.tupleResolver),
            (core.PyDictionary, pydevd_resolver.dictResolver),
            (core.PyStringMap, pydevd_resolver.dictResolver),
        ]
        if hasattr(core, 'PyJavaInstance'):
            # Jython 2.5b3 removed it.
            default_type_map.append((core.PyJavaInstance, pydevd_resolver.instanceResolver))

    return default_type_map
|
||||
|
||||
|
||||
class TypeResolveHandler(object):
    """Maps a value's type to its resolver and str-presentation provider,
    consulting extension providers first and caching every decision."""

    NO_PROVIDER = []  # Sentinel value (any mutable object to be used as a constant would be valid).

    def __init__(self):
        # Note: don't initialize with the types we already know about so that the extensions can override
        # the default resolvers that are already available if they want.
        self._type_to_resolver_cache = {}
        self._type_to_str_provider_cache = {}
        self._initialized = False

    def _initialize(self):
        # Lazily load the default map and the extension providers on first use.
        self._default_type_map = _create_default_type_map()
        self._resolve_providers = pydevd_extension_utils.extensions_of_type(TypeResolveProvider)
        self._str_providers = pydevd_extension_utils.extensions_of_type(StrPresentationProvider)
        self._initialized = True

    def get_type(self, o):
        """Return (type_object, type_name, resolver) for *o* (resolver may be None)."""
        try:
            try:
                # Faster than type(o) as we don't need the function call.
                type_object = o.__class__
            except:
                # Not all objects have __class__ (i.e.: there are bad bindings around).
                type_object = type(o)

            type_name = type_object.__name__
        except:
            # This happens for org.python.core.InitModule
            return 'Unable to get Type', 'Unable to get Type', None

        return self._get_type(o, type_object, type_name)

    def _get_type(self, o, type_object, type_name):
        resolver = self._type_to_resolver_cache.get(type_object)
        if resolver is not None:
            return type_object, type_name, resolver

        if not self._initialized:
            self._initialize()

        try:
            # Extension providers first, so they can override the defaults.
            for resolver in self._resolve_providers:
                if resolver.can_provide(type_object, type_name):
                    # Cache it
                    self._type_to_resolver_cache[type_object] = resolver
                    return type_object, type_name, resolver

            for t in self._default_type_map:
                if isinstance(o, t[0]):
                    # Cache it
                    resolver = t[1]
                    self._type_to_resolver_cache[type_object] = resolver
                    return (type_object, type_name, resolver)
        except:
            traceback.print_exc()

        # No match return default (and cache it).
        resolver = pydevd_resolver.defaultResolver
        self._type_to_resolver_cache[type_object] = resolver
        return type_object, type_name, resolver

    if _IS_JYTHON:
        _base_get_type = _get_type

        def _get_type(self, o, type_object, type_name):
            if type_name == 'org.python.core.PyJavaInstance':
                return type_object, type_name, pydevd_resolver.instanceResolver

            if type_name == 'org.python.core.PyArray':
                return type_object, type_name, pydevd_resolver.jyArrayResolver

            # Bugfix: the original passed type_name twice; _base_get_type
            # expects (o, type_object, type_name), so the type object was
            # replaced by its name string (breaking cache keys and isinstance
            # checks on the Jython path).
            return self._base_get_type(o, type_object, type_name)

    def str_from_providers(self, o, type_object, type_name):
        """Return the extension-provided string for *o*, or None if no
        registered StrPresentationProvider handles its type."""
        provider = self._type_to_str_provider_cache.get(type_object)

        if provider is self.NO_PROVIDER:
            return None

        if provider is not None:
            return provider.get_str(o)

        if not self._initialized:
            self._initialize()

        for provider in self._str_providers:
            if provider.can_provide(type_object, type_name):
                self._type_to_str_provider_cache[type_object] = provider
                return provider.get_str(o)

        # Remember the miss so the providers are not re-scanned next time.
        self._type_to_str_provider_cache[type_object] = self.NO_PROVIDER
        return None
|
||||
|
||||
|
||||
# Singleton handler holding the resolver/str-provider caches.
_TYPE_RESOLVE_HANDLER = TypeResolveHandler()

"""
def get_type(o):
    Receives object and returns a triple (typeObject, typeString, resolver).

    resolver != None means that variable is a container, and should be displayed as a hierarchy.

    Use the resolver to get its attributes.

    All container objects should have a resolver.
"""
# Public module-level API bound to the singleton's methods.
get_type = _TYPE_RESOLVE_HANDLER.get_type

_str_from_providers = _TYPE_RESOLVE_HANDLER.str_from_providers
|
||||
|
||||
|
||||
def return_values_from_dict_to_xml(return_dict):
    """Render each (name, value) of *return_dict* as XML, flagged as a return value."""
    chunks = [var_to_xml(val, name, additional_in_xml=' isRetVal="True"')
              for name, val in dict_iter_items(return_dict)]
    return ''.join(chunks)
|
||||
|
||||
|
||||
def frame_vars_to_xml(frame_f_locals, hidden_ns=None):
    """ dumps frame variables to XML
    <var name="var_name" scope="local" type="type" value="value"/>

    Variables are emitted sorted by name; entries under RETURN_VALUES_DICT
    are rendered first and flagged as return values; names in *hidden_ns*
    are flagged as IPython-hidden.
    """
    xml = ""

    keys = dict_keys(frame_f_locals)
    if hasattr(keys, 'sort'):
        keys.sort()  # Python 3.0 does not have it
    else:
        keys = sorted(keys)  # Jython 2.1 does not have it

    return_values_xml = ''

    for k in keys:
        try:
            v = frame_f_locals[k]
            if k == RETURN_VALUES_DICT:
                # Special pseudo-variable holding function return values.
                for name, val in dict_iter_items(v):
                    return_values_xml += var_to_xml(val, name, additional_in_xml=' isRetVal="True"')

            else:
                if hidden_ns is not None and k in hidden_ns:
                    xml += var_to_xml(v, str(k), additional_in_xml=' isIPythonHidden="True"')
                else:
                    xml += var_to_xml(v, str(k))
        except Exception:
            # A broken variable must not break the whole dump.
            traceback.print_exc()
            pydev_log.error("Unexpected error, recovered safely.\n")

    # Show return values as the first entry.
    return return_values_xml + xml
|
||||
|
||||
|
||||
def var_to_xml(val, name, doTrim=True, additional_in_xml=''):
    """ single variable or dictionary to xml representation

    Produces a '<var name=... type=... [qualifier=...] [value=...] .../>'
    element.  When *doTrim* the value representation is capped at
    MAXIMUM_VARIABLE_REPRESENTATION_SIZE; *additional_in_xml* is appended
    verbatim before the closing tag.
    """
    try:
        # This should be faster than isinstance (but we have to protect against not having a '__class__' attribute).
        is_exception_on_eval = val.__class__ == ExceptionOnEvaluate
    except:
        is_exception_on_eval = False

    # For evaluation errors, present the wrapped result instead.
    if is_exception_on_eval:
        v = val.result
    else:
        v = val

    _type, typeName, resolver = get_type(v)
    type_qualifier = getattr(_type, "__module__", "")
    try:
        # Extension-provided presentations win over the defaults below.
        str_from_provider = _str_from_providers(v, _type, typeName)
        if str_from_provider is not None:
            value = str_from_provider
        elif hasattr(v, '__class__'):
            if v.__class__ == frame_type:
                value = pydevd_resolver.frameResolver.get_frame_name(v)

            elif v.__class__ in (list, tuple):
                if len(v) > 300:
                    value = '%s: %s' % (str(v.__class__), '<Too big to print. Len: %s>' % (len(v),))
                else:
                    value = '%s: %s' % (str(v.__class__), v)
            else:
                try:
                    # Reduce "<class 'pkg.Mod.Cls'>" to the bare class name.
                    cName = str(v.__class__)
                    if cName.find('.') != -1:
                        cName = cName.split('.')[-1]

                    elif cName.find("'") != -1:  # does not have '.' (could be something like <type 'int'>)
                        cName = cName[cName.index("'") + 1:]

                    if cName.endswith("'>"):
                        cName = cName[:-2]
                except:
                    cName = str(v.__class__)

                value = '%s: %s' % (cName, v)
        else:
            value = str(v)
    except:
        try:
            value = repr(v)
        except:
            value = 'Unable to get repr for %s' % v.__class__

    try:
        name = quote(name, '/>_= ')  # TODO: Fix PY-5834 without using quote
    except:
        pass

    xml = '<var name="%s" type="%s" ' % (make_valid_xml_value(name), make_valid_xml_value(typeName))

    if type_qualifier:
        xml_qualifier = 'qualifier="%s"' % make_valid_xml_value(type_qualifier)
    else:
        xml_qualifier = ''

    if value:
        # cannot be too big... communication may not handle it.
        if len(value) > MAXIMUM_VARIABLE_REPRESENTATION_SIZE and doTrim:
            value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE]
            value += '...'

        # fix to work with unicode values
        try:
            if not IS_PY3K:
                if value.__class__ == unicode:  # @UndefinedVariable
                    value = value.encode('utf-8')
            else:
                if value.__class__ == bytes:
                    # NOTE(review): bytes has no .encode() on Python 3; if this
                    # branch is ever hit it raises AttributeError, which the
                    # TypeError handler below does NOT catch -- presumably
                    # .decode('utf-8') was intended. TODO confirm.
                    value = value.encode('utf-8')
        except TypeError:  # in java, unicode is a function
            pass

        xml_value = ' value="%s"' % (make_valid_xml_value(quote(value, '/>_= ')))
    else:
        xml_value = ''

    if is_exception_on_eval:
        xml_container = ' isErrorOnEval="True"'
    else:
        if resolver is not None:
            xml_container = ' isContainer="True"'
        else:
            xml_container = ''

    return ''.join((xml, xml_qualifier, xml_value, xml_container, additional_in_xml, ' />\n'))
|
||||
0
ptvsd/pydevd/_pydevd_frame_eval/__init__.py
Normal file
0
ptvsd/pydevd/_pydevd_frame_eval/__init__.py
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
try:
|
||||
from _pydevd_frame_eval.pydevd_frame_evaluator import frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, dummy_trace_dispatch
|
||||
except ImportError:
|
||||
try:
|
||||
import struct
|
||||
import sys
|
||||
try:
|
||||
is_64bits = sys.maxsize > 2**32
|
||||
except:
|
||||
# In Jython this call fails, but this is Ok, we don't support Jython for speedups anyways.
|
||||
raise ImportError
|
||||
plat = '32'
|
||||
if is_64bits:
|
||||
plat = '64'
|
||||
|
||||
# We also accept things as:
|
||||
#
|
||||
# _pydevd_frame_eval.pydevd_frame_evaluator_win32_27_32
|
||||
# _pydevd_frame_eval.pydevd_frame_evaluator_win32_34_64
|
||||
#
|
||||
# to have multiple pre-compiled pyds distributed along the IDE
|
||||
# (generated by build_tools/build_binaries_windows.py).
|
||||
|
||||
mod_name = 'pydevd_frame_evaluator_%s_%s%s_%s' % (sys.platform, sys.version_info[0], sys.version_info[1], plat)
|
||||
check_name = '_pydevd_frame_eval.%s' % (mod_name,)
|
||||
mod = __import__(check_name)
|
||||
mod = getattr(mod, mod_name)
|
||||
frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, dummy_trace_dispatch = \
|
||||
mod.frame_eval_func, mod.stop_frame_eval, mod.enable_cache_frames_without_breaks, mod.dummy_trace_dispatch
|
||||
except ImportError:
|
||||
raise
|
||||
28
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py
Normal file
28
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_eval_main.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
import os
|
||||
import sys
|
||||
|
||||
IS_PY36 = False
|
||||
if sys.version_info[0] == 3 and sys.version_info[1] == 6:
|
||||
IS_PY36 = True
|
||||
|
||||
frame_eval_func = None
|
||||
stop_frame_eval = None
|
||||
enable_cache_frames_without_breaks = None
|
||||
dummy_trace_dispatch = None
|
||||
|
||||
USE_FRAME_EVAL = os.environ.get('PYDEVD_USE_FRAME_EVAL', None)
|
||||
|
||||
if USE_FRAME_EVAL == 'NO':
|
||||
frame_eval_func, stop_frame_eval = None, None
|
||||
|
||||
else:
|
||||
if IS_PY36:
|
||||
try:
|
||||
from _pydevd_frame_eval.pydevd_frame_eval_cython_wrapper import frame_eval_func, stop_frame_eval, enable_cache_frames_without_breaks, \
|
||||
dummy_trace_dispatch
|
||||
except ImportError:
|
||||
from _pydev_bundle.pydev_monkey import log_error_once
|
||||
|
||||
dirname = os.path.dirname(os.path.dirname(__file__))
|
||||
log_error_once("warning: Debugger speedups using cython not found. Run '\"%s\" \"%s\" build_ext --inplace' to build." % (
|
||||
sys.executable, os.path.join(dirname, 'setup_cython.py')))
|
||||
6771
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c
Normal file
6771
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.c
Normal file
File diff suppressed because it is too large
Load diff
99
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd
Normal file
99
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pxd
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
from cpython.mem cimport PyMem_Malloc, PyMem_Free
|
||||
|
||||
cdef extern from *:
|
||||
ctypedef void PyObject
|
||||
ctypedef struct PyCodeObject:
|
||||
int co_argcount; # arguments, except *args */
|
||||
int co_kwonlyargcount; # keyword only arguments */
|
||||
int co_nlocals; # local variables */
|
||||
int co_stacksize; # entries needed for evaluation stack */
|
||||
int co_flags; # CO_..., see below */
|
||||
int co_firstlineno; # first source line number */
|
||||
PyObject *co_code; # instruction opcodes */
|
||||
PyObject *co_consts; # list (constants used) */
|
||||
PyObject *co_names; # list of strings (names used) */
|
||||
PyObject *co_varnames; # tuple of strings (local variable names) */
|
||||
PyObject *co_freevars; # tuple of strings (free variable names) */
|
||||
PyObject *co_cellvars; # tuple of strings (cell variable names) */
|
||||
unsigned char *co_cell2arg; # Maps cell vars which are arguments. */
|
||||
PyObject *co_filename; # unicode (where it was loaded from) */
|
||||
PyObject *co_name; # unicode (name, for reference) */
|
||||
PyObject *co_lnotab; # string (encoding addr<->lineno mapping) See
|
||||
# Objects/lnotab_notes.txt for details. */
|
||||
void *co_zombieframe; # for optimization only (see frameobject.c) */
|
||||
PyObject *co_weakreflist; # to support weakrefs to code objects */
|
||||
void *co_extra;
|
||||
|
||||
cdef extern from "frameobject.h":
|
||||
ctypedef struct PyFrameObject:
|
||||
PyCodeObject *f_code # code segment
|
||||
PyObject *f_builtins # builtin symbol table (PyDictObject)
|
||||
PyObject *f_globals # global symbol table (PyDictObject) */
|
||||
PyObject *f_locals # local symbol table (any mapping) */
|
||||
PyObject **f_valuestack #
|
||||
PyObject **f_stacktop
|
||||
PyObject *f_trace # Trace function */
|
||||
PyObject *f_exc_type
|
||||
PyObject *f_exc_value
|
||||
PyObject *f_exc_traceback
|
||||
PyObject *f_gen;
|
||||
|
||||
int f_lasti; #/* Last instruction if called */
|
||||
int f_lineno; #/* Current line number */
|
||||
int f_iblock; #/* index in f_blockstack */
|
||||
char f_executing; #/* whether the frame is still executing */
|
||||
PyObject *f_localsplus[1];
|
||||
|
||||
cdef extern from "code.h":
|
||||
ctypedef void freefunc(void *)
|
||||
int _PyCode_GetExtra(PyObject *code, Py_ssize_t index, void **extra)
|
||||
int _PyCode_SetExtra(PyObject *code, Py_ssize_t index, void *extra)
|
||||
|
||||
cdef extern from "Python.h":
|
||||
void Py_INCREF(object o)
|
||||
void Py_DECREF(object o)
|
||||
object PyImport_ImportModule(char *name)
|
||||
PyObject* PyObject_CallFunction(PyObject *callable, const char *format, ...)
|
||||
object PyObject_GetAttrString(object o, char *attr_name)
|
||||
|
||||
cdef extern from "pystate.h":
|
||||
ctypedef PyObject* _PyFrameEvalFunction(PyFrameObject *frame, int exc)
|
||||
|
||||
ctypedef struct PyInterpreterState:
|
||||
PyInterpreterState *next
|
||||
PyInterpreterState *tstate_head
|
||||
|
||||
PyObject *modules
|
||||
|
||||
PyObject *modules_by_index
|
||||
PyObject *sysdict
|
||||
PyObject *builtins
|
||||
PyObject *importlib
|
||||
|
||||
PyObject *codec_search_path
|
||||
PyObject *codec_search_cache
|
||||
PyObject *codec_error_registry
|
||||
int codecs_initialized
|
||||
int fscodec_initialized
|
||||
|
||||
int dlopenflags
|
||||
|
||||
PyObject *builtins_copy
|
||||
PyObject *import_func
|
||||
# Initialized to PyEval_EvalFrameDefault().
|
||||
_PyFrameEvalFunction eval_frame
|
||||
|
||||
ctypedef struct PyThreadState:
|
||||
PyThreadState *prev
|
||||
PyThreadState *next
|
||||
PyInterpreterState *interp
|
||||
# ...
|
||||
|
||||
PyThreadState *PyThreadState_Get()
|
||||
|
||||
cdef extern from "ceval.h":
|
||||
int _PyEval_RequestCodeExtraIndex(freefunc)
|
||||
PyFrameObject *PyEval_GetFrame()
|
||||
PyObject* PyEval_CallFunction(PyObject *callable, const char *format, ...)
|
||||
|
||||
PyObject* _PyEval_EvalFrameDefault(PyFrameObject *frame, int exc)
|
||||
159
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pyx
Normal file
159
ptvsd/pydevd/_pydevd_frame_eval/pydevd_frame_evaluator.pyx
Normal file
|
|
@ -0,0 +1,159 @@
|
|||
import dis
|
||||
from _pydev_imps._pydev_saved_modules import threading
|
||||
from _pydevd_bundle.pydevd_additional_thread_info import PyDBAdditionalThreadInfo
|
||||
from _pydevd_bundle.pydevd_comm import get_global_debugger
|
||||
from _pydevd_bundle.pydevd_dont_trace_files import DONT_TRACE
|
||||
from _pydevd_frame_eval.pydevd_frame_tracing import pydev_trace_code_wrapper, update_globals_dict, dummy_tracing_holder
|
||||
from _pydevd_frame_eval.pydevd_modify_bytecode import insert_code
|
||||
from pydevd_file_utils import get_abs_path_real_path_and_base_from_frame, NORM_PATHS_AND_BASE_CONTAINER
|
||||
|
||||
AVOID_RECURSION = [
|
||||
'pydevd_additional_thread_info_regular.py',
|
||||
'threading.py',
|
||||
'_weakrefset.py'
|
||||
]
|
||||
|
||||
get_file_type = DONT_TRACE.get
|
||||
NO_BREAKS_IN_FRAME = 1
|
||||
|
||||
|
||||
class UseCodeExtraHolder:
|
||||
# Use this flag in order to disable co_extra field
|
||||
use_code_extra = True
|
||||
# Keep the index of co_extra in a thread-local storage
|
||||
local = threading.local()
|
||||
local.index = -1
|
||||
|
||||
|
||||
def is_use_code_extra():
|
||||
return UseCodeExtraHolder.use_code_extra
|
||||
|
||||
|
||||
# enable using `co_extra` field in order to cache frames without breakpoints
|
||||
def enable_cache_frames_without_breaks(new_value):
|
||||
UseCodeExtraHolder.use_code_extra = new_value
|
||||
|
||||
|
||||
cpdef dummy_trace_dispatch(frame, str event, arg):
|
||||
return None
|
||||
|
||||
|
||||
cdef PyObject* get_bytecode_while_frame_eval(PyFrameObject *frame_obj, int exc):
|
||||
frame = <object> frame_obj
|
||||
cdef str filepath = frame.f_code.co_filename
|
||||
cdef bint skip_file = exc
|
||||
cdef void* extra = NULL
|
||||
cdef int* extra_value = NULL
|
||||
cdef int thread_index = -1
|
||||
|
||||
if is_use_code_extra is None or AVOID_RECURSION is None:
|
||||
# Sometimes during process shutdown these global variables become None
|
||||
return _PyEval_EvalFrameDefault(frame_obj, exc)
|
||||
|
||||
if is_use_code_extra():
|
||||
extra = PyMem_Malloc(sizeof(int))
|
||||
try:
|
||||
thread_index = UseCodeExtraHolder.local.index
|
||||
except:
|
||||
pass
|
||||
if thread_index != -1:
|
||||
_PyCode_GetExtra(<PyObject*> frame.f_code, thread_index, &extra)
|
||||
if extra is not NULL:
|
||||
extra_value = <int*> extra
|
||||
if extra_value[0] == NO_BREAKS_IN_FRAME:
|
||||
return _PyEval_EvalFrameDefault(frame_obj, exc)
|
||||
|
||||
for file in AVOID_RECURSION:
|
||||
# we can't call any other function without this check, because we can get stack overflow
|
||||
for path_separator in ('/', '\\'):
|
||||
if filepath.endswith(path_separator + file):
|
||||
skip_file = True
|
||||
break
|
||||
|
||||
if not skip_file:
|
||||
try:
|
||||
t = threading.currentThread()
|
||||
except:
|
||||
skip_file = True
|
||||
|
||||
if not skip_file:
|
||||
try:
|
||||
additional_info = t.additional_info
|
||||
if additional_info is None:
|
||||
raise AttributeError()
|
||||
except:
|
||||
additional_info = t.additional_info = PyDBAdditionalThreadInfo()
|
||||
# request `co_extra` inside every new thread
|
||||
thread_index = _PyEval_RequestCodeExtraIndex(PyMem_Free)
|
||||
UseCodeExtraHolder.local.index = thread_index
|
||||
|
||||
if additional_info.is_tracing or getattr(t, 'pydev_do_not_trace', None):
|
||||
return _PyEval_EvalFrameDefault(frame_obj, exc)
|
||||
|
||||
additional_info.is_tracing = True
|
||||
try:
|
||||
abs_path_real_path_and_base = NORM_PATHS_AND_BASE_CONTAINER[frame.f_code.co_filename]
|
||||
except:
|
||||
abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_frame(frame)
|
||||
|
||||
file_type = get_file_type(abs_path_real_path_and_base[-1]) #we don't want to debug anything related to pydevd
|
||||
if file_type is not None:
|
||||
additional_info.is_tracing = False
|
||||
return _PyEval_EvalFrameDefault(frame_obj, exc)
|
||||
|
||||
was_break = False
|
||||
main_debugger = get_global_debugger()
|
||||
breakpoints = main_debugger.breakpoints.get(abs_path_real_path_and_base[1])
|
||||
code_object = frame.f_code
|
||||
if breakpoints:
|
||||
breakpoints_to_update = []
|
||||
for offset, line in dis.findlinestarts(code_object):
|
||||
if line in breakpoints:
|
||||
breakpoint = breakpoints[line]
|
||||
if code_object not in breakpoint.code_objects:
|
||||
# This check is needed for generator functions, because after each yield the new frame is created
|
||||
# but the former code object is used
|
||||
success, new_code = insert_code(frame.f_code, pydev_trace_code_wrapper.__code__, line)
|
||||
if success:
|
||||
breakpoints_to_update.append(breakpoint)
|
||||
Py_INCREF(new_code)
|
||||
frame_obj.f_code = <PyCodeObject *> new_code
|
||||
was_break = True
|
||||
else:
|
||||
main_debugger.set_trace_for_frame_and_parents(frame)
|
||||
was_break = False
|
||||
break
|
||||
if was_break:
|
||||
update_globals_dict(frame.f_globals)
|
||||
for bp in breakpoints_to_update:
|
||||
bp.code_objects.add(frame.f_code)
|
||||
else:
|
||||
if main_debugger.has_plugin_line_breaks:
|
||||
can_not_skip = main_debugger.plugin.can_not_skip(main_debugger, None, frame)
|
||||
if can_not_skip:
|
||||
was_break = True
|
||||
main_debugger.SetTrace(main_debugger.trace_dispatch)
|
||||
main_debugger.set_trace_for_frame_and_parents(frame)
|
||||
|
||||
if not was_break:
|
||||
extra_value = <int*> PyMem_Malloc(sizeof(int))
|
||||
extra_value[0] = NO_BREAKS_IN_FRAME
|
||||
try:
|
||||
thread_index = UseCodeExtraHolder.local.index
|
||||
except:
|
||||
pass
|
||||
if thread_index != -1:
|
||||
_PyCode_SetExtra(<PyObject*> code_object, thread_index, extra_value)
|
||||
|
||||
additional_info.is_tracing = False
|
||||
return _PyEval_EvalFrameDefault(frame_obj, exc)
|
||||
|
||||
def frame_eval_func():
|
||||
cdef PyThreadState *state = PyThreadState_Get()
|
||||
state.interp.eval_frame = get_bytecode_while_frame_eval
|
||||
global dummy_tracing_holder
|
||||
dummy_tracing_holder.set_trace_func(dummy_trace_dispatch)
|
||||
|
||||
def stop_frame_eval():
|
||||
cdef PyThreadState *state = PyThreadState_Get()
|
||||
state.interp.eval_frame = _PyEval_EvalFrameDefault
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue