Mirror of https://github.com/python/cpython.git
Updated the warnings, linecache, inspect, traceback, site, and doctest modules
to work correctly with modules imported from zipfiles or via other PEP 302 __loader__ objects. Tests and doc updates are included.
Parent: 7731dfdaad
Commit: 4703211080
10 changed files with 159 additions and 30 deletions
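The overall effect, as a minimal sketch that is not part of the diff (the archive and module names below are illustrative): once a module has been imported from a zipfile, its source lines can be fetched through linecache by passing the module's globals, which is what traceback, warnings, inspect, and doctest now do internally.

    import sys
    import zipfile
    import linecache

    # Build a small archive containing one module (illustrative names).
    zf = zipfile.ZipFile("example.zip", "w")
    zf.writestr("zipped_mod.py", "def answer():\n    return 42\n")
    zf.close()

    sys.path.insert(0, "example.zip")      # handled by zipimport
    mod = __import__("zipped_mod")         # mod.__loader__ is a PEP 302 loader

    # The third argument is new; without it the lookup fails because the
    # "file" inside the archive does not exist on the filesystem.
    print(linecache.getline(mod.__file__, 1, mod.__dict__))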
@@ -15,7 +15,7 @@ the formatted traceback.
 
 The \module{linecache} module defines the following functions:
 
-\begin{funcdesc}{getline}{filename, lineno}
+\begin{funcdesc}{getline}{filename, lineno\optional{, module_globals}}
 Get line \var{lineno} from file named \var{filename}. This function
 will never throw an exception --- it will return \code{''} on errors
 (the terminating newline character will be included for lines that are
@@ -23,7 +23,11 @@ found).
 
 If a file named \var{filename} is not found, the function will look
 for it in the module\indexiii{module}{search}{path} search path,
-\code{sys.path}.
+\code{sys.path}, after first checking for a PEP 302 \code{__loader__}
+in \var{module_globals}, in case the module was imported from a zipfile
+or other non-filesystem import source.
+
+\versionadded[The \var{module_globals} parameter was added]{2.5}
 \end{funcdesc}
 
 \begin{funcdesc}{clearcache}{}

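A sketch of the documented lookup order, not part of the patch; the loader class and module name are made up. Any object in the supplied globals that exposes a PEP 302 get_source() is consulted before the sys.path search:

    import linecache

    class FakeLoader:                      # stands in for e.g. a zipimporter
        def get_source(self, name):
            return "first line\nsecond line\n"

    fake_globals = {'__name__': 'fakemod', '__loader__': FakeLoader()}

    # Assuming no real fakemod.py exists nearby, the line still comes back
    # through the loader instead of the '' error fallback.
    print(linecache.getline('fakemod.py', 2, fake_globals))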
@@ -169,7 +169,8 @@ the latter would defeat the purpose of the warning message).
 \end{funcdesc}
 
 \begin{funcdesc}{warn_explicit}{message, category, filename,
-lineno\optional{, module\optional{, registry}}}
+lineno\optional{, module\optional{, registry\optional{,
+module_globals}}}}
 This is a low-level interface to the functionality of
 \function{warn()}, passing in explicitly the message, category,
 filename and line number, and optionally the module name and the
@@ -179,6 +180,11 @@ stripped; if no registry is passed, the warning is never suppressed.
 \var{message} must be a string and \var{category} a subclass of
 \exception{Warning} or \var{message} may be a \exception{Warning} instance,
 in which case \var{category} will be ignored.
+
+\var{module_globals}, if supplied, should be the global namespace in use
+by the code for which the warning is issued. (This argument is used to
+support displaying source for modules found in zipfiles or other
+non-filesystem import sources, and was added in Python 2.5.)
 \end{funcdesc}
 
 \begin{funcdesc}{showwarning}{message, category, filename,

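Not part of the diff: a hedged sketch of the new warn_explicit() argument, using a made-up loader so the example stands alone. Passing the issuing module's globals lets the warning machinery prime linecache, so the offending source line can be shown even when it lives inside a zipfile:

    import warnings

    class FakeLoader:                      # illustrative PEP 302-style loader
        def get_source(self, name):
            return "import os\nos.getcwd()\n"

    fake_globals = {'__name__': 'zipped', '__loader__': FakeLoader()}

    # The formatted warning can now include line 2 of the (virtual) zipped.py.
    warnings.warn_explicit("example warning", UserWarning,
                           "zipped.py", 2, module_globals=fake_globals)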
@@ -236,6 +236,15 @@ def _normalize_module(module, depth=2):
     else:
         raise TypeError("Expected a module, string, or None")
 
+def _load_testfile(filename, package, module_relative):
+    if module_relative:
+        package = _normalize_module(package, 3)
+        filename = _module_relative_path(package, filename)
+    if hasattr(package, '__loader__'):
+        if hasattr(package.__loader__, 'get_data'):
+            return package.__loader__.get_data(filename), filename
+    return open(filename).read(), filename
+
 def _indent(s, indent=4):
     """
     Add the given number of space characters to the beginning every
@@ -1319,13 +1328,13 @@ class DocTestRunner:
     __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
                                          r'(?P<name>[\w\.]+)'
                                          r'\[(?P<examplenum>\d+)\]>$')
-    def __patched_linecache_getlines(self, filename):
+    def __patched_linecache_getlines(self, filename, module_globals=None):
         m = self.__LINECACHE_FILENAME_RE.match(filename)
         if m and m.group('name') == self.test.name:
             example = self.test.examples[int(m.group('examplenum'))]
             return example.source.splitlines(True)
         else:
-            return self.save_linecache_getlines(filename)
+            return self.save_linecache_getlines(filename, module_globals)
 
     def run(self, test, compileflags=None, out=None, clear_globs=True):
         """
@@ -1933,9 +1942,7 @@ def testfile(filename, module_relative=True, name=None, package=None,
                          "relative paths.")
 
     # Relativize the path
-    if module_relative:
-        package = _normalize_module(package)
-        filename = _module_relative_path(package, filename)
+    text, filename = _load_testfile(filename, package, module_relative)
 
     # If no name was given, then use the file's name.
     if name is None:
@@ -1955,8 +1962,7 @@ def testfile(filename, module_relative=True, name=None, package=None,
         runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
 
     # Read the file, convert it to a test, and run it.
-    s = open(filename).read()
-    test = parser.get_doctest(s, globs, name, filename, 0)
+    test = parser.get_doctest(text, globs, name, filename, 0)
     runner.run(test)
 
     if report:
@@ -2336,15 +2342,13 @@ def DocFileTest(path, module_relative=True, package=None,
                          "relative paths.")
 
     # Relativize the path.
-    if module_relative:
-        package = _normalize_module(package)
-        path = _module_relative_path(package, path)
+    doc, path = _load_testfile(path, package, module_relative)
+
     if "__file__" not in globs:
         globs["__file__"] = path
 
     # Find the file and read it.
     name = os.path.basename(path)
-    doc = open(path).read()
 
     # Convert it to a test, and wrap it in a DocFileCase.
     test = parser.get_doctest(doc, globs, name, path, 0)

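The doctest changes route file loading through the package's __loader__ when one is present. A standalone sketch along those lines (names are illustrative, not from the commit):

    import sys
    import zipfile
    import doctest

    zf = zipfile.ZipFile("doctests.zip", "w")
    zf.writestr("zmod.py", "log = []\n")
    zf.writestr("example.txt", ">>> log.append(True)\n")
    zf.close()

    sys.path.insert(0, "doctests.zip")
    zmod = __import__("zmod")

    # example.txt is resolved relative to zmod and read via
    # zmod.__loader__.get_data() rather than open().
    doctest.testfile("example.txt", package=zmod, module_relative=True,
                     globs={'log': zmod.log})
    print(zmod.log)        # [True] once the embedded example has run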
@@ -353,7 +353,7 @@ def getsourcefile(object):
         if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
             # Looks like a binary file. We want to only return a text file.
             return None
-    if os.path.exists(filename):
+    if os.path.exists(filename) or hasattr(getmodule(object),'__loader__'):
         return filename
 
 def getabsfile(object):
@@ -379,7 +379,7 @@ def getmodule(object):
     if file in modulesbyfile:
         return sys.modules.get(modulesbyfile[file])
     for module in sys.modules.values():
-        if hasattr(module, '__file__'):
+        if ismodule(module) and hasattr(module, '__file__'):
             modulesbyfile[
                 os.path.realpath(
                     getabsfile(module))] = module.__name__
@@ -406,7 +406,7 @@ def findsource(object):
     in the file and the line number indexes a line in that list. An IOError
     is raised if the source code cannot be retrieved."""
     file = getsourcefile(object) or getfile(object)
-    lines = linecache.getlines(file)
+    lines = linecache.getlines(file, getmodule(object).__dict__)
     if not lines:
         raise IOError('could not get source code')
 

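With getsourcefile() accepting loader-backed modules and findsource() passing the module globals to linecache, inspect can show source it previously could not. A self-contained sketch, with illustrative names:

    import sys
    import zipfile
    import inspect

    zf = zipfile.ZipFile("inspect_demo.zip", "w")
    zf.writestr("zgreet.py", "def greet(name):\n    return 'hello ' + name\n")
    zf.close()

    sys.path.insert(0, "inspect_demo.zip")
    zgreet = __import__("zgreet")

    # Previously this raised IOError('could not get source code'); now it
    # prints the function body fetched through zgreet.__loader__.get_source().
    print(inspect.getsource(zgreet.greet))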
@@ -10,8 +10,8 @@ import os
 
 __all__ = ["getline", "clearcache", "checkcache"]
 
-def getline(filename, lineno):
-    lines = getlines(filename)
+def getline(filename, lineno, module_globals=None):
+    lines = getlines(filename, module_globals)
     if 1 <= lineno <= len(lines):
         return lines[lineno-1]
     else:
@@ -30,14 +30,14 @@ def clearcache():
     cache = {}
 
 
-def getlines(filename):
+def getlines(filename, module_globals=None):
     """Get the lines for a file from the cache.
     Update the cache if it doesn't contain an entry for this file already."""
 
     if filename in cache:
         return cache[filename][2]
     else:
-        return updatecache(filename)
+        return updatecache(filename,module_globals)
 
 
 def checkcache(filename=None):
@@ -54,6 +54,8 @@ def checkcache(filename=None):
 
     for filename in filenames:
         size, mtime, lines, fullname = cache[filename]
+        if mtime is None:
+            continue # no-op for files loaded via a __loader__
         try:
             stat = os.stat(fullname)
         except os.error:
@@ -63,7 +65,7 @@ def checkcache(filename=None):
             del cache[filename]
 
 
-def updatecache(filename):
+def updatecache(filename, module_globals=None):
     """Update a cache entry and return its list of lines.
     If something's wrong, print a message, discard the cache entry,
     and return an empty list."""
@@ -72,12 +74,34 @@ def updatecache(filename):
         del cache[filename]
     if not filename or filename[0] + filename[-1] == '<>':
         return []
+
     fullname = filename
     try:
         stat = os.stat(fullname)
     except os.error, msg:
-        # Try looking through the module search path.
         basename = os.path.split(filename)[1]
+
+        # Try for a __loader__, if available
+        if module_globals and '__loader__' in module_globals:
+            name = module_globals.get('__name__')
+            loader = module_globals['__loader__']
+            get_source = getattr(loader, 'get_source' ,None)
+
+            if name and get_source:
+                if basename.startswith(name.split('.')[-1]+'.'):
+                    try:
+                        data = get_source(name)
+                    except (ImportError,IOError):
+                        pass
+                    else:
+                        cache[filename] = (
+                            len(data), None,
+                            [line+'\n' for line in data.splitlines()], fullname
+                        )
+                        return cache[filename][2]
+
+        # Try looking through the module search path.
+
         for dirname in sys.path:
             # When using imputil, sys.path may contain things other than
             # strings; ignore them when it happens.

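Entries created through a loader are cached with a size but no mtime, which is why checkcache() now skips them instead of trying to stat a file that does not exist. A sketch that pokes at the cache directly, purely for illustration (FakeLoader and the module name are made up):

    import linecache

    class FakeLoader:
        def get_source(self, name):
            return "x = 1\ny = 2\n"

    g = {'__name__': 'cached_fake', '__loader__': FakeLoader()}
    linecache.getline('cached_fake.py', 1, g)       # populates the cache

    size, mtime, lines, fullname = linecache.cache['cached_fake.py']
    print(mtime)               # None -> nothing on disk to re-stat
    linecache.checkcache()     # loader-backed entry is left alone
    print(lines[1])            # 'y = 2\n' still served from the cache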
@@ -69,6 +69,8 @@ def makepath(*paths):
 def abs__file__():
     """Set all module' __file__ attribute to an absolute path"""
     for m in sys.modules.values():
+        if hasattr(m,'__loader__'):
+            continue # don't mess with a PEP 302-supplied __file__
         try:
             m.__file__ = os.path.abspath(m.__file__)
         except AttributeError:

@@ -12,7 +12,12 @@ from test import test_support
 from test.test_importhooks import ImportHooksBaseTestCase, test_src, test_co
 
 import zipimport
-
+import linecache
+import doctest
+import inspect
+import StringIO
+from traceback import extract_tb, extract_stack, print_tb
+raise_src = 'def do_raise(): raise TypeError\n'
 
 # so we only run testAFakeZlib once if this test is run repeatedly
 # which happens when we look for ref leaks
@@ -54,7 +59,8 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
 
     def setUp(self):
         # We're reusing the zip archive path, so we must clear the
-        # cached directory info.
+        # cached directory info and linecache
+        linecache.clearcache()
         zipimport._zip_directory_cache.clear()
         ImportHooksBaseTestCase.setUp(self)
 
@@ -83,6 +89,11 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
 
         mod = __import__(".".join(modules), globals(), locals(),
                          ["__dummy__"])
+
+        call = kw.get('call')
+        if call is not None:
+            call(mod)
+
         if expected_ext:
             file = mod.get_file()
             self.assertEquals(file, os.path.join(TEMP_ZIP,
@@ -249,6 +260,74 @@ class UncompressedZipImportTestCase(ImportHooksBaseTestCase):
         self.doTest(".py", files, TESTMOD,
                     stuff="Some Stuff"*31)
 
+    def assertModuleSource(self, module):
+        self.assertEqual(inspect.getsource(module), test_src)
+
+    def testGetSource(self):
+        files = {TESTMOD + ".py": (NOW, test_src)}
+        self.doTest(".py", files, TESTMOD, call=self.assertModuleSource)
+
+    def testGetCompiledSource(self):
+        pyc = make_pyc(compile(test_src, "<???>", "exec"), NOW)
+        files = {TESTMOD + ".py": (NOW, test_src),
+                 TESTMOD + pyc_ext: (NOW, pyc)}
+        self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource)
+
+    def runDoctest(self, callback):
+        files = {TESTMOD + ".py": (NOW, test_src),
+                 "xyz.txt": (NOW, ">>> log.append(True)\n")}
+        self.doTest(".py", files, TESTMOD, call=callback)
+
+    def doDoctestFile(self, module):
+        log = []
+        old_master, doctest.master = doctest.master, None
+        try:
+            doctest.testfile(
+                'xyz.txt', package=module, module_relative=True,
+                globs=locals()
+            )
+        finally:
+            doctest.master = old_master
+        self.assertEqual(log,[True])
+
+    def testDoctestFile(self):
+        self.runDoctest(self.doDoctestFile)
+
+    def doDoctestSuite(self, module):
+        log = []
+        doctest.DocFileTest(
+            'xyz.txt', package=module, module_relative=True,
+            globs=locals()
+        ).run()
+        self.assertEqual(log,[True])
+
+    def testDoctestSuite(self):
+        self.runDoctest(self.doDoctestSuite)
+
+
+    def doTraceback(self, module):
+        try:
+            module.do_raise()
+        except:
+            tb = sys.exc_info()[2].tb_next
+
+            f,lno,n,line = extract_tb(tb, 1)[0]
+            self.assertEqual(line, raise_src.strip())
+
+            f,lno,n,line = extract_stack(tb.tb_frame, 1)[0]
+            self.assertEqual(line, raise_src.strip())
+
+            s = StringIO.StringIO()
+            print_tb(tb, 1, s)
+            self.failUnless(s.getvalue().endswith(raise_src))
+
+        else:
+            raise AssertionError("This ought to be impossible")
+
+    def testTraceback(self):
+        files = {TESTMOD + ".py": (NOW, raise_src)}
+        self.doTest(None, files, TESTMOD, call=self.doTraceback)
+
 
 class CompressedZipImportTestCase(UncompressedZipImportTestCase):
     compression = ZIP_DEFLATED
 

@@ -66,7 +66,7 @@ def print_tb(tb, limit=None, file=None):
         _print(file,
                '  File "%s", line %d, in %s' % (filename,lineno,name))
         linecache.checkcache(filename)
-        line = linecache.getline(filename, lineno)
+        line = linecache.getline(filename, lineno, f.f_globals)
         if line: _print(file, '    ' + line.strip())
         tb = tb.tb_next
         n = n+1
@@ -98,7 +98,7 @@ def extract_tb(tb, limit = None):
         filename = co.co_filename
         name = co.co_name
         linecache.checkcache(filename)
-        line = linecache.getline(filename, lineno)
+        line = linecache.getline(filename, lineno, f.f_globals)
         if line: line = line.strip()
         else: line = None
         list.append((filename, lineno, name, line))
@@ -281,7 +281,7 @@ def extract_stack(f=None, limit = None):
         filename = co.co_filename
         name = co.co_name
         linecache.checkcache(filename)
-        line = linecache.getline(filename, lineno)
+        line = linecache.getline(filename, lineno, f.f_globals)
         if line: line = line.strip()
         else: line = None
         list.append((filename, lineno, name, line))

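The three traceback helpers now hand the frame's globals to linecache, so frames that live in a zipfile get real source text. A standalone sketch, with illustrative archive and module names:

    import sys
    import zipfile
    import traceback

    zf = zipfile.ZipFile("tb_demo.zip", "w")
    zf.writestr("zboom.py", "def boom():\n    raise ValueError('boom')\n")
    zf.close()

    sys.path.insert(0, "tb_demo.zip")
    zboom = __import__("zboom")

    try:
        zboom.boom()
    except ValueError:
        tb = sys.exc_info()[2]
        # The source text of the last entry used to be empty for zip-imported
        # code; now it is the failing line, "raise ValueError('boom')".
        print(traceback.extract_tb(tb)[-1][3])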
@@ -58,10 +58,11 @@ def warn(message, category=None, stacklevel=1):
     if not filename:
         filename = module
     registry = globals.setdefault("__warningregistry__", {})
-    warn_explicit(message, category, filename, lineno, module, registry)
+    warn_explicit(message, category, filename, lineno, module, registry,
+                  globals)
 
 def warn_explicit(message, category, filename, lineno,
-                  module=None, registry=None):
+                  module=None, registry=None, module_globals=None):
     if module is None:
         module = filename or "<unknown>"
         if module[-3:].lower() == ".py":
@@ -92,6 +93,11 @@ def warn_explicit(message, category, filename, lineno,
     if action == "ignore":
         registry[key] = 1
         return
+
+    # Prime the linecache for formatting, in case the
+    # "file" is actually in a zipfile or something.
+    linecache.getlines(filename, module_globals)
+
     if action == "error":
         raise message
     # Other actions

@@ -30,6 +30,10 @@ Extension Modules
 Library
 -------
 
+- The warnings, linecache, inspect, traceback, site, and doctest modules
+  were updated to work correctly with modules imported from zipfiles or
+  via other PEP 302 __loader__ objects.
+
 - Patch #1467770: Reduce usage of subprocess._active to processes which
   the application hasn't waited on.
 