Mirror of https://github.com/python/cpython.git, synced 2025-09-27 02:39:58 +00:00
Merged revisions 78982,78986 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

........
  r78982 | florent.xicluna | 2010-03-15 15:00:58 +0100 (Mon, 15 Mar 2010) | 2 lines

  Remove py3k deprecation warnings from these Unicode tools.
........
  r78986 | florent.xicluna | 2010-03-15 19:08:58 +0100 (Mon, 15 Mar 2010) | 3 lines

  Issue #7783 and #7787: open_urlresource invalidates the outdated files
  from the local cache. Use this feature to fix test_normalization.
........
Parent: faa663f03d
Commit: f089fd67fc

5 changed files with 40 additions and 20 deletions
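As a rough sketch of what r78986 enables (based on the diff below; this snippet is not part of the commit), open_urlresource() now takes an optional check callable, and a cached file that fails the check is removed and re-downloaded:

    from test.support import open_urlresource
    from unicodedata import unidata_version

    # Validator used by test_normalization (see the diff below): the cached file is
    # only considered valid if its header mentions the current Unicode data version.
    def check_version(testfile):
        hdr = testfile.readline()
        return unidata_version in hdr

    # URL and filename as built in test_normalization.  If the cached copy fails
    # check_version, open_urlresource() unlinks it and downloads a fresh one
    # (fetching still requires the 'urlfetch' test resource to be enabled).
    TESTDATAURL = ("http://www.unicode.org/Public/" + unidata_version +
                   "/ucd/NormalizationTest.txt")
    testdata = open_urlresource(TESTDATAURL, encoding="utf-8", check=check_version)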
@@ -33,6 +33,7 @@ __all__ = ["Error", "TestFailed", "ResourceDenied", "import_module",
            "reap_children", "cpython_only", "check_impl_detail", "get_attribute",
            "swap_item", "swap_attr"]
 
+
 class Error(Exception):
     """Base class for regression test exceptions."""
 
@@ -444,12 +445,29 @@ def check_syntax_error(testcase, statement):
 def open_urlresource(url, *args, **kw):
     import urllib.request, urllib.parse
 
-    requires('urlfetch')
+    check = kw.pop('check', None)
+
     filename = urllib.parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!
 
     fn = os.path.join(os.path.dirname(__file__), "data", filename)
+
+    def check_valid_file(fn):
+        f = open(fn, *args, **kw)
+        if check is None:
+            return f
+        elif check(f):
+            f.seek(0)
+            return f
+        f.close()
+
     if os.path.exists(fn):
-        return open(fn, *args, **kw)
+        f = check_valid_file(fn)
+        if f is not None:
+            return f
+        unlink(fn)
+
+    # Verify the requirement before downloading the file
+    requires('urlfetch')
 
     print('\tfetching %s ...' % url, file=get_original_stdout())
     f = urllib.request.urlopen(url, timeout=15)
@@ -461,7 +479,12 @@ def open_urlresource(url, *args, **kw):
                 s = f.read()
     finally:
         f.close()
-    return open(fn, *args, **kw)
+
+    f = check_valid_file(fn)
+    if f is not None:
+        return f
+    raise TestFailed('invalid resource "%s"' % fn)
+
 
 class WarningsRecorder(object):
     """Convenience wrapper for the warnings list returned on
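A note on the f.seek(0) in the new check_valid_file helper above: the check callback consumes the start of the stream (check_version reads the header line), so the file must be rewound before it is handed back to the caller. A minimal stand-alone sketch, using a made-up file name and version marker:

    # Illustration only; "sample.txt" and the "5.2.0" marker are made-up stand-ins.
    with open("sample.txt", "w", encoding="utf-8") as out:
        out.write("# NormalizationTest-5.2.0.txt\nDATA LINE 1\n")

    f = open("sample.txt", encoding="utf-8")
    assert "5.2.0" in f.readline()        # the validator consumed the header line
    f.seek(0)                             # rewind, as check_valid_file does
    assert f.readline().startswith("#")   # the caller still sees the file from the top
    f.close()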
@@ -9,14 +9,9 @@ from unicodedata import normalize, unidata_version
 TESTDATAFILE = "NormalizationTest.txt"
 TESTDATAURL = "http://www.unicode.org/Public/" + unidata_version + "/ucd/" + TESTDATAFILE
 
-# Verify we have the correct version of the test data file.
-TESTDATAPATH = os.path.join(os.path.dirname(__file__), "data", TESTDATAFILE)
-if os.path.exists(TESTDATAPATH):
-    f = open(TESTDATAPATH, encoding='utf-8')
-    l = f.readline()
-    f.close()
-    if not unidata_version in l:
-        os.unlink(testdatafile)
+def check_version(testfile):
+    hdr = testfile.readline()
+    return unidata_version in hdr
 
 class RangeError(Exception):
     pass
@@ -42,13 +37,15 @@ def unistr(data):
 
 class NormalizationTest(unittest.TestCase):
     def test_main(self):
+        part = None
         part1_data = {}
         # Hit the exception early
         try:
-            open_urlresource(TESTDATAURL, encoding="utf-8")
+            testdata = open_urlresource(TESTDATAURL, encoding="utf-8",
+                                        check=check_version)
         except (IOError, HTTPException):
             self.skipTest("Could not retrieve " + TESTDATAURL)
-        for line in open_urlresource(TESTDATAURL, encoding="utf-8"):
+        for line in testdata:
             if '#' in line:
                 line = line.split('#')[0]
             line = line.strip()
@@ -910,6 +910,9 @@ Documentation
 Tests
 -----
 
+- Issue #7783: test.test_support.open_urlresource invalidates the outdated
+  files from the local cache.
+
 - Issue #7849: Now the utility ``check_warnings`` verifies if the warnings are
   effectively raised.
 
@@ -40,8 +40,7 @@ mapRE = re.compile('((?:0x[0-9a-fA-F]+\+?)+)'
                    '\s*'
                    '(#.+)?')
 
-def parsecodes(codes,
-               len=len, filter=filter,range=range):
+def parsecodes(codes, len=len, range=range):
 
     """ Converts code combinations to either a single code integer
         or a tuple of integers.
@@ -517,8 +517,7 @@ def makeunicodetype(unicode, trace):
 
     haswide = False
     hasnonewide = False
-    spaces.sort()
-    for codepoint in spaces:
+    for codepoint in sorted(spaces):
         if codepoint < 0x10000:
             hasnonewide = True
         if codepoint >= 0x10000 and not haswide:
@@ -546,8 +545,7 @@ def makeunicodetype(unicode, trace):
     print('    switch (ch) {', file=fp)
     haswide = False
     hasnonewide = False
-    linebreaks.sort()
-    for codepoint in linebreaks:
+    for codepoint in sorted(linebreaks):
         if codepoint < 0x10000:
             hasnonewide = True
         if codepoint >= 0x10000 and not haswide:
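The two hunks above, from the Unicode-tools half of this merge (r78982), replace the sort-then-iterate pattern with sorted(), which leaves the original list untouched and fits in a single expression. A small illustration with made-up code points:

    spaces = [0x2028, 0x20, 0x3000]                  # made-up sample data

    spaces.sort()                                    # old pattern: sort in place, then iterate
    old_order = [hex(cp) for cp in spaces]

    spaces = [0x3000, 0x2028, 0x20]                  # reset in arbitrary order
    new_order = [hex(cp) for cp in sorted(spaces)]   # new pattern: iterate over a sorted copy

    assert old_order == new_order == ['0x20', '0x2028', '0x3000']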