Mirror of https://github.com/python/cpython.git (synced 2025-07-24 03:35:53 +00:00)

Whitespace normalization.

parent 15c1fe5047
commit f733abb783

21 changed files with 128 additions and 128 deletions
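
The change is purely mechanical: every deleted line differs from the line added in its place only in whitespace (trailing spaces, blank lines that contained spaces, or stray indentation), which is why the addition and deletion counts match exactly. Whitespace-normalization commits of this era were typically produced by running a tool such as Tools/scripts/reindent.py over the tree; the snippet below is only a minimal illustrative sketch of that kind of pass (it is not the script used for this commit) that strips trailing whitespace in place:

# Hypothetical helper, not part of this commit: rewrite each file named on
# the command line so that no line carries trailing spaces or tabs.
# Note: it also forces a final newline, which such tools typically do as well.
import sys

def normalize_whitespace(path):
    with open(path) as f:
        lines = f.readlines()
    fixed = [line.rstrip() + "\n" for line in lines]
    if fixed != lines:
        with open(path, "w") as f:
            f.writelines(fixed)
        return True
    return False

if __name__ == "__main__":
    for name in sys.argv[1:]:
        if normalize_whitespace(name):
            sys.stdout.write("normalized %s\n" % name)

Run over a checkout, a pass like this yields exactly the kind of paired deletions and additions shown in the hunks below.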
@@ -107,7 +107,7 @@ class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
         """Execute a CGI script."""
         path = self.path
         dir, rest = self.cgi_info
-
+
         i = path.find('/', len(dir) + 1)
         while i >= 0:
             nextdir = path[:i]

@@ -1318,26 +1318,26 @@ class CookieJar:
         self._cookies_lock.acquire()
         try:

-            self._policy._now = self._now = int(time.time())
+            self._policy._now = self._now = int(time.time())

-            cookies = self._cookies_for_request(request)
+            cookies = self._cookies_for_request(request)

-            attrs = self._cookie_attrs(cookies)
-            if attrs:
-                if not request.has_header("Cookie"):
-                    request.add_unredirected_header(
-                        "Cookie", "; ".join(attrs))
+            attrs = self._cookie_attrs(cookies)
+            if attrs:
+                if not request.has_header("Cookie"):
+                    request.add_unredirected_header(
+                        "Cookie", "; ".join(attrs))

-            # if necessary, advertise that we know RFC 2965
-            if (self._policy.rfc2965 and not self._policy.hide_cookie2 and
-                not request.has_header("Cookie2")):
-                for cookie in cookies:
-                    if cookie.version != 1:
-                        request.add_unredirected_header("Cookie2", '$Version="1"')
-                        break
-
+            # if necessary, advertise that we know RFC 2965
+            if (self._policy.rfc2965 and not self._policy.hide_cookie2 and
+                not request.has_header("Cookie2")):
+                for cookie in cookies:
+                    if cookie.version != 1:
+                        request.add_unredirected_header("Cookie2", '$Version="1"')
+                        break
+
         finally:
-            self._cookies_lock.release()
+            self._cookies_lock.release()

         self.clear_expired_cookies()

@@ -1609,7 +1609,7 @@ class CookieJar:

             if self._policy.set_ok(cookie, request):
                 self.set_cookie(cookie)

-
+
         finally:
             self._cookies_lock.release()
@@ -1632,14 +1632,14 @@ class CookieJar:
         _debug("extract_cookies: %s", response.info())
         self._cookies_lock.acquire()
         try:
-            self._policy._now = self._now = int(time.time())
+            self._policy._now = self._now = int(time.time())

-            for cookie in self.make_cookies(response, request):
-                if self._policy.set_ok(cookie, request):
-                    _debug(" setting cookie: %s", cookie)
-                    self.set_cookie(cookie)
+            for cookie in self.make_cookies(response, request):
+                if self._policy.set_ok(cookie, request):
+                    _debug(" setting cookie: %s", cookie)
+                    self.set_cookie(cookie)
         finally:
-            self._cookies_lock.release()
+            self._cookies_lock.release()

     def clear(self, domain=None, path=None, name=None):
         """Clear some cookies.
@@ -1677,11 +1677,11 @@ class CookieJar:
         """
         self._cookies_lock.acquire()
         try:
-            for cookie in self:
-                if cookie.discard:
-                    self.clear(cookie.domain, cookie.path, cookie.name)
+            for cookie in self:
+                if cookie.discard:
+                    self.clear(cookie.domain, cookie.path, cookie.name)
         finally:
-            self._cookies_lock.release()
+            self._cookies_lock.release()

     def clear_expired_cookies(self):
         """Discard all expired cookies.
@@ -1695,12 +1695,12 @@ class CookieJar:
         """
         self._cookies_lock.acquire()
         try:
-            now = time.time()
-            for cookie in self:
-                if cookie.is_expired(now):
-                    self.clear(cookie.domain, cookie.path, cookie.name)
+            now = time.time()
+            for cookie in self:
+                if cookie.is_expired(now):
+                    self.clear(cookie.domain, cookie.path, cookie.name)
         finally:
-            self._cookies_lock.release()
+            self._cookies_lock.release()

     def __iter__(self):
         return deepvalues(self._cookies)
@@ -1774,16 +1774,16 @@ class FileCookieJar(CookieJar):
         self._cookies_lock.acquire()
         try:

-            old_state = copy.deepcopy(self._cookies)
-            self._cookies = {}
-            try:
-                self.load(filename, ignore_discard, ignore_expires)
-            except (LoadError, IOError):
-                self._cookies = old_state
-                raise
+            old_state = copy.deepcopy(self._cookies)
+            self._cookies = {}
+            try:
+                self.load(filename, ignore_discard, ignore_expires)
+            except (LoadError, IOError):
+                self._cookies = old_state
+                raise

         finally:
-            self._cookies_lock.release()
+            self._cookies_lock.release()

 from _LWPCookieJar import LWPCookieJar, lwp_cookie_str
 from _MozillaCookieJar import MozillaCookieJar

@@ -246,5 +246,5 @@ def open(file, flag=None, mode=0666):
     else:
         # Turn off any bits that are set in the umask
         mode = mode & (~um)
-
+
     return _Database(file, mode)

@@ -332,7 +332,7 @@ class FTP:
             # 1xx or error messages for LIST), so we just discard
             # this response.
             if resp[0] == '2':
-                resp = self.getresp()
+                resp = self.getresp()
             if resp[0] != '1':
                 raise error_reply, resp
         else:
@@ -342,7 +342,7 @@ class FTP:
             resp = self.sendcmd(cmd)
             # See above.
             if resp[0] == '2':
-                resp = self.getresp()
+                resp = self.getresp()
             if resp[0] != '1':
                 raise error_reply, resp
             conn, sockaddr = sock.accept()

@@ -899,7 +899,7 @@ class HTTPConnection:
                 except (AttributeError, OSError):
                     # Don't send a length if this failed
                     if self.debuglevel > 0: print "Cannot stat!!"
-
+
             if thelen is not None:
                 self.putheader('Content-Length',thelen)
         for hdr, value in headers.iteritems():

@@ -71,7 +71,7 @@ class CodeContext:
         #
         # To avoid possible errors, all references to the inner workings
         # of Tk are executed inside try/except blocks.
-
+
         widgets_for_width_calc = self.editwin.text, self.editwin.text_frame

         # calculate the required vertical padding
@@ -113,7 +113,7 @@ class CodeContext:
             # above it.
             self.label.pack(side="top", fill="x", expand=False,
                             before=self.editwin.text_frame)
-
+
         else:
             self.label.destroy()
             self.label = None

@@ -50,9 +50,9 @@ class Dialog(Toplevel):
         # If the master is not viewable, don't
         # make the child transient, or else it
         # would be opened withdrawn
-        if parent.winfo_viewable():
+        if parent.winfo_viewable():
             self.transient(parent)
-
+
         if title:
             self.title(title)

@@ -572,7 +572,7 @@ class _singlefileMailbox(Mailbox):
         # already have been generated (and presumably has been modified
         # by adding or deleting an item).
         assert self._toc is not None
-
+
         # Check length of self._file; if it's changed, some other process
         # has modified the mailbox since we scanned it.
         self._file.seek(0, 2)
@@ -581,7 +581,7 @@ class _singlefileMailbox(Mailbox):
             raise ExternalClashError('Size of mailbox file changed '
                                      '(expected %i, found %i)' %
                                      (self._file_length, cur_len))
-
+
         new_file = _create_temporary(self._path)
         try:
             new_toc = {}
@@ -1222,7 +1222,7 @@ class Babyl(_singlefileMailbox):
         self._next_key = len(self._toc)
         self._file.seek(0, 2)
         self._file_length = self._file.tell()
-
+
     def _pre_mailbox_hook(self, f):
         """Called before writing the mailbox to file f."""
         f.write('BABYL OPTIONS:%sVersion: 5%sLabels:%s%s\037' %

@@ -236,7 +236,7 @@ _release_version = re.compile(r'([^0-9]+)'
                               '[^(]*(?:\((.+)\))?')

 # See also http://www.novell.com/coolsolutions/feature/11251.html
-# and http://linuxmafia.com/faq/Admin/release-files.html
+# and http://linuxmafia.com/faq/Admin/release-files.html
 # and http://data.linux-ntfs.org/rpm/whichrpm
 # and http://www.die.net/doc/linux/man/man1/lsb_release.1.html

@@ -245,7 +245,7 @@ _supported_dists = ('SuSE', 'debian', 'fedora', 'redhat', 'centos',
                     'gentoo', 'UnitedLinux')

 def _parse_release_file(firstline):
-
+
     # Parse the first line
     m = _lsb_release_version.match(firstline)
     if m is not None:
@@ -268,7 +268,7 @@ def _parse_release_file(firstline):
     return '', version, id

 def _test_parse_release_file():
-
+
     for input, output in (
         # Examples of release file contents:
         ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64'))
@@ -324,7 +324,7 @@ def linux_distribution(distname='', version='', id='',
                 break
     else:
         return _dist_try_harder(distname,version,id)
-
+
     # Read the first line
     f = open('/etc/'+file, 'r')
     firstline = f.readline()
@@ -340,7 +340,7 @@ def linux_distribution(distname='', version='', id='',
     return distname, version, id

 # To maintain backwards compatibility:
-
+
 def dist(distname='',version='',id='',

          supported_dists=_supported_dists):
@@ -1358,7 +1358,7 @@ def python_branch():
         If not available, an empty string is returned.

     """
-
+
     return _sys_version()[2]

 def python_revision():

@@ -123,7 +123,7 @@ def fork():
         os.close(tmp_fd)
     else:
         os.close(slave_fd)
-
+
     # Parent and child process.
     return pid, master_fd

@@ -1120,7 +1120,7 @@ class Popen(object):
                     # we can write up to PIPE_BUF bytes without risk
                     # blocking.  POSIX defines PIPE_BUF >= 512
                     bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
-                    input_offset += bytes_written
+                    input_offset += bytes_written
                     if input_offset >= len(input):
                         self.stdin.close()
                         write_set.remove(self.stdin)

@@ -15,7 +15,7 @@ class SortedDict(UserDict.UserDict):
         result = self.data.keys()
         result.sort()
         return result
-
+
     def values(self):
         result = self.items()
         return [i[1] for i in values]
@@ -446,12 +446,12 @@ class SortedTestCase(RawConfigParserTestCase):
                         "o2=3\n"
                         "o1=4\n"
                         "[a]\n"
-                        "k=v\n")
+                        "k=v\n")
         output = StringIO.StringIO()
         self.cf.write(output)
         self.assertEquals(output.getvalue(),
                           "[a]\n"
-                          "k = v\n\n"
+                          "k = v\n\n"
                           "[b]\n"
                           "o1 = 4\n"
                           "o2 = 3\n"

@@ -49,7 +49,7 @@ class DumbDBMTestCase(unittest.TestCase):
             f.close()
         finally:
             os.umask(old_umask)
-
+
         expected_mode = 0635
         if os.name != 'posix':
             # Windows only supports setting the read-only attribute.
@@ -61,7 +61,7 @@ class DumbDBMTestCase(unittest.TestCase):
         self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
         st = os.stat(_fname + '.dir')
         self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
-
+
     def test_close_twice(self):
         f = dumbdbm.open(_fname)
         f['a'] = 'b'

@@ -138,7 +138,7 @@ class TestGzip(unittest.TestCase):
         y = f.read(10)
         f.close()
         self.assertEquals(y, data1[20:30])
-
+
     def test_seek_write(self):
         # Try seek, write test
         f = gzip.GzipFile(self.filename, 'w')

@@ -681,11 +681,11 @@ class TestMaildir(TestMailbox):
         box = self._factory(self._path, factory=dummy_factory)
         folder = box.add_folder('folder1')
         self.assert_(folder._factory is dummy_factory)
-
+
         folder1_alias = box.get_folder('folder1')
         self.assert_(folder1_alias._factory is dummy_factory)

-
+

 class _TestMboxMMDF(TestMailbox):

@@ -805,7 +805,7 @@ class TestMH(TestMailbox):
         def dummy_factory (s):
             return None
         self._box = self._factory(self._path, dummy_factory)
-
+
         new_folder = self._box.add_folder('foo.bar')
         folder0 = self._box.get_folder('foo.bar')
         folder0.add(self._template % 'bar')
@@ -901,7 +901,7 @@ class TestMH(TestMailbox):
         self.assert_(self._box.get_sequences() ==
                      {'foo':[1, 2, 3, 4, 5],
                       'unseen':[1], 'bar':[3], 'replied':[3]})
-
+
     def _get_lock_path(self):
         return os.path.join(self._path, '.mh_sequences.lock')

@@ -116,7 +116,7 @@ class MboxTestCase(unittest.TestCase):

     def tearDown(self):
         os.unlink(self._path)
-
+
     def test_from_regex (self):
         # Testing new regex from bug #1633678
         f = open(self._path, 'w')

@@ -120,7 +120,7 @@ else:
     ##if False and lines != ['In child, calling os.setsid()',
     ##    'Good: OSError was raised.', '']:
     ##    raise TestFailed("Unexpected output from child: %r" % line)
-
+
     (pid, status) = os.waitpid(pid, 0)
     res = status >> 8
     debug("Child (%d) exited with status %d (%d)."%(pid, res, status))
@@ -140,8 +140,8 @@ else:
     ##    pass
     ##else:
     ##    raise TestFailed("Read from master_fd did not raise exception")
-
-
+
+
 os.close(master_fd)

 # pty.fork() passed.

@@ -15,7 +15,7 @@ class ResourceTest(unittest.TestCase):
         self.assertRaises(TypeError, resource.setrlimit, 42, 42, 42)

     def test_fsize_ismax(self):
-
+
         try:
             (cur, max) = resource.getrlimit(resource.RLIMIT_FSIZE)
         except AttributeError:
@@ -39,7 +39,7 @@ class ResourceTest(unittest.TestCase):
         # versions of Python were terminated by an uncaught SIGXFSZ, but
         # pythonrun.c has been fixed to ignore that exception.  If so, the
         # write() should return EFBIG when the limit is exceeded.
-
+
         # At least one platform has an unlimited RLIMIT_FSIZE and attempts
         # to change it raise ValueError instead.
         try:

@@ -473,7 +473,7 @@ class SetSubclassWithKeywordArgs(set):
         set.__init__(self, iterable)

 class TestSetSubclassWithKeywordArgs(TestSet):
-
+
     def test_keywords_in_subclass(self):
         'SF bug #1486663 -- this used to erroneously raise a TypeError'
         SetSubclassWithKeywordArgs(newarg=1)
@@ -1460,7 +1460,7 @@ def test_main(verbose=None):
     test_classes = (
         TestSet,
         TestSetSubclass,
-        TestSetSubclassWithKeywordArgs,
+        TestSetSubclassWithKeywordArgs,
         TestFrozenSet,
         TestFrozenSetSubclass,
         TestSetOfSets,

@@ -119,7 +119,7 @@ for prefix in ('', '@', '<', '>', '=', '!'):
         cp, bp, hp, ip, lp, fp, dp, tp = struct.unpack(format, s)
         if (cp != c or bp != b or hp != h or ip != i or lp != l or
             int(100 * fp) != int(100 * f) or int(100 * dp) != int(100 * d) or
-            tp != t):
+            tp != t):
             # ^^^ calculate only to two decimal places
             raise TestFailed, "unpack/pack not transitive (%s, %s)" % (
                 str(format), str((cp, bp, hp, ip, lp, fp, dp, tp)))
@@ -160,11 +160,11 @@ tests = [
     ('f', -2.0, '\300\000\000\000', '\000\000\000\300', 0),
     ('d', -2.0, '\300\000\000\000\000\000\000\000',
                 '\000\000\000\000\000\000\000\300', 0),
-    ('t', 0, '\0', '\0', 0),
-    ('t', 3, '\1', '\1', 1),
-    ('t', True, '\1', '\1', 0),
-    ('t', [], '\0', '\0', 1),
-    ('t', (1,), '\1', '\1', 1),
+    ('t', 0, '\0', '\0', 0),
+    ('t', 3, '\1', '\1', 1),
+    ('t', True, '\1', '\1', 0),
+    ('t', [], '\0', '\0', 1),
+    ('t', (1,), '\1', '\1', 1),
 ]

 for fmt, arg, big, lil, asy in tests:
@@ -621,48 +621,48 @@ test_pack_into()
 test_pack_into_fn()

 def test_bool():
-    for prefix in tuple("<>!=")+('',):
-        false = (), [], [], '', 0
-        true = [1], 'test', 5, -1, 0xffffffffL+1, 0xffffffff/2
-
-        falseFormat = prefix + 't' * len(false)
-        if verbose:
-            print 'trying bool pack/unpack on', false, 'using format', falseFormat
-        packedFalse = struct.pack(falseFormat, *false)
-        unpackedFalse = struct.unpack(falseFormat, packedFalse)
-
-        trueFormat = prefix + 't' * len(true)
-        if verbose:
-            print 'trying bool pack/unpack on', true, 'using format', trueFormat
-        packedTrue = struct.pack(trueFormat, *true)
-        unpackedTrue = struct.unpack(trueFormat, packedTrue)
-
-        if len(true) != len(unpackedTrue):
-            raise TestFailed('unpacked true array is not of same size as input')
-        if len(false) != len(unpackedFalse):
-            raise TestFailed('unpacked false array is not of same size as input')
-
-        for t in unpackedFalse:
-            if t is not False:
-                raise TestFailed('%r did not unpack as False' % t)
-        for t in unpackedTrue:
-            if t is not True:
-                raise TestFailed('%r did not unpack as false' % t)
-
-        if prefix and verbose:
-            print 'trying size of bool with format %r' % (prefix+'t')
-        packed = struct.pack(prefix+'t', 1)
-
-        if len(packed) != struct.calcsize(prefix+'t'):
-            raise TestFailed('packed length is not equal to calculated size')
-
-        if len(packed) != 1 and prefix:
-            raise TestFailed('encoded bool is not one byte: %r' % packed)
-        elif not prefix and verbose:
-            print 'size of bool in native format is %i' % (len(packed))
-
-        for c in '\x01\x7f\xff\x0f\xf0':
-            if struct.unpack('>t', c)[0] is not True:
-                raise TestFailed('%c did not unpack as True' % c)
+    for prefix in tuple("<>!=")+('',):
+        false = (), [], [], '', 0
+        true = [1], 'test', 5, -1, 0xffffffffL+1, 0xffffffff/2
+
+        falseFormat = prefix + 't' * len(false)
+        if verbose:
+            print 'trying bool pack/unpack on', false, 'using format', falseFormat
+        packedFalse = struct.pack(falseFormat, *false)
+        unpackedFalse = struct.unpack(falseFormat, packedFalse)
+
+        trueFormat = prefix + 't' * len(true)
+        if verbose:
+            print 'trying bool pack/unpack on', true, 'using format', trueFormat
+        packedTrue = struct.pack(trueFormat, *true)
+        unpackedTrue = struct.unpack(trueFormat, packedTrue)
+
+        if len(true) != len(unpackedTrue):
+            raise TestFailed('unpacked true array is not of same size as input')
+        if len(false) != len(unpackedFalse):
+            raise TestFailed('unpacked false array is not of same size as input')
+
+        for t in unpackedFalse:
+            if t is not False:
+                raise TestFailed('%r did not unpack as False' % t)
+        for t in unpackedTrue:
+            if t is not True:
+                raise TestFailed('%r did not unpack as false' % t)
+
+        if prefix and verbose:
+            print 'trying size of bool with format %r' % (prefix+'t')
+        packed = struct.pack(prefix+'t', 1)
+
+        if len(packed) != struct.calcsize(prefix+'t'):
+            raise TestFailed('packed length is not equal to calculated size')
+
+        if len(packed) != 1 and prefix:
+            raise TestFailed('encoded bool is not one byte: %r' % packed)
+        elif not prefix and verbose:
+            print 'size of bool in native format is %i' % (len(packed))
+
+        for c in '\x01\x7f\xff\x0f\xf0':
+            if struct.unpack('>t', c)[0] is not True:
+                raise TestFailed('%c did not unpack as True' % c)

 test_bool()

@@ -270,7 +270,7 @@ def open_urlresource(url):
     print >> get_original_stdout(), '\tfetching %s ...' % url
     fn, _ = urllib.urlretrieve(url, filename)
     return open(fn)
-
+
 @contextmanager
 def guard_warnings_filter():
     """Guard the warnings filter from being permanently changed."""