Mirror of https://github.com/python/cpython.git (synced 2025-09-14 04:37:29 +00:00)
Whitespace normalization.
parent 15c1fe5047
commit f733abb783
21 changed files with 128 additions and 128 deletions
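
The commit message does not say which tool produced the change; the hunks below only record the result. As a rough illustration of what such a pass amounts to, here is a minimal, hypothetical sketch (not the script actually used): strip trailing spaces and tabs from every line and make sure each file ends with exactly one newline.

    import sys

    def normalize(path):
        # Hypothetical helper, for illustration only.
        # Read raw bytes so the pass is encoding-agnostic.
        with open(path, "rb") as f:
            lines = f.read().splitlines()
        fixed = [line.rstrip() for line in lines]   # drop trailing spaces/tabs
        with open(path, "wb") as f:
            # rejoin with "\n" and end with exactly one newline
            f.write(b"\n".join(fixed) + b"\n")

    if __name__ == "__main__":
        for name in sys.argv[1:]:
            normalize(name)

Run against a list of files, a pass like this changes only whitespace, which is why every hunk in this diff shows the same visible text on both sides.
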
@@ -107,7 +107,7 @@ class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
"""Execute a CGI script."""
path = self.path
dir, rest = self.cgi_info

i = path.find('/', len(dir) + 1)
while i >= 0:
    nextdir = path[:i]

@@ -1318,26 +1318,26 @@ class CookieJar:
self._cookies_lock.acquire()
try:

    self._policy._now = self._now = int(time.time())

    cookies = self._cookies_for_request(request)

    attrs = self._cookie_attrs(cookies)
    if attrs:
        if not request.has_header("Cookie"):
            request.add_unredirected_header(
                "Cookie", "; ".join(attrs))

    # if necessary, advertise that we know RFC 2965
    if (self._policy.rfc2965 and not self._policy.hide_cookie2 and
        not request.has_header("Cookie2")):
        for cookie in cookies:
            if cookie.version != 1:
                request.add_unredirected_header("Cookie2", '$Version="1"')
                break

finally:
    self._cookies_lock.release()

self.clear_expired_cookies()

@@ -1609,7 +1609,7 @@ class CookieJar:

    if self._policy.set_ok(cookie, request):
        self.set_cookie(cookie)


finally:
    self._cookies_lock.release()

@@ -1632,14 +1632,14 @@ class CookieJar:
    _debug("extract_cookies: %s", response.info())
    self._cookies_lock.acquire()
    try:
        self._policy._now = self._now = int(time.time())

        for cookie in self.make_cookies(response, request):
            if self._policy.set_ok(cookie, request):
                _debug(" setting cookie: %s", cookie)
                self.set_cookie(cookie)
    finally:
        self._cookies_lock.release()

def clear(self, domain=None, path=None, name=None):
    """Clear some cookies.

@@ -1677,11 +1677,11 @@ class CookieJar:
    """
    self._cookies_lock.acquire()
    try:
        for cookie in self:
            if cookie.discard:
                self.clear(cookie.domain, cookie.path, cookie.name)
    finally:
        self._cookies_lock.release()

def clear_expired_cookies(self):
    """Discard all expired cookies.

@@ -1695,12 +1695,12 @@ class CookieJar:
    """
    self._cookies_lock.acquire()
    try:
        now = time.time()
        for cookie in self:
            if cookie.is_expired(now):
                self.clear(cookie.domain, cookie.path, cookie.name)
    finally:
        self._cookies_lock.release()

def __iter__(self):
    return deepvalues(self._cookies)

@@ -1774,16 +1774,16 @@ class FileCookieJar(CookieJar):
        self._cookies_lock.acquire()
        try:

            old_state = copy.deepcopy(self._cookies)
            self._cookies = {}
            try:
                self.load(filename, ignore_discard, ignore_expires)
            except (LoadError, IOError):
                self._cookies = old_state
                raise

        finally:
            self._cookies_lock.release()

from _LWPCookieJar import LWPCookieJar, lwp_cookie_str
from _MozillaCookieJar import MozillaCookieJar

@@ -246,5 +246,5 @@ def open(file, flag=None, mode=0666):
else:
    # Turn off any bits that are set in the umask
    mode = mode & (~um)

return _Database(file, mode)

@@ -332,7 +332,7 @@ class FTP:
    # 1xx or error messages for LIST), so we just discard
    # this response.
    if resp[0] == '2':
        resp = self.getresp()
    if resp[0] != '1':
        raise error_reply, resp
else:

@@ -342,7 +342,7 @@ class FTP:
    resp = self.sendcmd(cmd)
    # See above.
    if resp[0] == '2':
        resp = self.getresp()
    if resp[0] != '1':
        raise error_reply, resp
conn, sockaddr = sock.accept()

@@ -899,7 +899,7 @@ class HTTPConnection:
        except (AttributeError, OSError):
            # Don't send a length if this failed
            if self.debuglevel > 0: print "Cannot stat!!"

    if thelen is not None:
        self.putheader('Content-Length',thelen)
for hdr, value in headers.iteritems():

@@ -71,7 +71,7 @@ class CodeContext:
#
# To avoid possible errors, all references to the inner workings
# of Tk are executed inside try/except blocks.

widgets_for_width_calc = self.editwin.text, self.editwin.text_frame

# calculate the required vertical padding

@@ -113,7 +113,7 @@ class CodeContext:
    # above it.
    self.label.pack(side="top", fill="x", expand=False,
                    before=self.editwin.text_frame)

else:
    self.label.destroy()
    self.label = None

@@ -50,9 +50,9 @@ class Dialog(Toplevel):
# If the master is not viewable, don't
# make the child transient, or else it
# would be opened withdrawn
if parent.winfo_viewable():
    self.transient(parent)

if title:
    self.title(title)


@@ -572,7 +572,7 @@ class _singlefileMailbox(Mailbox):
# already have been generated (and presumably has been modified
# by adding or deleting an item).
assert self._toc is not None

# Check length of self._file; if it's changed, some other process
# has modified the mailbox since we scanned it.
self._file.seek(0, 2)

@@ -581,7 +581,7 @@ class _singlefileMailbox(Mailbox):
    raise ExternalClashError('Size of mailbox file changed '
                             '(expected %i, found %i)' %
                             (self._file_length, cur_len))

new_file = _create_temporary(self._path)
try:
    new_toc = {}

@@ -1222,7 +1222,7 @@ class Babyl(_singlefileMailbox):
    self._next_key = len(self._toc)
    self._file.seek(0, 2)
    self._file_length = self._file.tell()

def _pre_mailbox_hook(self, f):
    """Called before writing the mailbox to file f."""
    f.write('BABYL OPTIONS:%sVersion: 5%sLabels:%s%s\037' %

@@ -236,7 +236,7 @@ _release_version = re.compile(r'([^0-9]+)'
    '[^(]*(?:\((.+)\))?')

# See also http://www.novell.com/coolsolutions/feature/11251.html
# and http://linuxmafia.com/faq/Admin/release-files.html
# and http://data.linux-ntfs.org/rpm/whichrpm
# and http://www.die.net/doc/linux/man/man1/lsb_release.1.html


@@ -245,7 +245,7 @@ _supported_dists = ('SuSE', 'debian', 'fedora', 'redhat', 'centos',
    'gentoo', 'UnitedLinux')

def _parse_release_file(firstline):

    # Parse the first line
    m = _lsb_release_version.match(firstline)
    if m is not None:

@@ -268,7 +268,7 @@ def _parse_release_file(firstline):
    return '', version, id

def _test_parse_release_file():

    for input, output in (
        # Examples of release file contents:
        ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64'))

@@ -324,7 +324,7 @@ def linux_distribution(distname='', version='', id='',
            break
else:
    return _dist_try_harder(distname,version,id)

# Read the first line
f = open('/etc/'+file, 'r')
firstline = f.readline()

@@ -340,7 +340,7 @@ def linux_distribution(distname='', version='', id='',
    return distname, version, id

# To maintain backwards compatibility:

def dist(distname='',version='',id='',

         supported_dists=_supported_dists):

@@ -1358,7 +1358,7 @@ def python_branch():
        If not available, an empty string is returned.

    """

    return _sys_version()[2]

def python_revision():

@@ -123,7 +123,7 @@ def fork():
        os.close(tmp_fd)
else:
    os.close(slave_fd)

# Parent and child process.
return pid, master_fd


@@ -1120,7 +1120,7 @@ class Popen(object):
# we can write up to PIPE_BUF bytes without risk
# blocking. POSIX defines PIPE_BUF >= 512
bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
input_offset += bytes_written
if input_offset >= len(input):
    self.stdin.close()
    write_set.remove(self.stdin)

@@ -15,7 +15,7 @@ class SortedDict(UserDict.UserDict):
    result = self.data.keys()
    result.sort()
    return result

def values(self):
    result = self.items()
    return [i[1] for i in values]

@@ -446,12 +446,12 @@ class SortedTestCase(RawConfigParserTestCase):
    "o2=3\n"
    "o1=4\n"
    "[a]\n"
    "k=v\n")
output = StringIO.StringIO()
self.cf.write(output)
self.assertEquals(output.getvalue(),
    "[a]\n"
    "k = v\n\n"
    "[b]\n"
    "o1 = 4\n"
    "o2 = 3\n"

@@ -49,7 +49,7 @@ class DumbDBMTestCase(unittest.TestCase):
    f.close()
finally:
    os.umask(old_umask)

expected_mode = 0635
if os.name != 'posix':
    # Windows only supports setting the read-only attribute.

@@ -61,7 +61,7 @@ class DumbDBMTestCase(unittest.TestCase):
    self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
    st = os.stat(_fname + '.dir')
    self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)

def test_close_twice(self):
    f = dumbdbm.open(_fname)
    f['a'] = 'b'

@@ -138,7 +138,7 @@ class TestGzip(unittest.TestCase):
    y = f.read(10)
    f.close()
    self.assertEquals(y, data1[20:30])

def test_seek_write(self):
    # Try seek, write test
    f = gzip.GzipFile(self.filename, 'w')

@@ -681,11 +681,11 @@ class TestMaildir(TestMailbox):
        box = self._factory(self._path, factory=dummy_factory)
        folder = box.add_folder('folder1')
        self.assert_(folder._factory is dummy_factory)

        folder1_alias = box.get_folder('folder1')
        self.assert_(folder1_alias._factory is dummy_factory)



class _TestMboxMMDF(TestMailbox):


@@ -805,7 +805,7 @@ class TestMH(TestMailbox):
def dummy_factory (s):
    return None
self._box = self._factory(self._path, dummy_factory)

new_folder = self._box.add_folder('foo.bar')
folder0 = self._box.get_folder('foo.bar')
folder0.add(self._template % 'bar')

@@ -901,7 +901,7 @@ class TestMH(TestMailbox):
    self.assert_(self._box.get_sequences() ==
        {'foo':[1, 2, 3, 4, 5],
         'unseen':[1], 'bar':[3], 'replied':[3]})

def _get_lock_path(self):
    return os.path.join(self._path, '.mh_sequences.lock')


@@ -116,7 +116,7 @@ class MboxTestCase(unittest.TestCase):

def tearDown(self):
    os.unlink(self._path)

def test_from_regex (self):
    # Testing new regex from bug #1633678
    f = open(self._path, 'w')

@@ -120,7 +120,7 @@ else:
##if False and lines != ['In child, calling os.setsid()',
## 'Good: OSError was raised.', '']:
## raise TestFailed("Unexpected output from child: %r" % line)

(pid, status) = os.waitpid(pid, 0)
res = status >> 8
debug("Child (%d) exited with status %d (%d)."%(pid, res, status))

@@ -140,8 +140,8 @@ else:
## pass
##else:
## raise TestFailed("Read from master_fd did not raise exception")


os.close(master_fd)

# pty.fork() passed.

@@ -15,7 +15,7 @@ class ResourceTest(unittest.TestCase):
    self.assertRaises(TypeError, resource.setrlimit, 42, 42, 42)

def test_fsize_ismax(self):

    try:
        (cur, max) = resource.getrlimit(resource.RLIMIT_FSIZE)
    except AttributeError:

@@ -39,7 +39,7 @@ class ResourceTest(unittest.TestCase):
# versions of Python were terminated by an uncaught SIGXFSZ, but
# pythonrun.c has been fixed to ignore that exception. If so, the
# write() should return EFBIG when the limit is exceeded.

# At least one platform has an unlimited RLIMIT_FSIZE and attempts
# to change it raise ValueError instead.
try:

@@ -473,7 +473,7 @@ class SetSubclassWithKeywordArgs(set):
        set.__init__(self, iterable)

class TestSetSubclassWithKeywordArgs(TestSet):

    def test_keywords_in_subclass(self):
        'SF bug #1486663 -- this used to erroneously raise a TypeError'
        SetSubclassWithKeywordArgs(newarg=1)

@@ -1460,7 +1460,7 @@ def test_main(verbose=None):
test_classes = (
    TestSet,
    TestSetSubclass,
    TestSetSubclassWithKeywordArgs,
    TestFrozenSet,
    TestFrozenSetSubclass,
    TestSetOfSets,

@@ -119,7 +119,7 @@ for prefix in ('', '@', '<', '>', '=', '!'):
cp, bp, hp, ip, lp, fp, dp, tp = struct.unpack(format, s)
if (cp != c or bp != b or hp != h or ip != i or lp != l or
    int(100 * fp) != int(100 * f) or int(100 * dp) != int(100 * d) or
        tp != t):
    # ^^^ calculate only to two decimal places
    raise TestFailed, "unpack/pack not transitive (%s, %s)" % (
        str(format), str((cp, bp, hp, ip, lp, fp, dp, tp)))

@@ -160,11 +160,11 @@ tests = [
    ('f', -2.0, '\300\000\000\000', '\000\000\000\300', 0),
    ('d', -2.0, '\300\000\000\000\000\000\000\000',
        '\000\000\000\000\000\000\000\300', 0),
    ('t', 0, '\0', '\0', 0),
    ('t', 3, '\1', '\1', 1),
    ('t', True, '\1', '\1', 0),
    ('t', [], '\0', '\0', 1),
    ('t', (1,), '\1', '\1', 1),
]

for fmt, arg, big, lil, asy in tests:

@@ -621,48 +621,48 @@ test_pack_into()
test_pack_into_fn()

def test_bool():
    for prefix in tuple("<>!=")+('',):
        false = (), [], [], '', 0
        true = [1], 'test', 5, -1, 0xffffffffL+1, 0xffffffff/2

        falseFormat = prefix + 't' * len(false)
        if verbose:
            print 'trying bool pack/unpack on', false, 'using format', falseFormat
        packedFalse = struct.pack(falseFormat, *false)
        unpackedFalse = struct.unpack(falseFormat, packedFalse)

        trueFormat = prefix + 't' * len(true)
        if verbose:
            print 'trying bool pack/unpack on', true, 'using format', trueFormat
        packedTrue = struct.pack(trueFormat, *true)
        unpackedTrue = struct.unpack(trueFormat, packedTrue)

        if len(true) != len(unpackedTrue):
            raise TestFailed('unpacked true array is not of same size as input')
        if len(false) != len(unpackedFalse):
            raise TestFailed('unpacked false array is not of same size as input')

        for t in unpackedFalse:
            if t is not False:
                raise TestFailed('%r did not unpack as False' % t)
        for t in unpackedTrue:
            if t is not True:
                raise TestFailed('%r did not unpack as false' % t)

        if prefix and verbose:
            print 'trying size of bool with format %r' % (prefix+'t')
        packed = struct.pack(prefix+'t', 1)

        if len(packed) != struct.calcsize(prefix+'t'):
            raise TestFailed('packed length is not equal to calculated size')

        if len(packed) != 1 and prefix:
            raise TestFailed('encoded bool is not one byte: %r' % packed)
        elif not prefix and verbose:
            print 'size of bool in native format is %i' % (len(packed))

        for c in '\x01\x7f\xff\x0f\xf0':
            if struct.unpack('>t', c)[0] is not True:
                raise TestFailed('%c did not unpack as True' % c)

test_bool()

@@ -270,7 +270,7 @@ def open_urlresource(url):
        print >> get_original_stdout(), '\tfetching %s ...' % url
        fn, _ = urllib.urlretrieve(url, filename)
    return open(fn)

@contextmanager
def guard_warnings_filter():
    """Guard the warnings filter from being permanently changed."""