Mirror of https://github.com/python/cpython.git (synced 2025-11-02 11:08:57 +00:00)
Merged revisions 53538-53622 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

........
  r53545 | andrew.kuchling | 2007-01-24 21:06:41 +0100 (Wed, 24 Jan 2007) | 1 line

  Strengthen warning about using lock()
........
  r53556 | thomas.heller | 2007-01-25 19:34:14 +0100 (Thu, 25 Jan 2007) | 3 lines

  Fix for #1643874: When calling SysAllocString, create a PyCObject which
  will eventually call SysFreeString to free the BSTR resource.
........
  r53563 | andrew.kuchling | 2007-01-25 21:02:13 +0100 (Thu, 25 Jan 2007) | 1 line

  Add item
........
  r53564 | brett.cannon | 2007-01-25 21:22:02 +0100 (Thu, 25 Jan 2007) | 8 lines

  Fix time.strptime's %U support.  Basically rewrote the algorithm to be more
  generic so that one only has to shift certain values based on whether the
  week was specified to start on Monday or Sunday.  Cut out a lot of edge case
  code compared to the previous version.  Also broke the algorithm out into
  its own function (that is private to the module).

  Fixes bug #1643943 (thanks Biran Nahas for the report).
........
  r53570 | brett.cannon | 2007-01-26 00:30:39 +0100 (Fri, 26 Jan 2007) | 4 lines

  Remove specific mention of my name and email address from modules.  Not
  really needed and all bug reports should go to the bug tracker, not directly
  to me.  Plus I am not the only person to have edited these files at this
  point.
........
  r53573 | fred.drake | 2007-01-26 17:28:44 +0100 (Fri, 26 Jan 2007) | 1 line

  fix typo (extraneous ")")
........
  r53575 | georg.brandl | 2007-01-27 18:43:02 +0100 (Sat, 27 Jan 2007) | 4 lines

  Patch #1638243: the compiler package is now able to correctly compile a
  with statement; previously, executing code containing a with statement
  compiled by the compiler package crashed the interpreter.
........
  r53578 | georg.brandl | 2007-01-27 18:59:42 +0100 (Sat, 27 Jan 2007) | 3 lines

  Patch #1634778: add missing encoding aliases for iso8859_15 and iso8859_16.
........
  r53579 | georg.brandl | 2007-01-27 20:38:50 +0100 (Sat, 27 Jan 2007) | 2 lines

  Bug #1645944: os.access now returns bool but docstring is not updated
........
  r53590 | brett.cannon | 2007-01-28 21:58:00 +0100 (Sun, 28 Jan 2007) | 2 lines

  Use the thread lock's context manager instead of a try/finally statement.
........
  r53591 | brett.cannon | 2007-01-29 05:41:44 +0100 (Mon, 29 Jan 2007) | 2 lines

  Add a test for slicing an exception.
........
  r53594 | andrew.kuchling | 2007-01-29 21:21:43 +0100 (Mon, 29 Jan 2007) | 1 line

  Minor edits to the curses HOWTO
........
  r53596 | andrew.kuchling | 2007-01-29 21:55:40 +0100 (Mon, 29 Jan 2007) | 1 line

  Various minor edits
........
  r53597 | andrew.kuchling | 2007-01-29 22:28:48 +0100 (Mon, 29 Jan 2007) | 1 line

  More edits
........
  r53601 | tim.peters | 2007-01-30 04:03:46 +0100 (Tue, 30 Jan 2007) | 2 lines

  Whitespace normalization.
........
  r53603 | georg.brandl | 2007-01-30 21:21:30 +0100 (Tue, 30 Jan 2007) | 2 lines

  Bug #1648191: typo in docs.
........
  r53605 | brett.cannon | 2007-01-30 22:34:36 +0100 (Tue, 30 Jan 2007) | 8 lines

  No more raising of string exceptions!

  The next step of PEP 352 (for 2.6) causes raising a string exception to
  trigger a TypeError.  Trying to catch a string exception raises a
  DeprecationWarning.

  References to string exceptions have been removed from the docs since they
  are now just an error.
........
  r53618 | raymond.hettinger | 2007-02-01 22:02:59 +0100 (Thu, 01 Feb 2007) | 1 line

  Bug #1648179: set.update() not recognizing __iter__ overrides in dict
  subclasses.
........
This commit is contained in:
parent 08f00467b9
commit 9fe394c1be

46 changed files with 464 additions and 344 deletions
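r53590 (visible in the _strptime hunks below) replaces manual lock handling with the lock's own context manager. A minimal sketch of the pattern, using a plain threading.Lock purely for illustration:

    import threading

    _cache_lock = threading.Lock()

    # Before: explicit acquire/release, with try/finally guaranteeing the release.
    _cache_lock.acquire()
    try:
        pass  # work on the shared cache
    finally:
        _cache_lock.release()

    # After: the lock is its own context manager and is released on exit,
    # even if the body raises.
    with _cache_lock:
        pass  # work on the shared cache

Both forms are equivalent; the with form simply removes the boilerplate around the critical section.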
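r53618 fixes set.update() ignoring an overridden __iter__ on dict subclasses. A small, hypothetical illustration of the behaviour the fix restores; the subclass and its names are invented for the example, and the final assert assumes the post-fix semantics that the override is honoured:

    class UpperKeys(dict):
        # Dict subclass whose iteration yields transformed keys.
        def __iter__(self):
            for key in dict.__iter__(self):
                yield key.upper()

    s = set()
    s.update(UpperKeys(a=1, b=2))
    # With the fix, update() iterates via __iter__, so the transformed keys
    # land in the set; the old dict fast path bypassed the override.
    assert s == set(['A', 'B'])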
@@ -107,7 +107,7 @@ class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
        """Execute a CGI script."""
        path = self.path
        dir, rest = self.cgi_info


        i = path.find('/', len(dir) + 1)
        while i >= 0:
            nextdir = path[:i]
@@ -22,9 +22,6 @@ try:
except:
    from dummy_thread import allocate_lock as _thread_allocate_lock

__author__ = "Brett Cannon"
__email__ = "brett@python.org"

__all__ = ['strptime']

def _getlang():
@@ -273,11 +270,31 @@ _TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
_regex_cache = {}

def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
    """Calculate the Julian day based on the year, week of the year, and day of
    the week, with week_start_day representing whether the week of the year
    assumes the week starts on Sunday or Monday (6 or 0)."""
    first_weekday = datetime_date(year, 1, 1).weekday()
    # If we are dealing with the %U directive (week starts on Sunday), it's
    # easier to just shift the view to Sunday being the first day of the
    # week.
    if not week_starts_Mon:
        first_weekday = (first_weekday + 1) % 7
        day_of_week = (day_of_week + 1) % 7
    # Need to watch out for a week 0 (when the first day of the year is not
    # the same as that specified by %U or %W).
    week_0_length = (7 - first_weekday) % 7
    if week_of_year == 0:
        return 1 + day_of_week - first_weekday
    else:
        days_to_week = week_0_length + (7 * (week_of_year - 1))
        return 1 + days_to_week + day_of_week


def strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a time struct based on the input string and the format string."""
    global _TimeRE_cache, _regex_cache
    _cache_lock.acquire()
    try:
    with _cache_lock:
        time_re = _TimeRE_cache
        locale_time = time_re.locale_time
        if _getlang() != locale_time.lang:
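The helper added above turns the %U/%W handling into one piece of arithmetic: shift the weekday numbering so the chosen week-start day is 0, then count days from week 0. A standalone sketch of the same calculation (the wrapper name and the assert are illustrative only):

    from datetime import date

    def julian_from_week(year, week_of_year, day_of_week, week_starts_Mon):
        # day_of_week uses the datetime convention: Monday == 0 ... Sunday == 6.
        first_weekday = date(year, 1, 1).weekday()
        if not week_starts_Mon:              # %U: weeks start on Sunday
            first_weekday = (first_weekday + 1) % 7
            day_of_week = (day_of_week + 1) % 7
        week_0_length = (7 - first_weekday) % 7
        if week_of_year == 0:
            return 1 + day_of_week - first_weekday
        return 1 + week_0_length + 7 * (week_of_year - 1) + day_of_week

    # 2007 starts on a Monday, so the first Sunday (%U week 1) is Jan 7,
    # i.e. day 7 of the year -- matching the new test_strptime cases below.
    assert julian_from_week(2007, 1, 6, week_starts_Mon=False) == 7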
@@ -302,8 +319,6 @@ def strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
            except IndexError:
                raise ValueError("stray %% in format '%s'" % format)
            _regex_cache[format] = format_regex
    finally:
        _cache_lock.release()
    found = format_regex.match(data_string)
    if not found:
        raise ValueError("time data %r does not match format %r" %

@@ -385,10 +400,10 @@ def strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
        elif group_key in ('U', 'W'):
            week_of_year = int(found_dict[group_key])
            if group_key == 'U':
                # U starts week on Sunday
                # U starts week on Sunday.
                week_of_year_start = 6
            else:
                # W starts week on Monday
                # W starts week on Monday.
                week_of_year_start = 0
        elif group_key == 'Z':
            # Since -1 is default value only need to worry about setting tz if

@@ -406,42 +421,20 @@ def strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
                    tz = value
                    break
    # If we know the week of the year and what day of that week, we can figure
    # out the Julian day of the year
    # Calculations below assume 0 is a Monday
    # out the Julian day of the year.
    if julian == -1 and week_of_year != -1 and weekday != -1:
        # Calculate how many days in week 0
        first_weekday = datetime_date(year, 1, 1).weekday()
        preceeding_days = 7 - first_weekday
        if preceeding_days == 7:
            preceeding_days = 0
        # Adjust for U directive so that calculations are not dependent on
        # directive used to figure out week of year
        if weekday == 6 and week_of_year_start == 6:
            week_of_year -= 1
        # If a year starts and ends on a Monday but a week is specified to
        # start on a Sunday we need to up the week to counter-balance the fact
        # that with %W that first Monday starts week 1 while with %U that is
        # week 0 and thus shifts everything by a week
        if weekday == 0 and first_weekday == 0 and week_of_year_start == 6:
            week_of_year += 1
        # If in week 0, then just figure out how many days from Jan 1 to day of
        # week specified, else calculate by multiplying week of year by 7,
        # adding in days in week 0, and the number of days from Monday to the
        # day of the week
        if week_of_year == 0:
            julian = 1 + weekday - first_weekday
        else:
            days_to_week = preceeding_days + (7 * (week_of_year - 1))
            julian = 1 + days_to_week + weekday
        week_starts_Mon = True if week_of_year_start == 0 else False
        julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
                                          week_starts_Mon)
    # Cannot pre-calculate datetime_date() since can change in Julian
    #calculation and thus could have different value for the day of the week
    #calculation
    # calculation and thus could have different value for the day of the week
    # calculation.
    if julian == -1:
        # Need to add 1 to result since first day of the year is 1, not 0.
        julian = datetime_date(year, month, day).toordinal() - \
                  datetime_date(year, 1, 1).toordinal() + 1
    else:  # Assume that if they bothered to include Julian day it will
           #be accurate
           # be accurate.
        datetime_result = datetime_date.fromordinal((julian - 1) + datetime_date(year, 1, 1).toordinal())
        year = datetime_result.year
        month = datetime_result.month
@@ -914,6 +914,8 @@ class CodeGenerator:
        self.emit('LOAD_CONST', None)
        self.nextBlock(final)
        self.setups.push((END_FINALLY, final))
        self._implicitNameOp('LOAD', exitvar)
        self._implicitNameOp('DELETE', exitvar)
        self.emit('WITH_CLEANUP')
        self.emit('END_FINALLY')
        self.setups.pop()
@@ -1018,7 +1018,7 @@ class Transformer:
        if nodelist[2][0] == token.COLON:
            var = None
        else:
            var = self.com_node(nodelist[2])
            var = self.com_assign(nodelist[2][2], OP_ASSIGN)
        return With(expr, var, body, lineno=nodelist[0][2])

    def com_with_var(self, nodelist):
@@ -1318,26 +1318,26 @@ class CookieJar:
        self._cookies_lock.acquire()
        try:

            self._policy._now = self._now = int(time.time())
            self._policy._now = self._now = int(time.time())

            cookies = self._cookies_for_request(request)
            cookies = self._cookies_for_request(request)

            attrs = self._cookie_attrs(cookies)
            if attrs:
                if not request.has_header("Cookie"):
                    request.add_unredirected_header(
                        "Cookie", "; ".join(attrs))
            attrs = self._cookie_attrs(cookies)
            if attrs:
                if not request.has_header("Cookie"):
                    request.add_unredirected_header(
                        "Cookie", "; ".join(attrs))

            # if necessary, advertise that we know RFC 2965
            if (self._policy.rfc2965 and not self._policy.hide_cookie2 and
                not request.has_header("Cookie2")):
                for cookie in cookies:
                    if cookie.version != 1:
                        request.add_unredirected_header("Cookie2", '$Version="1"')
                        break

            # if necessary, advertise that we know RFC 2965
            if (self._policy.rfc2965 and not self._policy.hide_cookie2 and
                not request.has_header("Cookie2")):
                for cookie in cookies:
                    if cookie.version != 1:
                        request.add_unredirected_header("Cookie2", '$Version="1"')
                        break

        finally:
            self._cookies_lock.release()
            self._cookies_lock.release()

        self.clear_expired_cookies()

@@ -1609,7 +1609,7 @@ class CookieJar:

                if self._policy.set_ok(cookie, request):
                    self.set_cookie(cookie)



        finally:
            self._cookies_lock.release()

@@ -1632,14 +1632,14 @@ class CookieJar:
        _debug("extract_cookies: %s", response.info())
        self._cookies_lock.acquire()
        try:
            self._policy._now = self._now = int(time.time())
            self._policy._now = self._now = int(time.time())

            for cookie in self.make_cookies(response, request):
                if self._policy.set_ok(cookie, request):
                    _debug(" setting cookie: %s", cookie)
                    self.set_cookie(cookie)
            for cookie in self.make_cookies(response, request):
                if self._policy.set_ok(cookie, request):
                    _debug(" setting cookie: %s", cookie)
                    self.set_cookie(cookie)
        finally:
            self._cookies_lock.release()
            self._cookies_lock.release()

    def clear(self, domain=None, path=None, name=None):
        """Clear some cookies.

@@ -1677,11 +1677,11 @@ class CookieJar:
        """
        self._cookies_lock.acquire()
        try:
            for cookie in self:
                if cookie.discard:
                    self.clear(cookie.domain, cookie.path, cookie.name)
            for cookie in self:
                if cookie.discard:
                    self.clear(cookie.domain, cookie.path, cookie.name)
        finally:
            self._cookies_lock.release()
            self._cookies_lock.release()

    def clear_expired_cookies(self):
        """Discard all expired cookies.

@@ -1695,12 +1695,12 @@ class CookieJar:
        """
        self._cookies_lock.acquire()
        try:
            now = time.time()
            for cookie in self:
                if cookie.is_expired(now):
                    self.clear(cookie.domain, cookie.path, cookie.name)
            now = time.time()
            for cookie in self:
                if cookie.is_expired(now):
                    self.clear(cookie.domain, cookie.path, cookie.name)
        finally:
            self._cookies_lock.release()
            self._cookies_lock.release()

    def __iter__(self):
        return deepvalues(self._cookies)

@@ -1774,16 +1774,16 @@ class FileCookieJar(CookieJar):
        self._cookies_lock.acquire()
        try:

            old_state = copy.deepcopy(self._cookies)
            self._cookies = {}
            try:
                self.load(filename, ignore_discard, ignore_expires)
            except (LoadError, IOError):
                self._cookies = old_state
                raise
            old_state = copy.deepcopy(self._cookies)
            self._cookies = {}
            try:
                self.load(filename, ignore_discard, ignore_expires)
            except (LoadError, IOError):
                self._cookies = old_state
                raise

        finally:
            self._cookies_lock.release()
            self._cookies_lock.release()

from _LWPCookieJar import LWPCookieJar, lwp_cookie_str
from _MozillaCookieJar import MozillaCookieJar
@@ -243,5 +243,5 @@ def open(file, flag=None, mode=0666):
    else:
        # Turn off any bits that are set in the umask
        mode = mode & (~um)


    return _Database(file, mode)
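The mode handling above masks out whatever permission bits the process umask sets. A quick worked example of the arithmetic, with a hypothetical umask value and 2.x octal literals:

    um = 0022            # a typical umask value (hypothetical here)
    mode = 0666
    mode = mode & (~um)  # clears the group-write and other-write bits
    assert mode == 0644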
@@ -11,11 +11,8 @@ Suggested usage is::
      import dummy_thread as thread

"""
__author__ = "Brett Cannon"
__email__ = "brett@python.org"

# Exports only things specified by thread documentation
# (skipping obsolete synonyms allocate(), start_new(), exit_thread())
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
           'interrupt_main', 'LockType']
@@ -5,11 +5,6 @@ to not have ``threading`` considered imported.  Had ``threading`` been
directly imported it would have made all subsequent imports succeed
regardless of whether ``thread`` was available which is not desired.

:Author: Brett Cannon
:Contact: brett@python.org

XXX: Try to get rid of ``_dummy_threading``.

"""
from sys import modules as sys_modules
@@ -46,6 +46,7 @@ CHARSETS = {
    'iso-8859-13': (QP, QP, None),
    'iso-8859-14': (QP, QP, None),
    'iso-8859-15': (QP, QP, None),
    'iso-8859-16': (QP, QP, None),
    'windows-1252':(QP, QP, None),
    'viscii': (QP, QP, None),
    'us-ascii': (None, None, None),

@@ -81,6 +82,8 @@ ALIASES = {
    'latin-8': 'iso-8859-14',
    'latin_9': 'iso-8859-15',
    'latin-9': 'iso-8859-15',
    'latin_10':'iso-8859-16',
    'latin-10':'iso-8859-16',
    'cp949': 'ks_c_5601-1987',
    'euc_jp': 'euc-jp',
    'euc_kr': 'euc-kr',
@@ -301,6 +301,8 @@ aliases = {

    # iso8859_13 codec
    'iso_8859_13' : 'iso8859_13',
    'l7' : 'iso8859_13',
    'latin7' : 'iso8859_13',

    # iso8859_14 codec
    'iso_8859_14' : 'iso8859_14',

@@ -312,6 +314,8 @@ aliases = {

    # iso8859_15 codec
    'iso_8859_15' : 'iso8859_15',
    'l9' : 'iso8859_15',
    'latin9' : 'iso8859_15',

    # iso8859_16 codec
    'iso_8859_16' : 'iso8859_16',
@@ -333,7 +333,7 @@ class FTP:
            # 1xx or error messages for LIST), so we just discard
            # this response.
            if resp[0] == '2':
                resp = self.getresp()
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
        else:

@@ -343,7 +343,7 @@ class FTP:
            resp = self.sendcmd(cmd)
            # See above.
            if resp[0] == '2':
                resp = self.getresp()
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
            conn, sockaddr = sock.accept()
@@ -899,7 +899,7 @@ class HTTPConnection:
            except (AttributeError, OSError):
                # Don't send a length if this failed
                if self.debuglevel > 0: print "Cannot stat!!"


            if thelen is not None:
                self.putheader('Content-Length',thelen)
        for hdr, value in headers.iteritems():
@@ -71,7 +71,7 @@ class CodeContext:
        #
        # To avoid possible errors, all references to the inner workings
        # of Tk are executed inside try/except blocks.


        widgets_for_width_calc = self.editwin.text, self.editwin.text_frame

        # calculate the required vertical padding

@@ -113,7 +113,7 @@ class CodeContext:
            # above it.
            self.label.pack(side="top", fill="x", expand=False,
                            before=self.editwin.text_frame)


        else:
            self.label.destroy()
            self.label = None
@@ -50,9 +50,9 @@ class Dialog(Toplevel):
        # If the master is not viewable, don't
        # make the child transient, or else it
        # would be opened withdrawn
        if parent.winfo_viewable():
        if parent.winfo_viewable():
            self.transient(parent)


        if title:
            self.title(title)
@@ -569,7 +569,7 @@ class _singlefileMailbox(Mailbox):
        # already have been generated (and presumably has been modified
        # by adding or deleting an item).
        assert self._toc is not None


        # Check length of self._file; if it's changed, some other process
        # has modified the mailbox since we scanned it.
        self._file.seek(0, 2)

@@ -578,7 +578,7 @@ class _singlefileMailbox(Mailbox):
            raise ExternalClashError('Size of mailbox file changed '
                                     '(expected %i, found %i)' %
                                     (self._file_length, cur_len))


        new_file = _create_temporary(self._path)
        try:
            new_toc = {}

@@ -1219,7 +1219,7 @@ class Babyl(_singlefileMailbox):
        self._next_key = len(self._toc)
        self._file.seek(0, 2)
        self._file_length = self._file.tell()


    def _pre_mailbox_hook(self, f):
        """Called before writing the mailbox to file f."""
        f.write('BABYL OPTIONS:%sVersion: 5%sLabels:%s%s\037' %
@@ -236,7 +236,7 @@ _release_version = re.compile(r'([^0-9]+)'
                               '[^(]*(?:\((.+)\))?')

# See also http://www.novell.com/coolsolutions/feature/11251.html
# and http://linuxmafia.com/faq/Admin/release-files.html
# and http://linuxmafia.com/faq/Admin/release-files.html
# and http://data.linux-ntfs.org/rpm/whichrpm
# and http://www.die.net/doc/linux/man/man1/lsb_release.1.html

@@ -245,7 +245,7 @@ _supported_dists = ('SuSE', 'debian', 'fedora', 'redhat', 'centos',
                    'gentoo', 'UnitedLinux')

def _parse_release_file(firstline):


    # Parse the first line
    m = _lsb_release_version.match(firstline)
    if m is not None:

@@ -268,7 +268,7 @@ def _parse_release_file(firstline):
    return '', version, id

def _test_parse_release_file():


    for input, output in (
        # Examples of release file contents:
        ('SuSE Linux 9.3 (x86-64)', ('SuSE Linux ', '9.3', 'x86-64'))

@@ -324,7 +324,7 @@ def linux_distribution(distname='', version='', id='',
            break
    else:
        return _dist_try_harder(distname,version,id)


    # Read the first line
    f = open('/etc/'+file, 'r')
    firstline = f.readline()

@@ -340,7 +340,7 @@ def linux_distribution(distname='', version='', id='',
    return distname, version, id

# To maintain backwards compatibility:


def dist(distname='',version='',id='',

         supported_dists=_supported_dists):

@@ -1358,7 +1358,7 @@ def python_branch():
    If not available, an empty string is returned.

    """


    return _sys_version()[2]

def python_revision():
@@ -123,7 +123,7 @@ def fork():
        os.close(tmp_fd)
    else:
        os.close(slave_fd)


    # Parent and child process.
    return pid, master_fd
@@ -1121,7 +1121,7 @@ class Popen(object):
                        # we can write up to PIPE_BUF bytes without risk
                        # blocking.  POSIX defines PIPE_BUF >= 512
                        bytes_written = os.write(self.stdin.fileno(), buffer(input, input_offset, 512))
                        input_offset += bytes_written
                        input_offset += bytes_written
                        if input_offset >= len(input):
                            self.stdin.close()
                            write_set.remove(self.stdin)
@@ -15,7 +15,7 @@ class SortedDict(UserDict.UserDict):
        result = self.data.keys()
        result.sort()
        return result


    def values(self):
        result = self.items()
        return [i[1] for i in values]

@@ -446,12 +446,12 @@ class SortedTestCase(RawConfigParserTestCase):
                          "o2=3\n"
                          "o1=4\n"
                          "[a]\n"
                          "k=v\n")
                          "k=v\n")
        output = StringIO.StringIO()
        self.cf.write(output)
        self.assertEquals(output.getvalue(),
                          "[a]\n"
                          "k = v\n\n"
                          "k = v\n\n"
                          "[b]\n"
                          "o1 = 4\n"
                          "o2 = 3\n"
@@ -7,6 +7,12 @@ from random import random
# How much time in seconds can pass before we print a 'Still working' message.
_PRINT_WORKING_MSG_INTERVAL = 5 * 60

class TrivialContext(object):
    def __enter__(self):
        return self
    def __exit__(self, *exc_info):
        pass

class CompilerTest(unittest.TestCase):

    def testCompileLibrary(self):

@@ -157,6 +163,31 @@ class CompilerTest(unittest.TestCase):
        exec(c, dct)
        self.assertEquals(dct['f'].func_annotations, expected)

    def testWith(self):
        # SF bug 1638243
        c = compiler.compile('from __future__ import with_statement\n'
                             'def f():\n'
                             '    with TrivialContext():\n'
                             '        return 1\n'
                             'result = f()',
                             '<string>',
                             'exec' )
        dct = {'TrivialContext': TrivialContext}
        exec(c, dct)
        self.assertEquals(dct.get('result'), 1)

    def testWithAss(self):
        c = compiler.compile('from __future__ import with_statement\n'
                             'def f():\n'
                             '    with TrivialContext() as tc:\n'
                             '        return 1\n'
                             'result = f()',
                             '<string>',
                             'exec' )
        dct = {'TrivialContext': TrivialContext}
        exec(c, dct)
        self.assertEquals(dct.get('result'), 1)


NOLINENO = (compiler.ast.Module, compiler.ast.Stmt, compiler.ast.Discard)
@@ -49,7 +49,7 @@ class DumbDBMTestCase(unittest.TestCase):
            f.close()
        finally:
            os.umask(old_umask)


        expected_mode = 0635
        if os.name != 'posix':
            # Windows only supports setting the read-only attribute.

@@ -61,7 +61,7 @@ class DumbDBMTestCase(unittest.TestCase):
        self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
        st = os.stat(_fname + '.dir')
        self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)


    def test_close_twice(self):
        f = dumbdbm.open(_fname)
        f['a'] = 'b'
@@ -311,6 +311,13 @@ class ExceptionTests(unittest.TestCase):
                             'pickled "%r", attribute "%s' %
                             (e, checkArgName))

    def testSlicing(self):
        # Test that you can slice an exception directly instead of requiring
        # going through the 'args' attribute.
        args = (1, 2, 3)
        exc = BaseException(*args)
        self.failUnlessEqual(exc[:], args)

    def testKeywordArgs(self):
        # test that builtin exception don't take keyword args,
        # but user-defined subclasses can if they want
@@ -138,7 +138,7 @@ class TestGzip(unittest.TestCase):
        y = f.read(10)
        f.close()
        self.assertEquals(y, data1[20:30])


    def test_seek_write(self):
        # Try seek, write test
        f = gzip.GzipFile(self.filename, 'w')
@@ -674,11 +674,11 @@ class TestMaildir(TestMailbox):
        box = self._factory(self._path, factory=dummy_factory)
        folder = box.add_folder('folder1')
        self.assert_(folder._factory is dummy_factory)


        folder1_alias = box.get_folder('folder1')
        self.assert_(folder1_alias._factory is dummy_factory)




class _TestMboxMMDF(TestMailbox):

@@ -798,7 +798,7 @@ class TestMH(TestMailbox):
        def dummy_factory (s):
            return None
        self._box = self._factory(self._path, dummy_factory)


        new_folder = self._box.add_folder('foo.bar')
        folder0 = self._box.get_folder('foo.bar')
        folder0.add(self._template % 'bar')

@@ -894,7 +894,7 @@ class TestMH(TestMailbox):
        self.assert_(self._box.get_sequences() ==
                     {'foo':[1, 2, 3, 4, 5],
                      'unseen':[1], 'bar':[3], 'replied':[3]})


    def _get_lock_path(self):
        return os.path.join(self._path, '.mh_sequences.lock')
@@ -116,7 +116,7 @@ class MboxTestCase(unittest.TestCase):

    def tearDown(self):
        os.unlink(self._path)


    def test_from_regex (self):
        # Testing new regex from bug #1633678
        f = open(self._path, 'w')
@@ -2,7 +2,7 @@ import unittest
import __builtin__
import exceptions
import warnings
from test.test_support import run_unittest
from test.test_support import run_unittest, guard_warnings_filter
import os
from platform import system as platform_system

@@ -113,13 +113,11 @@ class UsageTests(unittest.TestCase):

    """Test usage of exceptions"""

    def setUp(self):
        self._filters = warnings.filters[:]

    def tearDown(self):
        warnings.filters = self._filters[:]

    def test_raise_new_style_non_exception(self):
        # You cannot raise a new-style class that does not inherit from
        # BaseException; the ability was not possible until BaseException's
        # introduction so no need to support new-style objects that do not
        # inherit from it.
        class NewStyleClass(object):
            pass
        try:

@@ -127,13 +125,51 @@ class UsageTests(unittest.TestCase):
        except TypeError:
            pass
        except:
            self.fail("unable to raise new-style class")
            self.fail("able to raise new-style class")
        try:
            raise NewStyleClass()
        except TypeError:
            pass
        except:
            self.fail("unable to raise new-style class instance")
            self.fail("able to raise new-style class instance")

    def test_raise_string(self):
        # Raising a string raises TypeError.
        try:
            raise "spam"
        except TypeError:
            pass
        except:
            self.fail("was able to raise a string exception")

    def test_catch_string(self):
        # Catching a string should trigger a DeprecationWarning.
        with guard_warnings_filter():
            warnings.resetwarnings()
            warnings.filterwarnings("error")
            str_exc = "spam"
            try:
                try:
                    raise StandardError
                except str_exc:
                    pass
            except DeprecationWarning:
                pass
            except StandardError:
                self.fail("catching a string exception did not raise "
                          "DeprecationWarning")
            # Make sure that even if the string exception is listed in a tuple
            # that a warning is raised.
            try:
                try:
                    raise StandardError
                except (AssertionError, str_exc):
                    pass
            except DeprecationWarning:
                pass
            except StandardError:
                self.fail("catching a string exception specified in a tuple did "
                          "not raise DeprecationWarning")

def test_main():
    run_unittest(ExceptionClassTests, UsageTests)
@@ -120,7 +120,7 @@ else:
    ##if False and lines != ['In child, calling os.setsid()',
    ##                       'Good: OSError was raised.', '']:
    ##    raise TestFailed("Unexpected output from child: %r" % line)


    (pid, status) = os.waitpid(pid, 0)
    res = status >> 8
    debug("Child (%d) exited with status %d (%d)."%(pid, res, status))

@@ -140,8 +140,8 @@ else:
    ##    pass
    ##else:
    ##    raise TestFailed("Read from master_fd did not raise exception")




    os.close(master_fd)

    # pty.fork() passed.
@@ -15,7 +15,7 @@ class ResourceTest(unittest.TestCase):
        self.assertRaises(TypeError, resource.setrlimit, 42, 42, 42)

    def test_fsize_ismax(self):


        try:
            (cur, max) = resource.getrlimit(resource.RLIMIT_FSIZE)
        except AttributeError:

@@ -39,7 +39,7 @@ class ResourceTest(unittest.TestCase):
        # versions of Python were terminated by an uncaught SIGXFSZ, but
        # pythonrun.c has been fixed to ignore that exception. If so, the
        # write() should return EFBIG when the limit is exceeded.


        # At least one platform has an unlimited RLIMIT_FSIZE and attempts
        # to change it raise ValueError instead.
        try:
@@ -481,7 +481,7 @@ class SetSubclassWithKeywordArgs(set):
        set.__init__(self, iterable)

class TestSetSubclassWithKeywordArgs(TestSet):


    def test_keywords_in_subclass(self):
        'SF bug #1486663 -- this used to erroneously raise a TypeError'
        SetSubclassWithKeywordArgs(newarg=1)

@@ -1464,7 +1464,7 @@ def test_main(verbose=None):
    test_classes = (
        TestSet,
        TestSetSubclass,
        TestSetSubclassWithKeywordArgs,
        TestSetSubclassWithKeywordArgs,
        TestFrozenSet,
        TestFrozenSetSubclass,
        TestSetOfSets,
@@ -463,6 +463,10 @@ class CalculationTests(unittest.TestCase):
                                  "of the year")
        test_helper((1917, 12, 31), "Dec 31 on Monday with year starting and "
                                    "ending on Monday")
        test_helper((2007, 01, 07), "First Sunday of 2007")
        test_helper((2007, 01, 14), "Second Sunday of 2007")
        test_helper((2006, 12, 31), "Last Sunday of 2006")
        test_helper((2006, 12, 24), "Second to last Sunday of 2006")


class CacheTests(unittest.TestCase):
@@ -119,7 +119,7 @@ for prefix in ('', '@', '<', '>', '=', '!'):
        cp, bp, hp, ip, lp, fp, dp, tp = struct.unpack(format, s)
        if (cp != c or bp != b or hp != h or ip != i or lp != l or
            int(100 * fp) != int(100 * f) or int(100 * dp) != int(100 * d) or
            tp != t):
            tp != t):
            # ^^^ calculate only to two decimal places
            raise TestFailed, "unpack/pack not transitive (%s, %s)" % (
                str(format), str((cp, bp, hp, ip, lp, fp, dp, tp)))

@@ -160,11 +160,11 @@ tests = [
    ('f', -2.0, '\300\000\000\000', '\000\000\000\300', 0),
    ('d', -2.0, '\300\000\000\000\000\000\000\000',
                '\000\000\000\000\000\000\000\300', 0),
    ('t', 0, '\0', '\0', 0),
    ('t', 3, '\1', '\1', 1),
    ('t', True, '\1', '\1', 0),
    ('t', [], '\0', '\0', 1),
    ('t', (1,), '\1', '\1', 1),
    ('t', 0, '\0', '\0', 0),
    ('t', 3, '\1', '\1', 1),
    ('t', True, '\1', '\1', 0),
    ('t', [], '\0', '\0', 1),
    ('t', (1,), '\1', '\1', 1),
]

for fmt, arg, big, lil, asy in tests:

@@ -621,48 +621,48 @@ test_pack_into()
test_pack_into_fn()

def test_bool():
    for prefix in tuple("<>!=")+('',):
        false = (), [], [], '', 0
        true = [1], 'test', 5, -1, 0xffffffff+1, 0xffffffff/2

        falseFormat = prefix + 't' * len(false)
        if verbose:
            print 'trying bool pack/unpack on', false, 'using format', falseFormat
        packedFalse = struct.pack(falseFormat, *false)
        unpackedFalse = struct.unpack(falseFormat, packedFalse)

        trueFormat = prefix + 't' * len(true)
        if verbose:
            print 'trying bool pack/unpack on', true, 'using format', trueFormat
        packedTrue = struct.pack(trueFormat, *true)
        unpackedTrue = struct.unpack(trueFormat, packedTrue)

        if len(true) != len(unpackedTrue):
            raise TestFailed('unpacked true array is not of same size as input')
        if len(false) != len(unpackedFalse):
            raise TestFailed('unpacked false array is not of same size as input')

        for t in unpackedFalse:
            if t is not False:
                raise TestFailed('%r did not unpack as False' % t)
        for t in unpackedTrue:
            if t is not True:
                raise TestFailed('%r did not unpack as false' % t)

        if prefix and verbose:
            print 'trying size of bool with format %r' % (prefix+'t')
        packed = struct.pack(prefix+'t', 1)

        if len(packed) != struct.calcsize(prefix+'t'):
            raise TestFailed('packed length is not equal to calculated size')

        if len(packed) != 1 and prefix:
            raise TestFailed('encoded bool is not one byte: %r' % packed)
        elif not prefix and verbose:
            print 'size of bool in native format is %i' % (len(packed))

        for c in '\x01\x7f\xff\x0f\xf0':
            if struct.unpack('>t', c)[0] is not True:
                raise TestFailed('%c did not unpack as True' % c)
    for prefix in tuple("<>!=")+('',):
        false = (), [], [], '', 0
        true = [1], 'test', 5, -1, 0xffffffff+1, 0xffffffff/2

        falseFormat = prefix + 't' * len(false)
        if verbose:
            print 'trying bool pack/unpack on', false, 'using format', falseFormat
        packedFalse = struct.pack(falseFormat, *false)
        unpackedFalse = struct.unpack(falseFormat, packedFalse)

        trueFormat = prefix + 't' * len(true)
        if verbose:
            print 'trying bool pack/unpack on', true, 'using format', trueFormat
        packedTrue = struct.pack(trueFormat, *true)
        unpackedTrue = struct.unpack(trueFormat, packedTrue)

        if len(true) != len(unpackedTrue):
            raise TestFailed('unpacked true array is not of same size as input')
        if len(false) != len(unpackedFalse):
            raise TestFailed('unpacked false array is not of same size as input')

        for t in unpackedFalse:
            if t is not False:
                raise TestFailed('%r did not unpack as False' % t)
        for t in unpackedTrue:
            if t is not True:
                raise TestFailed('%r did not unpack as false' % t)

        if prefix and verbose:
            print 'trying size of bool with format %r' % (prefix+'t')
        packed = struct.pack(prefix+'t', 1)

        if len(packed) != struct.calcsize(prefix+'t'):
            raise TestFailed('packed length is not equal to calculated size')

        if len(packed) != 1 and prefix:
            raise TestFailed('encoded bool is not one byte: %r' % packed)
        elif not prefix and verbose:
            print 'size of bool in native format is %i' % (len(packed))

        for c in '\x01\x7f\xff\x0f\xf0':
            if struct.unpack('>t', c)[0] is not True:
                raise TestFailed('%c did not unpack as True' % c)

test_bool()
@@ -270,7 +270,7 @@ def open_urlresource(url):
    print >> get_original_stdout(), '\tfetching %s ...' % url
    fn, _ = urllib.urlretrieve(url, filename)
    return open(fn)


@contextmanager
def guard_warnings_filter():
    """Guard the warnings filter from being permanently changed."""