commit 83c02ee8e8
Author: Brett Cannon
Date:   2012-04-06 12:54:57 -04:00

65 changed files with 2744 additions and 330 deletions


@@ -33,7 +33,6 @@ Modules/Setup.local
 Modules/config.c
 Modules/ld_so_aix
 Parser/pgen
-Parser/pgen.stamp
 Lib/test/data/*
 Lib/lib2to3/Grammar*.pickle
 Lib/lib2to3/PatternGrammar*.pickle

.gitignore

@@ -32,7 +32,6 @@ PCbuild/*.o
 PCbuild/*.pdb
 PCbuild/Win32-temp-*
 Parser/pgen
-Parser/pgen.stamp
 __pycache__
 autom4te.cache
 build/


@@ -32,7 +32,6 @@ Modules/Setup.local
 Modules/config.c
 Modules/ld_so_aix$
 Parser/pgen$
-Parser/pgen.stamp$
 PCbuild/amd64/
 ^core
 ^python-gdb.py


@@ -94,7 +94,7 @@ It defines the following constants and functions:
 *size* argument specifies the stack size to be used for subsequently created
 threads, and must be 0 (use platform or configured default) or a positive
 integer value of at least 32,768 (32kB). If changing the thread stack size is
-unsupported, a :exc:`ThreadError` is raised. If the specified stack size is
+unsupported, a :exc:`RuntimeError` is raised. If the specified stack size is
 invalid, a :exc:`ValueError` is raised and the stack size is unmodified. 32kB
 is currently the minimum supported stack size value to guarantee sufficient
 stack space for the interpreter itself. Note that some platforms may have
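For reference, the behaviour documented here can be exercised directly; a minimal sketch (whether the size can be changed at all, and the effective minimum, are platform-dependent):

    import _thread

    print(_thread.stack_size())          # 0 means the platform default

    try:
        _thread.stack_size(256 * 1024)   # ask for a 256 kB stack for new threads
    except ValueError:
        print("stack size below the supported minimum")
    except RuntimeError:
        print("changing the thread stack size is not supported here")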


@@ -41,6 +41,8 @@ Python's general purpose built-in containers, :class:`dict`, :class:`list`,
 :class:`ChainMap` objects
 -------------------------
 
+.. versionadded:: 3.3
+
 A :class:`ChainMap` class is provided for quickly linking a number of mappings
 so they can be treated as a single unit. It is often much faster than creating
 a new dictionary and running multiple :meth:`~dict.update` calls.
@@ -91,8 +93,6 @@ The class can be used to simulate nested scopes and is useful in templating.
 The use-cases also parallel those for the builtin :func:`super` function.
 A reference to ``d.parents`` is equivalent to: ``ChainMap(*d.maps[1:])``.
 
-.. versionadded:: 3.3
-
 Example of simulating Python's internal lookup chain::
 
     import builtins
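A quick illustration of the linking behaviour described in this section (plain standard-library usage):

    from collections import ChainMap

    defaults = {'color': 'red', 'user': 'guest'}
    overrides = {'user': 'admin'}

    settings = ChainMap(overrides, defaults)
    print(settings['user'])            # 'admin'  -- first mapping wins
    print(settings['color'])           # 'red'    -- falls through to defaults
    print(settings.parents['user'])    # 'guest'  -- same chain minus the first map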


@@ -804,7 +804,7 @@ The p1.stdout.close() call after starting the p2 is important in order for p1
 to receive a SIGPIPE if p2 exits before p1.
 
 Alternatively, for trusted input, the shell's own pipeline support may still
-be used directly:
+be used directly::
 
    output=`dmesg | grep hda`
    # becomes
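The subprocess replacement for that shell pipeline, including the ``p1.stdout.close()`` call discussed above, looks roughly like this:

    from subprocess import Popen, PIPE

    p1 = Popen(["dmesg"], stdout=PIPE)
    p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
    p1.stdout.close()   # lets p1 receive SIGPIPE if p2 exits first
    output = p2.communicate()[0]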


@@ -174,7 +174,7 @@ This module defines the following functions and objects:
 *size* argument specifies the stack size to be used for subsequently created
 threads, and must be 0 (use platform or configured default) or a positive
 integer value of at least 32,768 (32kB). If changing the thread stack size is
-unsupported, a :exc:`ThreadError` is raised. If the specified stack size is
+unsupported, a :exc:`RuntimeError` is raised. If the specified stack size is
 invalid, a :exc:`ValueError` is raised and the stack size is unmodified. 32kB
 is currently the minimum supported stack size value to guarantee sufficient
 stack space for the interpreter itself. Note that some platforms may have
@@ -452,7 +452,7 @@ All methods are executed atomically.
 are blocked waiting for the lock to become unlocked, allow exactly one of them
 to proceed.
 
-Do not call this method when the lock is unlocked.
+When invoked on an unlocked lock, a :exc:`RuntimeError` is raised.
 
 There is no return value.
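The release() behaviour documented in the second hunk can be seen with a short sketch:

    import threading

    lock = threading.Lock()
    lock.acquire()
    lock.release()        # fine: the lock was held

    try:
        lock.release()    # the lock is already unlocked
    except RuntimeError:
        print("cannot release an unlocked lock")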


@@ -486,6 +486,8 @@ Some smaller changes made to the core Python language are:
   (:issue:`10516`)
 
+.. XXX mention new error messages for passing wrong number of arguments to functions
+
 
 New and Improved Modules
 ========================
@@ -572,6 +574,26 @@ versions.
 
 The ``unicode_internal`` codec has been deprecated.
 
+
+collections
+-----------
+
+Addition of a new :class:`~collections.ChainMap` class to allow treating a
+number of mappings as a single unit.
+
+(Written by Raymond Hettinger for :issue:`11089`, made public in
+:issue:`11297`)
+
+The abstract base classes have been moved in a new :mod:`collections.abc`
+module, to better differentiate between the abstract and the concrete
+collections classes.  Aliases for ABCs are still present in the
+:mod:`collections` module to preserve existing imports.
+
+(:issue:`11085`)
+
+.. XXX addition of __slots__ to ABCs not recorded here: internal detail
+
 crypt
 -----
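The aliasing described in the collections entry above means both spellings work under 3.3; a small illustration (the old top-level names remained available as aliases for several more releases):

    import collections
    import collections.abc

    print(issubclass(list, collections.abc.Sequence))          # True
    print(collections.Sequence is collections.abc.Sequence)    # True, kept as an alias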
@@ -865,11 +887,12 @@ packaging
 ---------
 
 :mod:`distutils` has undergone additions and refactoring under a new name,
-:mod:`packaging`, to allow developers to break backward compatibility.
+:mod:`packaging`, to allow developers to make far-reaching changes without
+being constrained by backward compatibility.
 
 :mod:`distutils` is still provided in the standard library, but users are
 encouraged to transition to :mod:`packaging`. For older versions of Python, a
-backport compatible with 2.4+ and 3.1+ will be made available on PyPI under the
-name :mod:`distutils2`.
+backport compatible with Python 2.5 and newer and 3.2 is available on PyPI
+under the name `distutils2 <http://pypi.python.org/pypi/Distutils2>`_.
 
 .. TODO add examples and howto to the packaging docs and link to them
@@ -1057,12 +1080,24 @@ should be used. For example, this will send a ``'HEAD'`` request::
 (:issue:`1673007`)
 
+webbrowser
+----------
+
+The :mod:`webbrowser` module supports more browsers: Google Chrome (named
+:program:`chrome`, :program:`chromium`, :program:`chrome-browser` or
+:program:`chromium-browser` depending on the version and operating system) as
+well as the generic launchers :program:`xdg-open` from the FreeDesktop.org
+project and :program:`gvfs-open` which is the default URI handler for GNOME 3.
+
+(:issue:`13620` and :issue:`14493`)
+
 
 Optimizations
 =============
 
 Major performance enhancements have been added:
 
-* Thanks to the :pep:`393`, some operations on Unicode strings has been optimized:
+* Thanks to :pep:`393`, some operations on Unicode strings have been optimized:
 
   * the memory footprint is divided by 2 to 4 depending on the text
 
   * encode an ASCII string to UTF-8 doesn't need to encode characters anymore,
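The webbrowser change noted above can be exercised by asking for one of the new controllers by name and falling back to the platform default when it is not installed (sketch only; availability depends on the system):

    import webbrowser

    try:
        browser = webbrowser.get('chrome')
    except webbrowser.Error:
        browser = webbrowser.get()      # whatever the platform default is

    browser.open('http://www.python.org/')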
@@ -1081,7 +1116,7 @@ Changes to Python's build process and to the C API include:
 * :c:func:`PyMemoryView_FromMemory`
 
-* The :pep:`393` added new Unicode types, macros and functions:
+* :pep:`393` added new Unicode types, macros and functions:
 
   * High-level API:
@@ -1124,7 +1159,7 @@ are no longer supported due to maintenance burden.
 Deprecated Python modules, functions and methods
 ------------------------------------------------
 
-* The :mod:`distutils` modules has been deprecated. Use the new
+* The :mod:`distutils` module has been deprecated. Use the new
   :mod:`packaging` module instead.
 * The ``unicode_internal`` codec has been deprecated because of the
   :pep:`393`, use UTF-8, UTF-16 (``utf-16-le`` or ``utf-16-be``), or UTF-32
@@ -1143,7 +1178,7 @@ Deprecated Python modules, functions and methods
 Deprecated functions and types of the C API
 -------------------------------------------
 
-The :c:type:`Py_UNICODE` has been deprecated by the :pep:`393` and will be
+The :c:type:`Py_UNICODE` has been deprecated by :pep:`393` and will be
 removed in Python 4. All functions using this type are deprecated:
 
 Unicode functions and methods using :c:type:`Py_UNICODE` and
@@ -1245,7 +1280,7 @@ Porting C code
   functions using this type are deprecated (but will stay available for
   at least five years). If you were using low-level Unicode APIs to
   construct and access unicode objects and you want to benefit of the
-  memory footprint reduction provided by the PEP 393, you have to convert
+  memory footprint reduction provided by PEP 393, you have to convert
   your code to the new :doc:`Unicode API <../c-api/unicode>`.
 
   However, if you only have been using high-level functions such as


@@ -17,7 +17,7 @@ PyAPI_DATA(PyTypeObject) PyCFunction_Type;
 typedef PyObject *(*PyCFunction)(PyObject *, PyObject *);
 typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *,
                                              PyObject *);
 typedef PyObject *(*PyNoArgsFunction)(PyObject *);
 
 PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *);
@@ -33,22 +33,22 @@ PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *);
         (((PyCFunctionObject *)func) -> m_ml -> ml_flags & METH_STATIC ? \
          NULL : ((PyCFunctionObject *)func) -> m_self)
 #define PyCFunction_GET_FLAGS(func) \
         (((PyCFunctionObject *)func) -> m_ml -> ml_flags)
 #endif
 PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *);
 
 struct PyMethodDef {
     const char  *ml_name;   /* The name of the built-in function/method */
     PyCFunction ml_meth;    /* The C function that implements it */
     int         ml_flags;   /* Combination of METH_xxx flags, which mostly
                                describe the args expected by the C func */
     const char  *ml_doc;    /* The __doc__ attribute, or NULL */
 };
 typedef struct PyMethodDef PyMethodDef;
 
 #define PyCFunction_New(ML, SELF) PyCFunction_NewEx((ML), (SELF), NULL)
 PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *,
                                          PyObject *);
 
 /* Flag passed to newmethodobject */
 /* #define METH_OLDARGS  0x0000   -- unsupported now */


@@ -535,6 +535,11 @@ PyAPI_FUNC(int)
 _PyObject_GenericSetAttrWithDict(PyObject *, PyObject *,
                                  PyObject *, PyObject *);
 
+/* Helper to look up a builtin object */
+#ifndef Py_LIMITED_API
+PyAPI_FUNC(PyObject *)
+_PyObject_GetBuiltin(const char *name);
+#endif
+
 /* PyObject_Dir(obj) acts like Python builtins.dir(obj), returning a
    list of strings.  PyObject_Dir(NULL) is like builtins.dir(),


@@ -18,9 +18,13 @@ __all__ = ["Hashable", "Iterable", "Iterator",
            "ByteString",
            ]
 
-### collection related types which are not exposed through builtin ###
-## iterators ##
+# Private list of types that we want to register with the various ABCs
+# so that they will pass tests like:
+#       it = iter(somebytearray)
+#       assert isinstance(it, Iterable)
+# Note: in other implementations, these types many not be distinct
+#       and they make have their own implementation specific types that
+#       are not included on this list.
 bytes_iterator = type(iter(b''))
 bytearray_iterator = type(iter(bytearray()))
 #callable_iterator = ???
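The registration that this comment block describes simply keeps generic ABC checks working for the concrete iterator types, for example:

    from collections.abc import Iterable, Iterator

    it = iter(bytearray(b"abc"))
    print(isinstance(it, Iterable))   # True
    print(isinstance(it, Iterator))   # True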


@@ -1,6 +1,9 @@
 What's New in IDLE 3.3?
 =========================
 
+- Issue #8515: Set __file__ when run file in IDLE.
+  Initial patch by Bruce Frederiksen.
+
 - IDLE can be launched as `python -m idlelib`
 
 - Issue #14409: IDLE now properly executes commands in the Shell window


@@ -150,16 +150,16 @@ class ScriptBinding:
         dirname = os.path.dirname(filename)
         # XXX Too often this discards arguments the user just set...
         interp.runcommand("""if 1:
-            _filename = %r
+            __file__ = {filename!r}
             import sys as _sys
             from os.path import basename as _basename
             if (not _sys.argv or
-                _basename(_sys.argv[0]) != _basename(_filename)):
-                _sys.argv = [_filename]
+                _basename(_sys.argv[0]) != _basename(__file__)):
+                _sys.argv = [__file__]
             import os as _os
-            _os.chdir(%r)
-            del _filename, _sys, _basename, _os
-            \n""" % (filename, dirname))
+            _os.chdir({dirname!r})
+            del _sys, _basename, _os
+            \n""".format(filename=filename, dirname=dirname))
 
         interp.prepend_syspath(filename)
         # XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
         # go to __stderr__.  With subprocess, they go to the shell.


@@ -78,7 +78,7 @@ class TabSet(Frame):
     def remove_tab(self, tab_name):
         """Remove the tab named <tab_name>"""
         if not tab_name in self._tab_names:
-            raise KeyError("No such Tab: '%s" % page_name)
+            raise KeyError("No such Tab: '%s" % tab_name)
 
         self._tab_names.remove(tab_name)
         self._arrange_tabs()
@@ -88,7 +88,7 @@ class TabSet(Frame):
         if tab_name == self._selected_tab:
             return
         if tab_name is not None and tab_name not in self._tabs:
-            raise KeyError("No such Tab: '%s" % page_name)
+            raise KeyError("No such Tab: '%s" % tab_name)
 
         # deselect the current selected tab
         if self._selected_tab is not None:


@@ -111,6 +111,10 @@ def _validate_family(family):
     if sys.platform != 'win32' and family == 'AF_PIPE':
         raise ValueError('Family %s is not recognized.' % family)
 
+    if sys.platform == 'win32' and family == 'AF_UNIX':
+        # double check
+        if not hasattr(socket, family):
+            raise ValueError('Family %s is not recognized.' % family)
+
 
 def address_type(address):
     '''


@@ -4,6 +4,7 @@ Tests common to tuple, list and UserList.UserList
 
 import unittest
 import sys
+import pickle
 
 # Various iterables
 # This is used for checking the constructor (here and in test_deque.py)
@@ -388,3 +389,9 @@ class CommonTest(unittest.TestCase):
         self.assertEqual(a.index(0, -4*sys.maxsize, 4*sys.maxsize), 2)
         self.assertRaises(ValueError, a.index, 0, 4*sys.maxsize,-4*sys.maxsize)
         self.assertRaises(ValueError, a.index, 2, 0, -10)
+
+    def test_pickle(self):
+        lst = self.type2test([4, 5, 6, 7])
+        lst2 = pickle.loads(pickle.dumps(lst))
+        self.assertEqual(lst2, lst)
+        self.assertNotEqual(id(lst2), id(lst))


@@ -285,6 +285,20 @@ class BaseTest(unittest.TestCase):
         self.assertEqual(a.x, b.x)
         self.assertEqual(type(a), type(b))
 
+    def test_iterator_pickle(self):
+        data = array.array(self.typecode, self.example)
+        orgit = iter(data)
+        d = pickle.dumps(orgit)
+        it = pickle.loads(d)
+        self.assertEqual(type(orgit), type(it))
+        self.assertEqual(list(it), list(data))
+
+        if len(data):
+            it = pickle.loads(d)
+            next(it)
+            d = pickle.dumps(it)
+            self.assertEqual(list(it), list(data)[1:])
+
     def test_insert(self):
         a = array.array(self.typecode, self.example)
         a.insert(0, self.example[0])


@@ -74,15 +74,16 @@ def capture_server(evt, buf, serv):
         pass
     else:
         n = 200
-        while n > 0:
-            r, w, e = select.select([conn], [], [])
+        start = time.time()
+        while n > 0 and time.time() - start < 3.0:
+            r, w, e = select.select([conn], [], [], 0.1)
             if r:
-                n -= 1
                 data = conn.recv(10)
                 # keep everything except for the newline terminator
                 buf.write(data.replace(b'\n', b''))
                 if b'\n' in data:
                     break
+                n -= 1
             time.sleep(0.01)
 
         conn.close()


@@ -14,6 +14,7 @@ import random
 import traceback
 from test.support import TESTFN, unlink, run_unittest, check_warnings
 from operator import neg
+import pickle
 
 try:
     import pty, signal
 except ImportError:
@@ -110,7 +111,30 @@ class TestFailingIter:
     def __iter__(self):
         raise RuntimeError
 
+def filter_char(arg):
+    return ord(arg) > ord("d")
+
+def map_char(arg):
+    return chr(ord(arg)+1)
+
 class BuiltinTest(unittest.TestCase):
+    # Helper to check picklability
+    def check_iter_pickle(self, it, seq):
+        itorg = it
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(list(it), seq)
+
+        #test the iterator after dropping one from it
+        it = pickle.loads(d)
+        try:
+            next(it)
+        except StopIteration:
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(list(it), seq[1:])
 
     def test_import(self):
         __import__('sys')
@@ -566,6 +590,11 @@ class BuiltinTest(unittest.TestCase):
         self.assertEqual(list(filter(lambda x: x>=3, (1, 2, 3, 4))), [3, 4])
         self.assertRaises(TypeError, list, filter(42, (1, 2)))
 
+    def test_filter_pickle(self):
+        f1 = filter(filter_char, "abcdeabcde")
+        f2 = filter(filter_char, "abcdeabcde")
+        self.check_iter_pickle(f1, list(f2))
+
     def test_getattr(self):
         self.assertTrue(getattr(sys, 'stdout') is sys.stdout)
         self.assertRaises(TypeError, getattr, sys, 1)
@@ -759,6 +788,11 @@ class BuiltinTest(unittest.TestCase):
             raise RuntimeError
         self.assertRaises(RuntimeError, list, map(badfunc, range(5)))
 
+    def test_map_pickle(self):
+        m1 = map(map_char, "Is this the real life?")
+        m2 = map(map_char, "Is this the real life?")
+        self.check_iter_pickle(m1, list(m2))
+
     def test_max(self):
         self.assertEqual(max('123123'), '3')
         self.assertEqual(max(1, 2, 3), 3)
@@ -1300,6 +1334,13 @@ class BuiltinTest(unittest.TestCase):
                 return i
         self.assertRaises(ValueError, list, zip(BadSeq(), BadSeq()))
 
+    def test_zip_pickle(self):
+        a = (1, 2, 3)
+        b = (4, 5, 6)
+        t = [(1, 4), (2, 5), (3, 6)]
+        z1 = zip(a, b)
+        self.check_iter_pickle(z1, t)
+
     def test_format(self):
         # Test the basic machinery of the format() builtin.  Don't test
         # the specifics of the various formatters


@@ -518,6 +518,24 @@ class BaseBytesTest(unittest.TestCase):
         q = pickle.loads(ps)
         self.assertEqual(b, q)
 
+    def test_iterator_pickling(self):
+        for b in b"", b"a", b"abc", b"\xffab\x80", b"\0\0\377\0\0":
+            it = itorg = iter(self.type2test(b))
+            data = list(self.type2test(b))
+            d = pickle.dumps(it)
+            it = pickle.loads(d)
+            self.assertEqual(type(itorg), type(it))
+            self.assertEqual(list(it), data)
+
+            it = pickle.loads(d)
+            try:
+                next(it)
+            except StopIteration:
+                continue
+            d = pickle.dumps(it)
+            it = pickle.loads(d)
+            self.assertEqual(list(it), data[1:])
+
     def test_strip(self):
         b = self.type2test(b'mississippi')
         self.assertEqual(b.strip(b'i'), b'mississipp')


@@ -4953,6 +4953,78 @@ class CWhitebox(unittest.TestCase):
         self.assertRaises(ValueError, get_fmt, 12345, invalid_dot, 'g')
         self.assertRaises(ValueError, get_fmt, 12345, invalid_sep, 'g')
 
+    def test_exact_conversion(self):
+        Decimal = C.Decimal
+        localcontext = C.localcontext
+        InvalidOperation = C.InvalidOperation
+
+        with localcontext() as c:
+
+            c.traps[InvalidOperation] = True
+
+            # Clamped
+            x = "0e%d" % sys.maxsize
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            x = "0e%d" % (-sys.maxsize-1)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # Overflow
+            x = "1e%d" % sys.maxsize
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # Underflow
+            x = "1e%d" % (-sys.maxsize-1)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+    def test_from_tuple(self):
+        Decimal = C.Decimal
+        localcontext = C.localcontext
+        InvalidOperation = C.InvalidOperation
+        Overflow = C.Overflow
+        Underflow = C.Underflow
+
+        with localcontext() as c:
+
+            c.traps[InvalidOperation] = True
+            c.traps[Overflow] = True
+            c.traps[Underflow] = True
+
+            # SSIZE_MAX
+            x = (1, (), sys.maxsize)
+            self.assertEqual(str(c.create_decimal(x)), '-0E+999999')
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            x = (1, (0, 1, 2), sys.maxsize)
+            self.assertRaises(Overflow, c.create_decimal, x)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # SSIZE_MIN
+            x = (1, (), -sys.maxsize-1)
+            self.assertEqual(str(c.create_decimal(x)), '-0E-1000026')
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            x = (1, (0, 1, 2), -sys.maxsize-1)
+            self.assertRaises(Underflow, c.create_decimal, x)
+            self.assertRaises(InvalidOperation, Decimal, x)
+
+            # OverflowError
+            x = (1, (), sys.maxsize+1)
+            self.assertRaises(OverflowError, c.create_decimal, x)
+            self.assertRaises(OverflowError, Decimal, x)
+
+            x = (1, (), -sys.maxsize-2)
+            self.assertRaises(OverflowError, c.create_decimal, x)
+            self.assertRaises(OverflowError, Decimal, x)
+
+            # Specials
+            x = (1, (), "N")
+            self.assertEqual(str(Decimal(x)), '-sNaN')
+            x = (1, (0,), "N")
+            self.assertEqual(str(Decimal(x)), '-sNaN')
+            x = (1, (0, 1), "N")
+            self.assertEqual(str(Decimal(x)), '-sNaN1')
+
 all_tests = [
   CExplicitConstructionTest, PyExplicitConstructionTest,


@@ -471,6 +471,19 @@ class TestBasic(unittest.TestCase):
 ##        self.assertNotEqual(id(d), id(e))
 ##        self.assertEqual(id(e), id(e[-1]))
 
+    def test_iterator_pickle(self):
+        data = deque(range(200))
+        it = itorg = iter(data)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(list(it), list(data))
+
+        it = pickle.loads(d)
+        next(it)
+        d = pickle.dumps(it)
+        self.assertEqual(list(it), list(data)[1:])
+
     def test_deepcopy(self):
         mut = [10]
         d = deque([mut])


@@ -2,7 +2,9 @@ import unittest
 from test import support
 
 import collections, random, string
+import collections.abc
 import gc, weakref
+import pickle
 
 
 class DictTest(unittest.TestCase):
@@ -803,6 +805,58 @@ class DictTest(unittest.TestCase):
                 pass
         self._tracked(MyDict())
 
+    def test_iterator_pickling(self):
+        data = {1:"a", 2:"b", 3:"c"}
+        it = iter(data)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(sorted(it), sorted(data))
+
+        it = pickle.loads(d)
+        try:
+            drop = next(it)
+        except StopIteration:
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        del data[drop]
+        self.assertEqual(sorted(it), sorted(data))
+
+    def test_itemiterator_pickling(self):
+        data = {1:"a", 2:"b", 3:"c"}
+        # dictviews aren't picklable, only their iterators
+        itorg = iter(data.items())
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        # note that the type of type of the unpickled iterator
+        # is not necessarily the same as the original.  It is
+        # merely an object supporting the iterator protocol, yielding
+        # the same objects as the original one.
+        # self.assertEqual(type(itorg), type(it))
+        self.assertTrue(isinstance(it, collections.abc.Iterator))
+        self.assertEqual(dict(it), data)
+
+        it = pickle.loads(d)
+        drop = next(it)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        del data[drop[0]]
+        self.assertEqual(dict(it), data)
+
+    def test_valuesiterator_pickling(self):
+        data = {1:"a", 2:"b", 3:"c"}
+        # data.values() isn't picklable, only its iterator
+        it = iter(data.values())
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(sorted(list(it)), sorted(list(data.values())))
+
+        it = pickle.loads(d)
+        drop = next(it)
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        values = list(it) + [drop]
+        self.assertEqual(sorted(values), sorted(list(data.values())))
+
 from test import mapping_tests


@@ -1,5 +1,6 @@
 import unittest
 import sys
+import pickle
 
 from test import support
@@ -61,7 +62,25 @@ class N:
     def __iter__(self):
         return self
 
-class EnumerateTestCase(unittest.TestCase):
+class PickleTest:
+    # Helper to check picklability
+    def check_pickle(self, itorg, seq):
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        self.assertEqual(type(itorg), type(it))
+        self.assertEqual(list(it), seq)
+
+        it = pickle.loads(d)
+        try:
+            next(it)
+        except StopIteration:
+            self.assertFalse(seq[1:])
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(list(it), seq[1:])
+
+class EnumerateTestCase(unittest.TestCase, PickleTest):
 
     enum = enumerate
     seq, res = 'abc', [(0,'a'), (1,'b'), (2,'c')]
@@ -73,6 +92,9 @@ class EnumerateTestCase(unittest.TestCase):
         self.assertEqual(list(self.enum(self.seq)), self.res)
         self.enum.__doc__
 
+    def test_pickle(self):
+        self.check_pickle(self.enum(self.seq), self.res)
+
     def test_getitemseqn(self):
         self.assertEqual(list(self.enum(G(self.seq))), self.res)
         e = self.enum(G(''))
@@ -126,7 +148,7 @@ class TestBig(EnumerateTestCase):
     seq = range(10,20000,2)
     res = list(zip(range(20000), seq))
 
-class TestReversed(unittest.TestCase):
+class TestReversed(unittest.TestCase, PickleTest):
 
     def test_simple(self):
         class A:
@@ -212,6 +234,10 @@ class TestReversed(unittest.TestCase):
         ngi = NoGetItem()
         self.assertRaises(TypeError, reversed, ngi)
 
+    def test_pickle(self):
+        for data in 'abc', range(5), tuple(enumerate('abc')), range(1,17,5):
+            self.check_pickle(reversed(data), list(data)[::-1])
+
 class EnumerateStartTestCase(EnumerateTestCase):


@@ -2,6 +2,8 @@
 
 import unittest
 from test.support import run_unittest, TESTFN, unlink, cpython_only
+import pickle
+import collections.abc
 
 # Test result of triple loop (too big to inline)
 TRIPLETS = [(0, 0, 0), (0, 0, 1), (0, 0, 2),
@@ -28,6 +30,8 @@ class BasicIterClass:
             raise StopIteration
         self.i = res + 1
         return res
+    def __iter__(self):
+        return self
 
 class IteratingSequenceClass:
     def __init__(self, n):
@@ -49,7 +53,9 @@ class SequenceClass:
 class TestCase(unittest.TestCase):
 
     # Helper to check that an iterator returns a given sequence
-    def check_iterator(self, it, seq):
+    def check_iterator(self, it, seq, pickle=True):
+        if pickle:
+            self.check_pickle(it, seq)
         res = []
         while 1:
             try:
@@ -60,12 +66,33 @@ class TestCase(unittest.TestCase):
         self.assertEqual(res, seq)
 
     # Helper to check that a for loop generates a given sequence
-    def check_for_loop(self, expr, seq):
+    def check_for_loop(self, expr, seq, pickle=True):
+        if pickle:
+            self.check_pickle(iter(expr), seq)
         res = []
         for val in expr:
             res.append(val)
         self.assertEqual(res, seq)
 
+    # Helper to check picklability
+    def check_pickle(self, itorg, seq):
+        d = pickle.dumps(itorg)
+        it = pickle.loads(d)
+        # Cannot assert type equality because dict iterators unpickle as list
+        # iterators.
+        # self.assertEqual(type(itorg), type(it))
+        self.assertTrue(isinstance(it, collections.abc.Iterator))
+        self.assertEqual(list(it), seq)
+
+        it = pickle.loads(d)
+        try:
+            next(it)
+        except StopIteration:
+            return
+        d = pickle.dumps(it)
+        it = pickle.loads(d)
+        self.assertEqual(list(it), seq[1:])
+
     # Test basic use of iter() function
     def test_iter_basic(self):
         self.check_iterator(iter(range(10)), list(range(10)))
@@ -138,7 +165,7 @@ class TestCase(unittest.TestCase):
                 if i > 100:
                     raise IndexError # Emergency stop
                 return i
-        self.check_iterator(iter(C(), 10), list(range(10)))
+        self.check_iterator(iter(C(), 10), list(range(10)), pickle=False)
 
     # Test two-argument iter() with function
    def test_iter_function(self):
@@ -146,7 +173,7 @@ class TestCase(unittest.TestCase):
             i = state[0]
             state[0] = i+1
             return i
-        self.check_iterator(iter(spam, 10), list(range(10)))
+        self.check_iterator(iter(spam, 10), list(range(10)), pickle=False)
 
     # Test two-argument iter() with function that raises StopIteration
     def test_iter_function_stop(self):
@@ -156,7 +183,7 @@ class TestCase(unittest.TestCase):
                 raise StopIteration
             state[0] = i+1
             return i
-        self.check_iterator(iter(spam, 20), list(range(10)))
+        self.check_iterator(iter(spam, 20), list(range(10)), pickle=False)
 
     # Test exception propagation through function iterator
     def test_exception_function(self):
@@ -198,7 +225,7 @@ class TestCase(unittest.TestCase):
                 if i == 10:
                     raise StopIteration
                 return SequenceClass.__getitem__(self, i)
-        self.check_for_loop(MySequenceClass(20), list(range(10)))
+        self.check_for_loop(MySequenceClass(20), list(range(10)), pickle=False)
 
     # Test a big range
     def test_iter_big_range(self):
@@ -237,8 +264,8 @@ class TestCase(unittest.TestCase):
         f.close()
         f = open(TESTFN, "r")
         try:
-            self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"])
-            self.check_for_loop(f, [])
+            self.check_for_loop(f, ["0\n", "1\n", "2\n", "3\n", "4\n"], pickle=False)
+            self.check_for_loop(f, [], pickle=False)
         finally:
             f.close()
         try:


@@ -37,6 +37,13 @@ def isOdd(x):
     'Test predicate'
     return x%2==1
 
+def tupleize(*args):
+    return args
+
+def irange(n):
+    for i in range(n):
+        yield i
+
 class StopNow:
     'Class emulating an empty iterable.'
     def __iter__(self):
@@ -55,8 +62,59 @@ def fact(n):
     'Factorial'
     return prod(range(1, n+1))
 
+# root level methods for pickling ability
+def testR(r):
+    return r[0]
+
+def testR2(r):
+    return r[2]
+
+def underten(x):
+    return x<10
+
 class TestBasicOps(unittest.TestCase):
+
+    def pickletest(self, it, stop=4, take=1, compare=None):
+        """Test that an iterator is the same after pickling, also when part-consumed"""
+        def expand(it, i=0):
+            # Recursively expand iterables, within sensible bounds
+            if i > 10:
+                raise RuntimeError("infinite recursion encountered")
+            if isinstance(it, str):
+                return it
+            try:
+                l = list(islice(it, stop))
+            except TypeError:
+                return it # can't expand it
+            return [expand(e, i+1) for e in l]
+
+        # Test the initial copy against the original
+        dump = pickle.dumps(it)
+        i2 = pickle.loads(dump)
+        self.assertEqual(type(it), type(i2))
+        a, b = expand(it), expand(i2)
+        self.assertEqual(a, b)
+        if compare:
+            c = expand(compare)
+            self.assertEqual(a, c)
+
+        # Take from the copy, and create another copy and compare them.
+        i3 = pickle.loads(dump)
+        took = 0
+        try:
+            for i in range(take):
+                next(i3)
+                took += 1
+        except StopIteration:
+            pass #in case there is less data than 'take'
+        dump = pickle.dumps(i3)
+        i4 = pickle.loads(dump)
+        a, b = expand(i3), expand(i4)
+        self.assertEqual(a, b)
+        if compare:
+            c = expand(compare[took:])
+            self.assertEqual(a, c);
+
     def test_accumulate(self):
         self.assertEqual(list(accumulate(range(10))),               # one positional arg
                          [0, 1, 3, 6, 10, 15, 21, 28, 36, 45])
@@ -83,6 +141,7 @@ class TestBasicOps(unittest.TestCase):
                          [2, 16, 144, 720, 5040, 0, 0, 0, 0, 0])
         with self.assertRaises(TypeError):
             list(accumulate(s, chr))                                # unary-operation
+        self.pickletest(accumulate(range(10)))                      # test pickling
 
     def test_chain(self):
@@ -106,14 +165,43 @@ class TestBasicOps(unittest.TestCase):
         self.assertEqual(take(4, chain.from_iterable(['abc', 'def'])), list('abcd'))
         self.assertRaises(TypeError, list, chain.from_iterable([2, 3]))
 
+    def test_chain_reducible(self):
+        operators = [copy.deepcopy,
+                     lambda s: pickle.loads(pickle.dumps(s))]
+        for oper in operators:
+            it = chain('abc', 'def')
+            self.assertEqual(list(oper(it)), list('abcdef'))
+            self.assertEqual(next(it), 'a')
+            self.assertEqual(list(oper(it)), list('bcdef'))
+
+            self.assertEqual(list(oper(chain(''))), [])
+            self.assertEqual(take(4, oper(chain('abc', 'def'))), list('abcd'))
+            self.assertRaises(TypeError, list, oper(chain(2, 3)))
+        self.pickletest(chain('abc', 'def'), compare=list('abcdef'))
+
     def test_combinations(self):
         self.assertRaises(TypeError, combinations, 'abc')       # missing r argument
         self.assertRaises(TypeError, combinations, 'abc', 2, 1) # too many arguments
         self.assertRaises(TypeError, combinations, None)        # pool is not iterable
         self.assertRaises(ValueError, combinations, 'abc', -2)  # r is negative
-        self.assertEqual(list(combinations('abc', 32)), [])     # r > n
-        self.assertEqual(list(combinations(range(4), 3)),
-                         [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])
+
+        for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))):
+            self.assertEqual(list(op(combinations('abc', 32))), [])     # r > n
+
+            self.assertEqual(list(op(combinations('ABCD', 2))),
+                             [('A','B'), ('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')])
+            testIntermediate = combinations('ABCD', 2)
+            next(testIntermediate)
+            self.assertEqual(list(op(testIntermediate)),
+                             [('A','C'), ('A','D'), ('B','C'), ('B','D'), ('C','D')])
+
+            self.assertEqual(list(op(combinations(range(4), 3))),
+                             [(0,1,2), (0,1,3), (0,2,3), (1,2,3)])
+            testIntermediate = combinations(range(4), 3)
+            next(testIntermediate)
+            self.assertEqual(list(op(testIntermediate)),
+                             [(0,1,3), (0,2,3), (1,2,3)])
+
 
     def combinations1(iterable, r):
         'Pure python version shown in the docs'
@@ -168,6 +256,9 @@ class TestBasicOps(unittest.TestCase):
             self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version
             self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version
 
+            self.pickletest(combinations(values, r))                  # test pickling
+
+    # Test implementation detail:  tuple re-use
     @support.impl_detail("tuple reuse is specific to CPython")
     def test_combinations_tuple_reuse(self):
         self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1)
@@ -179,8 +270,15 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(TypeError, cwr, 'abc', 2, 1) # too many arguments
         self.assertRaises(TypeError, cwr, None)        # pool is not iterable
         self.assertRaises(ValueError, cwr, 'abc', -2)  # r is negative
-        self.assertEqual(list(cwr('ABC', 2)),
-                         [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])
+
+        for op in (lambda a:a, lambda a:pickle.loads(pickle.dumps(a))):
+            self.assertEqual(list(op(cwr('ABC', 2))),
+                             [('A','A'), ('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])
+            testIntermediate = cwr('ABC', 2)
+            next(testIntermediate)
+            self.assertEqual(list(op(testIntermediate)),
+                             [('A','B'), ('A','C'), ('B','B'), ('B','C'), ('C','C')])
+
 
     def cwr1(iterable, r):
         'Pure python version shown in the docs'
@@ -239,6 +337,10 @@ class TestBasicOps(unittest.TestCase):
                 self.assertEqual(result, list(cwr1(values, r)))         # matches first pure python version
                 self.assertEqual(result, list(cwr2(values, r)))         # matches second pure python version
 
+                self.pickletest(cwr(values,r))                          # test pickling
+
+    # Test implementation detail:  tuple re-use
+
     @support.impl_detail("tuple reuse is specific to CPython")
     def test_combinations_with_replacement_tuple_reuse(self):
         cwr = combinations_with_replacement
@@ -305,6 +407,8 @@ class TestBasicOps(unittest.TestCase):
             self.assertEqual(result, list(permutations(values, None))) # test r as None
             self.assertEqual(result, list(permutations(values)))       # test default r
 
+            self.pickletest(permutations(values, r))                    # test pickling
+
     @support.impl_detail("tuple resuse is CPython specific")
     def test_permutations_tuple_reuse(self):
         self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1)
@@ -359,6 +463,24 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(TypeError, compress, range(6))            # too few args
         self.assertRaises(TypeError, compress, range(6), None)      # too many args
 
+        # check copy, deepcopy, pickle
+        for op in (lambda a:copy.copy(a), lambda a:copy.deepcopy(a), lambda a:pickle.loads(pickle.dumps(a))):
+            for data, selectors, result1, result2 in [
+                ('ABCDEF', [1,0,1,0,1,1], 'ACEF', 'CEF'),
+                ('ABCDEF', [0,0,0,0,0,0], '', ''),
+                ('ABCDEF', [1,1,1,1,1,1], 'ABCDEF', 'BCDEF'),
+                ('ABCDEF', [1,0,1], 'AC', 'C'),
+                ('ABC', [0,1,1,1,1,1], 'BC', 'C'),
+                ]:
+
+                self.assertEqual(list(op(compress(data=data, selectors=selectors))), list(result1))
+                self.assertEqual(list(op(compress(data, selectors))), list(result1))
+                testIntermediate = compress(data, selectors)
+                if result1:
+                    next(testIntermediate)
+                    self.assertEqual(list(op(testIntermediate)), list(result2))
+
+
     def test_count(self):
         self.assertEqual(lzip('abc',count()), [('a', 0), ('b', 1), ('c', 2)])
         self.assertEqual(lzip('abc',count(3)), [('a', 3), ('b', 4), ('c', 5)])
@@ -393,7 +515,7 @@ class TestBasicOps(unittest.TestCase):
             c = count(value)
             self.assertEqual(next(copy.copy(c)), value)
             self.assertEqual(next(copy.deepcopy(c)), value)
-            self.assertEqual(next(pickle.loads(pickle.dumps(c))), value)
+            self.pickletest(count(value))
 
         #check proper internal error handling for large "step' sizes
         count(1, maxsize+5); sys.exc_info()
@@ -440,6 +562,7 @@ class TestBasicOps(unittest.TestCase):
             else:
                 r2 = ('count(%r, %r)' % (i, j)).replace('L', '')
             self.assertEqual(r1, r2)
+            self.pickletest(count(i, j))
 
     def test_cycle(self):
         self.assertEqual(take(10, cycle('abc')), list('abcabcabca'))
@@ -448,6 +571,18 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(TypeError, cycle, 5)
         self.assertEqual(list(islice(cycle(gen3()),10)), [0,1,2,0,1,2,0,1,2,0])
 
+        # check copy, deepcopy, pickle
+        c = cycle('abc')
+        self.assertEqual(next(c), 'a')
+        #simple copy currently not supported, because __reduce__ returns
+        #an internal iterator
+        #self.assertEqual(take(10, copy.copy(c)), list('bcabcabcab'))
+        self.assertEqual(take(10, copy.deepcopy(c)), list('bcabcabcab'))
+        self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('bcabcabcab'))
+        next(c)
+        self.assertEqual(take(10, pickle.loads(pickle.dumps(c))), list('cabcabcabc'))
+        self.pickletest(cycle('abc'))
+
     def test_groupby(self):
         # Check whether it accepts arguments correctly
         self.assertEqual([], list(groupby([])))
@@ -466,18 +601,37 @@ class TestBasicOps(unittest.TestCase):
                 dup.append(elem)
         self.assertEqual(s, dup)
 
+        # Check normal pickled
+        dup = []
+        for k, g in pickle.loads(pickle.dumps(groupby(s, testR))):
+            for elem in g:
+                self.assertEqual(k, elem[0])
+                dup.append(elem)
+        self.assertEqual(s, dup)
+
         # Check nested case
         dup = []
-        for k, g in groupby(s, lambda r:r[0]):
-            for ik, ig in groupby(g, lambda r:r[2]):
+        for k, g in groupby(s, testR):
+            for ik, ig in groupby(g, testR2):
                 for elem in ig:
                     self.assertEqual(k, elem[0])
                     self.assertEqual(ik, elem[2])
                     dup.append(elem)
        self.assertEqual(s, dup)
 
+        # Check nested and pickled
+        dup = []
+        for k, g in pickle.loads(pickle.dumps(groupby(s, testR))):
+            for ik, ig in pickle.loads(pickle.dumps(groupby(g, testR2))):
+                for elem in ig:
+                    self.assertEqual(k, elem[0])
+                    self.assertEqual(ik, elem[2])
+                    dup.append(elem)
+        self.assertEqual(s, dup)
+
+
         # Check case where inner iterator is not used
-        keys = [k for k, g in groupby(s, lambda r:r[0])]
+        keys = [k for k, g in groupby(s, testR)]
         expectedkeys = set([r[0] for r in s])
         self.assertEqual(set(keys), expectedkeys)
         self.assertEqual(len(keys), len(expectedkeys))
@@ -548,6 +702,20 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(TypeError, filter, isEven, 3)
         self.assertRaises(TypeError, next, filter(range(6), range(6)))
 
+        # check copy, deepcopy, pickle
+        ans = [0,2,4]
+
+        c = filter(isEven, range(6))
+        self.assertEqual(list(copy.copy(c)), ans)
+        c = filter(isEven, range(6))
+        self.assertEqual(list(copy.deepcopy(c)), ans)
+        c = filter(isEven, range(6))
+        self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans)
+        next(c)
+        self.assertEqual(list(pickle.loads(pickle.dumps(c))), ans[1:])
+        c = filter(isEven, range(6))
+        self.pickletest(c)
+
     def test_filterfalse(self):
         self.assertEqual(list(filterfalse(isEven, range(6))), [1,3,5])
         self.assertEqual(list(filterfalse(None, [0,1,0,2,0])), [0,0,0])
@@ -558,6 +726,7 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(TypeError, filterfalse, lambda x:x, range(6), 7)
         self.assertRaises(TypeError, filterfalse, isEven, 3)
         self.assertRaises(TypeError, next, filterfalse(range(6), range(6)))
+        self.pickletest(filterfalse(isEven, range(6)))
 
     def test_zip(self):
         # XXX This is rather silly now that builtin zip() calls zip()...
@@ -582,6 +751,23 @@ class TestBasicOps(unittest.TestCase):
         ids = list(map(id, list(zip('abc', 'def'))))
         self.assertEqual(len(dict.fromkeys(ids)), len(ids))
 
+        # check copy, deepcopy, pickle
+        ans = [(x,y) for x, y in copy.copy(zip('abc',count()))]
+        self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
+
+        ans = [(x,y) for x, y in copy.deepcopy(zip('abc',count()))]
+        self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
+
+        ans = [(x,y) for x, y in pickle.loads(pickle.dumps(zip('abc',count())))]
+        self.assertEqual(ans, [('a', 0), ('b', 1), ('c', 2)])
+
+        testIntermediate = zip('abc',count())
+        next(testIntermediate)
+        ans = [(x,y) for x, y in pickle.loads(pickle.dumps(testIntermediate))]
+        self.assertEqual(ans, [('b', 1), ('c', 2)])
+
+        self.pickletest(zip('abc', count()))
+
     def test_ziplongest(self):
         for args in [
                 ['abc', range(6)],
@@ -631,6 +817,12 @@ class TestBasicOps(unittest.TestCase):
         ids = list(map(id, list(zip_longest('abc', 'def'))))
         self.assertEqual(len(dict.fromkeys(ids)), len(ids))
 
+    def test_zip_longest_pickling(self):
+        self.pickletest(zip_longest("abc", "def"))
+        self.pickletest(zip_longest("abc", "defgh"))
+        self.pickletest(zip_longest("abc", "defgh", fillvalue=1))
+        self.pickletest(zip_longest("", "defgh"))
+
     def test_bug_7244(self):
 
         class Repeater:
@@ -734,6 +926,20 @@ class TestBasicOps(unittest.TestCase):
         self.assertEqual(len(set(map(id, product('abc', 'def')))), 1)
         self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1)
 
+    def test_product_pickling(self):
+        # check copy, deepcopy, pickle
+        for args, result in [
+            ([], [()]),                     # zero iterables
+            (['ab'], [('a',), ('b',)]),     # one iterable
+            ([range(2), range(3)], [(0,0), (0,1), (0,2), (1,0), (1,1), (1,2)]),     # two iterables
+            ([range(0), range(2), range(3)], []),           # first iterable with zero length
+            ([range(2), range(0), range(3)], []),           # middle iterable with zero length
+            ([range(2), range(3), range(0)], []),           # last iterable with zero length
+            ]:
+            self.assertEqual(list(copy.copy(product(*args))), result)
+            self.assertEqual(list(copy.deepcopy(product(*args))), result)
+            self.pickletest(product(*args))
+
     def test_repeat(self):
         self.assertEqual(list(repeat(object='a', times=3)), ['a', 'a', 'a'])
         self.assertEqual(lzip(range(3),repeat('a')),
@@ -752,11 +958,16 @@ class TestBasicOps(unittest.TestCase):
             list(r)
         self.assertEqual(repr(r), 'repeat((1+0j), 0)')
 
+        # check copy, deepcopy, pickle
+        c = repeat(object='a', times=10)
+        self.assertEqual(next(c), 'a')
+        self.assertEqual(take(2, copy.copy(c)), list('a' * 2))
+        self.assertEqual(take(2, copy.deepcopy(c)), list('a' * 2))
+        self.pickletest(repeat(object='a', times=10))
+
     def test_map(self):
         self.assertEqual(list(map(operator.pow, range(3), range(1,7))),
                          [0**1, 1**2, 2**3])
-        def tupleize(*args):
-            return args
         self.assertEqual(list(map(tupleize, 'abc', range(5))),
                          [('a',0),('b',1),('c',2)])
         self.assertEqual(list(map(tupleize, 'abc', count())),
@@ -771,6 +982,18 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(ValueError, next, map(errfunc, [4], [5]))
         self.assertRaises(TypeError, next, map(onearg, [4], [5]))
 
+        # check copy, deepcopy, pickle
+        ans = [('a',0),('b',1),('c',2)]
+
+        c = map(tupleize, 'abc', count())
+        self.assertEqual(list(copy.copy(c)), ans)
+
+        c = map(tupleize, 'abc', count())
+        self.assertEqual(list(copy.deepcopy(c)), ans)
+
+        c = map(tupleize, 'abc', count())
+        self.pickletest(c)
+
     def test_starmap(self):
         self.assertEqual(list(starmap(operator.pow, zip(range(3), range(1,7)))),
                          [0**1, 1**2, 2**3])
@@ -785,6 +1008,18 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(ValueError, next, starmap(errfunc, [(4,5)]))
         self.assertRaises(TypeError, next, starmap(onearg, [(4,5)]))
 
+        # check copy, deepcopy, pickle
+        ans = [0**1, 1**2, 2**3]
+
+        c = starmap(operator.pow, zip(range(3), range(1,7)))
+        self.assertEqual(list(copy.copy(c)), ans)
+
+        c = starmap(operator.pow, zip(range(3), range(1,7)))
+        self.assertEqual(list(copy.deepcopy(c)), ans)
+
+        c = starmap(operator.pow, zip(range(3), range(1,7)))
+        self.pickletest(c)
+
     def test_islice(self):
         for args in [          # islice(args) should agree with range(args)
                 (10, 20, 3),
@@ -817,17 +1052,18 @@ class TestBasicOps(unittest.TestCase):
         self.assertEqual(list(it), list(range(3, 10)))
 
         # Test invalid arguments
-        self.assertRaises(TypeError, islice, range(10))
-        self.assertRaises(TypeError, islice, range(10), 1, 2, 3, 4)
-        self.assertRaises(ValueError, islice, range(10), -5, 10, 1)
-        self.assertRaises(ValueError, islice, range(10), 1, -5, -1)
-        self.assertRaises(ValueError, islice, range(10), 1, 10, -1)
-        self.assertRaises(ValueError, islice, range(10), 1, 10, 0)
-        self.assertRaises(ValueError, islice, range(10), 'a')
-        self.assertRaises(ValueError, islice, range(10), 'a', 1)
-        self.assertRaises(ValueError, islice, range(10), 1, 'a')
-        self.assertRaises(ValueError, islice, range(10), 'a', 1, 1)
-        self.assertRaises(ValueError, islice, range(10), 1, 'a', 1)
+        ra = range(10)
+        self.assertRaises(TypeError, islice, ra)
+        self.assertRaises(TypeError, islice, ra, 1, 2, 3, 4)
+        self.assertRaises(ValueError, islice, ra, -5, 10, 1)
+        self.assertRaises(ValueError, islice, ra, 1, -5, -1)
+        self.assertRaises(ValueError, islice, ra, 1, 10, -1)
+        self.assertRaises(ValueError, islice, ra, 1, 10, 0)
+        self.assertRaises(ValueError, islice, ra, 'a')
+        self.assertRaises(ValueError, islice, ra, 'a', 1)
+        self.assertRaises(ValueError, islice, ra, 1, 'a')
+        self.assertRaises(ValueError, islice, ra, 'a', 1, 1)
+        self.assertRaises(ValueError, islice, ra, 1, 'a', 1)
         self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1)
 
         # Issue #10323:  Less islice in a predictable state
@@ -835,9 +1071,22 @@ class TestBasicOps(unittest.TestCase):
         self.assertEqual(list(islice(c, 1, 3, 50)), [1])
         self.assertEqual(next(c), 3)
 
+        # check copy, deepcopy, pickle
+        for args in [          # islice(args) should agree with range(args)
+                (10, 20, 3),
+                (10, 3, 20),
+                (10, 20),
+                (10, 3),
+                (20,)
+                ]:
+            self.assertEqual(list(copy.copy(islice(range(100), *args))),
+                             list(range(*args)))
+            self.assertEqual(list(copy.deepcopy(islice(range(100), *args))),
+                             list(range(*args)))
+            self.pickletest(islice(range(100), *args))
+
     def test_takewhile(self):
         data = [1, 3, 5, 20, 2, 4, 6, 8]
-        underten = lambda x: x<10
         self.assertEqual(list(takewhile(underten, data)), [1, 3, 5])
         self.assertEqual(list(takewhile(underten, [])), [])
         self.assertRaises(TypeError, takewhile)
@@ -849,9 +1098,14 @@ class TestBasicOps(unittest.TestCase):
         self.assertEqual(list(t), [1, 1, 1])
         self.assertRaises(StopIteration, next, t)
 
+        # check copy, deepcopy, pickle
+        self.assertEqual(list(copy.copy(takewhile(underten, data))), [1, 3, 5])
+        self.assertEqual(list(copy.deepcopy(takewhile(underten, data))),
+                        [1, 3, 5])
+        self.pickletest(takewhile(underten, data))
+
     def test_dropwhile(self):
         data = [1, 3, 5, 20, 2, 4, 6, 8]
-        underten = lambda x: x<10
         self.assertEqual(list(dropwhile(underten, data)), [20, 2, 4, 6, 8])
         self.assertEqual(list(dropwhile(underten, [])), [])
         self.assertRaises(TypeError, dropwhile)
@@ -860,11 +1114,14 @@ class TestBasicOps(unittest.TestCase):
         self.assertRaises(TypeError, next, dropwhile(10, [(4,5)]))
         self.assertRaises(ValueError, next, dropwhile(errfunc, [(4,5)]))
 
+        # check copy, deepcopy, pickle
+        self.assertEqual(list(copy.copy(dropwhile(underten, data))), [20, 2, 4, 6, 8])
+        self.assertEqual(list(copy.deepcopy(dropwhile(underten, data))),
+                        [20, 2, 4, 6, 8])
+        self.pickletest(dropwhile(underten, data))
+
     def test_tee(self):
         n = 200
-        def irange(n):
-            for i in range(n):
-                yield i
 
         a, b = tee([])         # test empty iterator
         self.assertEqual(list(a), [])
@@ -949,6 +1206,67 @@ class TestBasicOps(unittest.TestCase):
         del a
        self.assertRaises(ReferenceError, getattr, p, '__class__')
 
+        ans = list('abc')
+        long_ans = list(range(10000))
+
+        # check copy
+        a, b = tee('abc')
+        self.assertEqual(list(copy.copy(a)), ans)
+        self.assertEqual(list(copy.copy(b)), ans)
+        a, b = tee(list(range(10000)))
+        self.assertEqual(list(copy.copy(a)), long_ans)
+        self.assertEqual(list(copy.copy(b)), long_ans)
+
+        # check partially consumed copy
+        a, b = tee('abc')
+        take(2, a)
+        take(1, b)
+        self.assertEqual(list(copy.copy(a)), ans[2:])
+        self.assertEqual(list(copy.copy(b)), ans[1:])
+        self.assertEqual(list(a), ans[2:])
+        self.assertEqual(list(b), ans[1:])
+        a, b = tee(range(10000))
+        take(100, a)
+        take(60, b)
+        self.assertEqual(list(copy.copy(a)), long_ans[100:])
+        self.assertEqual(list(copy.copy(b)), long_ans[60:])
+        self.assertEqual(list(a), long_ans[100:])
+        self.assertEqual(list(b), long_ans[60:])
+
+        # check deepcopy
+        a, b = tee('abc')
+        self.assertEqual(list(copy.deepcopy(a)), ans)
self.assertEqual(list(copy.deepcopy(b)), ans)
self.assertEqual(list(a), ans)
self.assertEqual(list(b), ans)
a, b = tee(range(10000))
self.assertEqual(list(copy.deepcopy(a)), long_ans)
self.assertEqual(list(copy.deepcopy(b)), long_ans)
self.assertEqual(list(a), long_ans)
self.assertEqual(list(b), long_ans)
# check partially consumed deepcopy
a, b = tee('abc')
take(2, a)
take(1, b)
self.assertEqual(list(copy.deepcopy(a)), ans[2:])
self.assertEqual(list(copy.deepcopy(b)), ans[1:])
self.assertEqual(list(a), ans[2:])
self.assertEqual(list(b), ans[1:])
a, b = tee(range(10000))
take(100, a)
take(60, b)
self.assertEqual(list(copy.deepcopy(a)), long_ans[100:])
self.assertEqual(list(copy.deepcopy(b)), long_ans[60:])
self.assertEqual(list(a), long_ans[100:])
self.assertEqual(list(b), long_ans[60:])
# check pickle
self.pickletest(iter(tee('abc')))
a, b = tee('abc')
self.pickletest(a, compare=ans)
self.pickletest(b, compare=ans)
def test_StopIteration(self): def test_StopIteration(self):
self.assertRaises(StopIteration, next, zip()) self.assertRaises(StopIteration, next, zip())
@ -974,9 +1292,21 @@ class TestBasicOps(unittest.TestCase):
class TestExamples(unittest.TestCase): class TestExamples(unittest.TestCase):
def test_accumlate(self): def test_accumulate(self):
self.assertEqual(list(accumulate([1,2,3,4,5])), [1, 3, 6, 10, 15]) self.assertEqual(list(accumulate([1,2,3,4,5])), [1, 3, 6, 10, 15])
def test_accumulate_reducible(self):
# check copy, deepcopy, pickle
data = [1, 2, 3, 4, 5]
accumulated = [1, 3, 6, 10, 15]
it = accumulate(data)
self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[:])
self.assertEqual(next(it), 1)
self.assertEqual(list(pickle.loads(pickle.dumps(it))), accumulated[1:])
self.assertEqual(list(copy.deepcopy(it)), accumulated[1:])
self.assertEqual(list(copy.copy(it)), accumulated[1:])
def test_chain(self): def test_chain(self):
self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF') self.assertEqual(''.join(chain('ABC', 'DEF')), 'ABCDEF')
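The checks added above give the itertools objects a uniform contract: copy.copy, copy.deepcopy and pickle all round-trip them, and a partially consumed iterator resumes where it left off. A minimal standalone sketch of that behaviour (assuming Python 3.3+ with these changes applied; the variable names are illustrative):

import copy
import pickle
from itertools import islice

# deepcopy of a fresh islice agrees with the equivalent range, and a
# partially consumed islice survives a pickle round trip unchanged.
it = islice(range(100), 10, 20, 3)         # yields 10, 13, 16, 19
assert list(copy.deepcopy(it)) == list(range(10, 20, 3))
assert next(it) == 10
clone = pickle.loads(pickle.dumps(it))
assert list(clone) == [13, 16, 19]
assert list(it) == [13, 16, 19]            # the original is left untouched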

View file

@ -1,5 +1,6 @@
import sys import sys
from test import support, list_tests from test import support, list_tests
import pickle
class ListTest(list_tests.CommonTest): class ListTest(list_tests.CommonTest):
type2test = list type2test = list
@ -69,6 +70,33 @@ class ListTest(list_tests.CommonTest):
check(10) # check our checking code check(10) # check our checking code
check(1000000) check(1000000)
def test_iterator_pickle(self):
# Userlist iterators don't support pickling yet since
# they are based on generators.
data = self.type2test([4, 5, 6, 7])
it = itorg = iter(data)
d = pickle.dumps(it)
it = pickle.loads(d)
self.assertEqual(type(itorg), type(it))
self.assertEqual(self.type2test(it), self.type2test(data))
it = pickle.loads(d)
next(it)
d = pickle.dumps(it)
self.assertEqual(self.type2test(it), self.type2test(data)[1:])
def test_reversed_pickle(self):
data = self.type2test([4, 5, 6, 7])
it = itorg = reversed(data)
d = pickle.dumps(it)
it = pickle.loads(d)
self.assertEqual(type(itorg), type(it))
self.assertEqual(self.type2test(it), self.type2test(reversed(data)))
it = pickle.loads(d)
next(it)
d = pickle.dumps(it)
self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:])
def test_main(verbose=None): def test_main(verbose=None):
support.run_unittest(ListTest) support.run_unittest(ListTest)
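A condensed sketch of what these new list-iterator tests verify (assuming Python 3.3+): both the forward and the reverse iterator of a list pickle together with their current position and resume from it.

import pickle

data = [4, 5, 6, 7]
it = iter(data)
next(it)                                   # consume 4
assert list(pickle.loads(pickle.dumps(it))) == [5, 6, 7]

rit = reversed(data)
next(rit)                                  # consume 7
assert list(pickle.loads(pickle.dumps(rit))) == [6, 5, 4]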

View file

@ -2649,6 +2649,10 @@ class TestInvalidFamily(unittest.TestCase):
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
multiprocessing.connection.Listener(r'\\.\test') multiprocessing.connection.Listener(r'\\.\test')
@unittest.skipUnless(WIN32, "skipped on non-Windows platforms")
def test_invalid_family_win32(self):
with self.assertRaises(ValueError):
multiprocessing.connection.Listener('/var/test.pipe')
testcases_other = [OtherTest, TestInvalidHandle, TestInitializers, testcases_other = [OtherTest, TestInvalidHandle, TestInitializers,
TestStdinBadfiledescriptor, TestWait, TestInvalidFamily] TestStdinBadfiledescriptor, TestWait, TestInvalidFamily]
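A hedged sketch of the behaviour this pair of tests pins down (the address strings below are illustrative): multiprocessing infers the address family from the address itself, so a family that does not exist on the current platform is rejected with a clean ValueError instead of an obscure NameError.

import sys
from multiprocessing.connection import Listener

# An AF_UNIX-style path is invalid on Windows, and an AF_PIPE-style name is
# invalid everywhere else; either way a ValueError is raised up front.
bad_address = '/var/test.pipe' if sys.platform == 'win32' else r'\\.\test'
try:
    Listener(bad_address)
except ValueError as exc:
    print('rejected:', exc)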

View file

@ -341,13 +341,35 @@ class RangeTest(unittest.TestCase):
def test_pickling(self): def test_pickling(self):
testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1), testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1),
(13, 21, 3), (-2, 2, 2)] (13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)]
for proto in range(pickle.HIGHEST_PROTOCOL + 1): for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for t in testcases: for t in testcases:
r = range(*t) r = range(*t)
self.assertEqual(list(pickle.loads(pickle.dumps(r, proto))), self.assertEqual(list(pickle.loads(pickle.dumps(r, proto))),
list(r)) list(r))
def test_iterator_pickling(self):
testcases = [(13,), (0, 11), (-22, 10), (20, 3, -1),
(13, 21, 3), (-2, 2, 2), (2**65, 2**65+2)]
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for t in testcases:
it = itorg = iter(range(*t))
data = list(range(*t))
d = pickle.dumps(it)
it = pickle.loads(d)
self.assertEqual(type(itorg), type(it))
self.assertEqual(list(it), data)
it = pickle.loads(d)
try:
next(it)
except StopIteration:
continue
d = pickle.dumps(it)
it = pickle.loads(d)
self.assertEqual(list(it), data[1:])
def test_odd_bug(self): def test_odd_bug(self):
# This used to raise a "SystemError: NULL result without error" # This used to raise a "SystemError: NULL result without error"
# because the range validation step was eating the exception # because the range validation step was eating the exception

View file

@ -9,6 +9,7 @@ from random import randrange, shuffle
import sys import sys
import warnings import warnings
import collections import collections
import collections.abc
class PassThru(Exception): class PassThru(Exception):
pass pass
@ -234,6 +235,26 @@ class TestJointOps(unittest.TestCase):
dup = pickle.loads(p) dup = pickle.loads(p)
self.assertEqual(self.s.x, dup.x) self.assertEqual(self.s.x, dup.x)
def test_iterator_pickling(self):
itorg = iter(self.s)
data = self.thetype(self.s)
d = pickle.dumps(itorg)
it = pickle.loads(d)
# Set iterators unpickle as list iterators due to the
# undefined order of set items.
# self.assertEqual(type(itorg), type(it))
self.assertTrue(isinstance(it, collections.abc.Iterator))
self.assertEqual(self.thetype(it), data)
it = pickle.loads(d)
try:
drop = next(it)
except StopIteration:
return
d = pickle.dumps(it)
it = pickle.loads(d)
self.assertEqual(self.thetype(it), data - self.thetype((drop,)))
def test_deepcopy(self): def test_deepcopy(self):
class Tracer: class Tracer:
def __init__(self, value): def __init__(self, value):
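The commented-out type check above is the interesting detail: because set order is undefined, a pickled set iterator deliberately comes back as a plain list iterator over the remaining elements. A small sketch of that behaviour (assuming Python 3.3+):

import pickle
from collections.abc import Iterator

s = {'a', 'b', 'c'}
it = iter(s)
clone = pickle.loads(pickle.dumps(it))
assert isinstance(clone, Iterator)         # an iterator, but not a set_iterator
assert set(clone) == s                     # same elements, order unspecified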

View file

@ -5,8 +5,11 @@ Tools directory of a Python checkout or tarball, such as reindent.py.
""" """
import os import os
import sys
import imp
import unittest import unittest
import sysconfig import sysconfig
import tempfile
from test import support from test import support
from test.script_helper import assert_python_ok from test.script_helper import assert_python_ok
@ -17,10 +20,11 @@ if not sysconfig.is_python_build():
srcdir = sysconfig.get_config_var('projectbase') srcdir = sysconfig.get_config_var('projectbase')
basepath = os.path.join(os.getcwd(), srcdir, 'Tools') basepath = os.path.join(os.getcwd(), srcdir, 'Tools')
scriptsdir = os.path.join(basepath, 'scripts')
class ReindentTests(unittest.TestCase): class ReindentTests(unittest.TestCase):
script = os.path.join(basepath, 'scripts', 'reindent.py') script = os.path.join(scriptsdir, 'reindent.py')
def test_noargs(self): def test_noargs(self):
assert_python_ok(self.script) assert_python_ok(self.script)
@ -31,8 +35,73 @@ class ReindentTests(unittest.TestCase):
self.assertGreater(err, b'') self.assertGreater(err, b'')
class TestSundryScripts(unittest.TestCase):
# At least make sure the rest don't have syntax errors. When tests are
# added for a script it should be added to the whitelist below.
# scripts that have independent tests.
whitelist = ['reindent.py']
# scripts that can't be imported without running
blacklist = ['make_ctype.py']
# scripts that use windows-only modules
windows_only = ['win_add2path.py']
# blacklisted for other reasons
other = ['analyze_dxp.py']
skiplist = blacklist + whitelist + windows_only + other
def setUp(self):
cm = support.DirsOnSysPath(scriptsdir)
cm.__enter__()
self.addCleanup(cm.__exit__)
def test_sundry(self):
for fn in os.listdir(scriptsdir):
if fn.endswith('.py') and fn not in self.skiplist:
__import__(fn[:-3])
@unittest.skipIf(sys.platform != "win32", "Windows-only test")
def test_sundry_windows(self):
for fn in self.windows_only:
__import__(fn[:-3])
@unittest.skipIf(not support.threading, "test requires _thread module")
def test_analyze_dxp_import(self):
if hasattr(sys, 'getdxp'):
import analyze_dxp
else:
with self.assertRaises(RuntimeError):
import analyze_dxp
class PdepsTests(unittest.TestCase):
@classmethod
def setUpClass(self):
path = os.path.join(scriptsdir, 'pdeps.py')
self.pdeps = imp.load_source('pdeps', path)
@classmethod
def tearDownClass(self):
if 'pdeps' in sys.modules:
del sys.modules['pdeps']
def test_process_errors(self):
# Issue #14492: m_import.match(line) can be None.
with tempfile.TemporaryDirectory() as tmpdir:
fn = os.path.join(tmpdir, 'foo')
with open(fn, 'w') as stream:
stream.write("#!/this/will/fail")
self.pdeps.process(fn, {})
def test_inverse_attribute_error(self):
# Issue #14492: this used to fail with an AttributeError.
self.pdeps.inverse({'a': []})
def test_main(): def test_main():
support.run_unittest(ReindentTests) support.run_unittest(*[obj for obj in globals().values()
if isinstance(obj, type)])
if __name__ == '__main__': if __name__ == '__main__':

View file

@ -1,6 +1,7 @@
from test import support, seq_tests from test import support, seq_tests
import gc import gc
import pickle
class TupleTest(seq_tests.CommonTest): class TupleTest(seq_tests.CommonTest):
type2test = tuple type2test = tuple
@ -164,6 +165,34 @@ class TupleTest(seq_tests.CommonTest):
check(10) # check our checking code check(10) # check our checking code
check(1000000) check(1000000)
def test_iterator_pickle(self):
# Userlist iterators don't support pickling yet since
# they are based on generators.
data = self.type2test([4, 5, 6, 7])
itorg = iter(data)
d = pickle.dumps(itorg)
it = pickle.loads(d)
self.assertEqual(type(itorg), type(it))
self.assertEqual(self.type2test(it), self.type2test(data))
it = pickle.loads(d)
next(it)
d = pickle.dumps(it)
self.assertEqual(self.type2test(it), self.type2test(data)[1:])
def test_reversed_pickle(self):
data = self.type2test([4, 5, 6, 7])
itorg = reversed(data)
d = pickle.dumps(itorg)
it = pickle.loads(d)
self.assertEqual(type(itorg), type(it))
self.assertEqual(self.type2test(it), self.type2test(reversed(data)))
it = pickle.loads(d)
next(it)
d = pickle.dumps(it)
self.assertEqual(self.type2test(it), self.type2test(reversed(data))[1:])
def test_main(): def test_main():
support.run_unittest(TupleTest) support.run_unittest(TupleTest)

View file

@ -1859,6 +1859,41 @@ class BasicElementTest(unittest.TestCase):
gc_collect() gc_collect()
self.assertIsNone(wref()) self.assertIsNone(wref())
# A longer cycle: d->e->e2->d
e = ET.Element('joe')
d = Dummy()
d.dummyref = e
wref = weakref.ref(d)
e2 = ET.SubElement(e, 'foo', attr=d)
del d, e, e2
gc_collect()
self.assertIsNone(wref())
# A cycle between Element objects as children of one another
# e1->e2->e3->e1
e1 = ET.Element('e1')
e2 = ET.Element('e2')
e3 = ET.Element('e3')
e1.append(e2)
e2.append(e3)
e3.append(e1)
wref = weakref.ref(e1)
del e1, e2, e3
gc_collect()
self.assertIsNone(wref())
def test_weakref(self):
flag = False
def wref_cb(w):
nonlocal flag
flag = True
e = ET.Element('e')
wref = weakref.ref(e, wref_cb)
self.assertEqual(wref().tag, 'e')
del e
self.assertEqual(flag, True)
self.assertEqual(wref(), None)
class ElementTreeTest(unittest.TestCase): class ElementTreeTest(unittest.TestCase):
def test_istype(self): def test_istype(self):

View file

@ -2,27 +2,27 @@
# #
# written by Fredrik Lundh, February 1998 # written by Fredrik Lundh, February 1998
# #
# FIXME: should add 'displayof' option where relevant (actual, families,
# measure, and metrics)
#
__version__ = "0.9" __version__ = "0.9"
import itertools
import tkinter import tkinter
# weight/slant # weight/slant
NORMAL = "normal" NORMAL = "normal"
ROMAN = "roman" ROMAN = "roman"
BOLD = "bold" BOLD = "bold"
ITALIC = "italic" ITALIC = "italic"
def nametofont(name): def nametofont(name):
"""Given the name of a tk named font, returns a Font representation. """Given the name of a tk named font, returns a Font representation.
""" """
return Font(name=name, exists=True) return Font(name=name, exists=True)
class Font:
class Font:
"""Represents a named font. """Represents a named font.
Constructor options are: Constructor options are:
@ -44,6 +44,8 @@ class Font:
""" """
counter = itertools.count(1)
def _set(self, kw): def _set(self, kw):
options = [] options = []
for k, v in kw.items(): for k, v in kw.items():
@ -63,7 +65,8 @@ class Font:
options[args[i][1:]] = args[i+1] options[args[i][1:]] = args[i+1]
return options return options
def __init__(self, root=None, font=None, name=None, exists=False, **options): def __init__(self, root=None, font=None, name=None, exists=False,
**options):
if not root: if not root:
root = tkinter._default_root root = tkinter._default_root
if font: if font:
@ -72,7 +75,7 @@ class Font:
else: else:
font = self._set(options) font = self._set(options)
if not name: if not name:
name = "font" + str(id(self)) name = "font" + str(next(self.counter))
self.name = name self.name = name
if exists: if exists:
@ -118,14 +121,17 @@ class Font:
"Return a distinct copy of the current font" "Return a distinct copy of the current font"
return Font(self._root, **self.actual()) return Font(self._root, **self.actual())
def actual(self, option=None): def actual(self, option=None, displayof=None):
"Return actual font attributes" "Return actual font attributes"
args = ()
if displayof:
args = ('-displayof', displayof)
if option: if option:
return self._call("font", "actual", self.name, "-"+option) args = args + ('-' + option, )
return self._call("font", "actual", self.name, *args)
else: else:
return self._mkdict( return self._mkdict(
self._split(self._call("font", "actual", self.name)) self._split(self._call("font", "actual", self.name, *args)))
)
def cget(self, option): def cget(self, option):
"Get font attribute" "Get font attribute"
@ -138,37 +144,47 @@ class Font:
*self._set(options)) *self._set(options))
else: else:
return self._mkdict( return self._mkdict(
self._split(self._call("font", "config", self.name)) self._split(self._call("font", "config", self.name)))
)
configure = config configure = config
def measure(self, text): def measure(self, text, displayof=None):
"Return text width" "Return text width"
return int(self._call("font", "measure", self.name, text)) args = (text,)
if displayof:
args = ('-displayof', displayof, text)
return int(self._call("font", "measure", self.name, *args))
def metrics(self, *options): def metrics(self, *options, **kw):
"""Return font metrics. """Return font metrics.
For best performance, create a dummy widget For best performance, create a dummy widget
using this font before calling this method.""" using this font before calling this method."""
args = ()
displayof = kw.pop('displayof', None)
if displayof:
args = ('-displayof', displayof)
if options: if options:
args = args + self._get(options)
return int( return int(
self._call("font", "metrics", self.name, self._get(options)) self._call("font", "metrics", self.name, *args))
)
else: else:
res = self._split(self._call("font", "metrics", self.name)) res = self._split(self._call("font", "metrics", self.name, *args))
options = {} options = {}
for i in range(0, len(res), 2): for i in range(0, len(res), 2):
options[res[i][1:]] = int(res[i+1]) options[res[i][1:]] = int(res[i+1])
return options return options
def families(root=None):
def families(root=None, displayof=None):
"Get font families (as a tuple)" "Get font families (as a tuple)"
if not root: if not root:
root = tkinter._default_root root = tkinter._default_root
return root.tk.splitlist(root.tk.call("font", "families")) args = ()
if displayof:
args = ('-displayof', displayof)
return root.tk.splitlist(root.tk.call("font", "families", *args))
def names(root=None): def names(root=None):
"Get names of defined fonts (as a tuple)" "Get names of defined fonts (as a tuple)"
@ -176,6 +192,7 @@ def names(root=None):
root = tkinter._default_root root = tkinter._default_root
return root.tk.splitlist(root.tk.call("font", "names")) return root.tk.splitlist(root.tk.call("font", "names"))
# -------------------------------------------------------------------- # --------------------------------------------------------------------
# test stuff # test stuff
@ -198,10 +215,10 @@ if __name__ == "__main__":
print(f.measure("hello"), f.metrics("linespace")) print(f.measure("hello"), f.metrics("linespace"))
print(f.metrics()) print(f.metrics(displayof=root))
f = Font(font=("Courier", 20, "bold")) f = Font(font=("Courier", 20, "bold"))
print(f.measure("hello"), f.metrics("linespace")) print(f.measure("hello"), f.metrics("linespace", displayof=root))
w = tkinter.Label(root, text="Hello, world", font=f) w = tkinter.Label(root, text="Hello, world", font=f)
w.pack() w.pack()
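A short usage sketch of the two tkinter.font changes above, the counter-based names and the new displayof parameter (it needs a running Tk display, so treat it as illustrative only):

import tkinter
from tkinter import font

root = tkinter.Tk()
f1 = font.Font(family='Helvetica', size=12)
f2 = font.Font(family='Helvetica', size=12)
assert f1.name != f2.name                  # auto-generated names no longer collide

print(f1.actual(displayof=root))           # attributes as resolved on this display
print(f1.measure('hello', displayof=root))
print(font.families(displayof=root))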

View file

@ -1253,7 +1253,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
def exists(self, item): def exists(self, item):
"""Returns True if the specified item is present in the three, """Returns True if the specified item is present in the tree,
False otherwise.""" False otherwise."""
return bool(self.tk.call(self._w, "exists", item)) return bool(self.tk.call(self._w, "exists", item))

View file

@ -448,6 +448,14 @@ class Grail(BaseBrowser):
def register_X_browsers(): def register_X_browsers():
# use xdg-open if around
if _iscommand("xdg-open"):
register("xdg-open", None, BackgroundBrowser("xdg-open"))
# The default GNOME3 browser
if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gvfs-open"):
register("gvfs-open", None, BackgroundBrowser("gvfs-open"))
# The default GNOME browser # The default GNOME browser
if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gnome-open"): if "GNOME_DESKTOP_SESSION_ID" in os.environ and _iscommand("gnome-open"):
register("gnome-open", None, BackgroundBrowser("gnome-open")) register("gnome-open", None, BackgroundBrowser("gnome-open"))

View file

@ -1351,7 +1351,7 @@ profile-removal:
clobber: clean profile-removal clobber: clean profile-removal
-rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \ -rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
tags TAGS Parser/pgen.stamp \ tags TAGS \
config.cache config.log pyconfig.h Modules/config.c config.cache config.log pyconfig.h Modules/config.c
-rm -rf build platform -rm -rf build platform
-rm -rf $(PYTHONFRAMEWORKDIR) -rm -rf $(PYTHONFRAMEWORKDIR)

View file

@ -333,6 +333,7 @@ Doug Fort
John Fouhy John Fouhy
Andrew Francis Andrew Francis
Martin Franklin Martin Franklin
Bruce Frederiksen
Robin Friedrich Robin Friedrich
Ivan Frohne Ivan Frohne
Matthias Fuchs Matthias Fuchs
@ -976,6 +977,7 @@ Richard Stoakley
Peter Stoehr Peter Stoehr
Casper Stoel Casper Stoel
Michael Stone Michael Stone
Serhiy Storchaka
Ken Stox Ken Stox
Dan Stromberg Dan Stromberg
Daniel Stutzbach Daniel Stutzbach

View file

@ -19,10 +19,26 @@ Core and Builtins
Library Library
------- -------
- Issue #8515: Set __file__ when a file is run in IDLE.
Initial patch by Bruce Frederiksen.
- Issue #14496: Fix wrong name in idlelib/tabbedpages.py.
Patch by Popa Claudiu.
- Issue #3033: Add displayof parameter to tkinter font. Patch by Guilherme Polo.
- Issue #14482: Raise a ValueError, not a NameError, when trying to create
a multiprocessing Client or Listener with an AF_UNIX type address under
Windows. Patch by Popa Claudiu.
- Issue #802310: Always generate unique tkinter font names when a name is not passed in directly.
- Issue #14151: Raise a ValueError, not a NameError, when trying to create - Issue #14151: Raise a ValueError, not a NameError, when trying to create
a multiprocessing Client or Listener with an AF_PIPE type address under a multiprocessing Client or Listener with an AF_PIPE type address under
non-Windows platforms. Patch by Popa Claudiu. non-Windows platforms. Patch by Popa Claudiu.
- Issue #14493: Use gvfs-open or xdg-open in webbrowser.
What's New in Python 3.3.0 Alpha 2? What's New in Python 3.3.0 Alpha 2?
=================================== ===================================
@ -58,6 +74,8 @@ Core and Builtins
- Issue #14471: Fix a possible buffer overrun in the winreg module. - Issue #14471: Fix a possible buffer overrun in the winreg module.
- Issue #14288: Allow the serialization of builtin iterators
Library Library
------- -------
@ -997,7 +1015,7 @@ Library
- Issue #11006: Don't issue low level warning in subprocess when pipe2() fails. - Issue #11006: Don't issue low level warning in subprocess when pipe2() fails.
- Issue #13620: Support for Chrome browser in webbrowser.py Patch contributed - Issue #13620: Support for Chrome browser in webbrowser. Patch contributed
by Arnaud Calmettes. by Arnaud Calmettes.
- Issue #11829: Fix code execution holes in inspect.getattr_static for - Issue #11829: Fix code execution holes in inspect.getattr_static for

View file

@ -1121,6 +1121,35 @@ dequeiter_next(dequeiterobject *it)
return item; return item;
} }
static PyObject *
dequeiter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
Py_ssize_t i, index=0;
PyObject *deque;
dequeiterobject *it;
if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index))
return NULL;
assert(type == &dequeiter_type);
it = (dequeiterobject*)deque_iter((dequeobject *)deque);
if (!it)
return NULL;
/* consume items from the queue */
for(i=0; i<index; i++) {
PyObject *item = dequeiter_next(it);
if (item) {
Py_DECREF(item);
} else {
if (it->counter) {
Py_DECREF(it);
return NULL;
} else
break;
}
}
return (PyObject*)it;
}
static PyObject * static PyObject *
dequeiter_len(dequeiterobject *it) dequeiter_len(dequeiterobject *it)
{ {
@ -1129,14 +1158,21 @@ dequeiter_len(dequeiterobject *it)
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *
dequeiter_reduce(dequeiterobject *it)
{
return Py_BuildValue("O(On)", Py_TYPE(it), it->deque, it->deque->len - it->counter);
}
static PyMethodDef dequeiter_methods[] = { static PyMethodDef dequeiter_methods[] = {
{"__length_hint__", (PyCFunction)dequeiter_len, METH_NOARGS, length_hint_doc}, {"__length_hint__", (PyCFunction)dequeiter_len, METH_NOARGS, length_hint_doc},
{"__reduce__", (PyCFunction)dequeiter_reduce, METH_NOARGS, reduce_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
static PyTypeObject dequeiter_type = { static PyTypeObject dequeiter_type = {
PyVarObject_HEAD_INIT(NULL, 0) PyVarObject_HEAD_INIT(NULL, 0)
"deque_iterator", /* tp_name */ "_collections._deque_iterator", /* tp_name */
sizeof(dequeiterobject), /* tp_basicsize */ sizeof(dequeiterobject), /* tp_basicsize */
0, /* tp_itemsize */ 0, /* tp_itemsize */
/* methods */ /* methods */
@ -1164,6 +1200,16 @@ static PyTypeObject dequeiter_type = {
PyObject_SelfIter, /* tp_iter */ PyObject_SelfIter, /* tp_iter */
(iternextfunc)dequeiter_next, /* tp_iternext */ (iternextfunc)dequeiter_next, /* tp_iternext */
dequeiter_methods, /* tp_methods */ dequeiter_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
0, /* tp_init */
0, /* tp_alloc */
dequeiter_new, /* tp_new */
0, 0,
}; };
@ -1217,9 +1263,38 @@ dequereviter_next(dequeiterobject *it)
return item; return item;
} }
static PyObject *
dequereviter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
Py_ssize_t i, index=0;
PyObject *deque;
dequeiterobject *it;
if (!PyArg_ParseTuple(args, "O!|n", &deque_type, &deque, &index))
return NULL;
assert(type == &dequereviter_type);
it = (dequeiterobject*)deque_reviter((dequeobject *)deque);
if (!it)
return NULL;
/* consume items from the queue */
for(i=0; i<index; i++) {
PyObject *item = dequereviter_next(it);
if (item) {
Py_DECREF(item);
} else {
if (it->counter) {
Py_DECREF(it);
return NULL;
} else
break;
}
}
return (PyObject*)it;
}
static PyTypeObject dequereviter_type = { static PyTypeObject dequereviter_type = {
PyVarObject_HEAD_INIT(NULL, 0) PyVarObject_HEAD_INIT(NULL, 0)
"deque_reverse_iterator", /* tp_name */ "_collections._deque_reverse_iterator", /* tp_name */
sizeof(dequeiterobject), /* tp_basicsize */ sizeof(dequeiterobject), /* tp_basicsize */
0, /* tp_itemsize */ 0, /* tp_itemsize */
/* methods */ /* methods */
@ -1247,6 +1322,16 @@ static PyTypeObject dequereviter_type = {
PyObject_SelfIter, /* tp_iter */ PyObject_SelfIter, /* tp_iter */
(iternextfunc)dequereviter_next, /* tp_iternext */ (iternextfunc)dequereviter_next, /* tp_iternext */
dequeiter_methods, /* tp_methods */ dequeiter_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
0, /* tp_dict */
0, /* tp_descr_get */
0, /* tp_descr_set */
0, /* tp_dictoffset */
0, /* tp_init */
0, /* tp_alloc */
dequereviter_new, /* tp_new */
0, 0,
}; };
@ -1653,9 +1738,13 @@ PyInit__collections(void)
if (PyType_Ready(&dequeiter_type) < 0) if (PyType_Ready(&dequeiter_type) < 0)
return NULL; return NULL;
Py_INCREF(&dequeiter_type);
PyModule_AddObject(m, "_deque_iterator", (PyObject *)&dequeiter_type);
if (PyType_Ready(&dequereviter_type) < 0) if (PyType_Ready(&dequereviter_type) < 0)
return NULL; return NULL;
Py_INCREF(&dequereviter_type);
PyModule_AddObject(m, "_deque_reverse_iterator", (PyObject *)&dequereviter_type);
return m; return m;
} }
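With the iterator types now exposed on the module and given a tp_new, a deque iterator reduces to (type, (deque, consumed_count)) and can be rebuilt mid-iteration. A hedged sketch of the resulting Python-level behaviour (assuming Python 3.3+):

import pickle
from collections import deque

d = deque('abcde')
it = iter(d)
next(it); next(it)                         # consume 'a' and 'b'
cls, args = it.__reduce__()
print(cls.__name__, args)                  # _deque_iterator (deque([...]), 2)
clone = pickle.loads(pickle.dumps(it))
assert list(clone) == ['c', 'd', 'e']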

View file

@ -1935,7 +1935,7 @@ PyDecType_FromCStringExact(PyTypeObject *type, const char *s,
mpd_maxcontext(&maxctx); mpd_maxcontext(&maxctx);
mpd_qset_string(MPD(dec), s, &maxctx, &status); mpd_qset_string(MPD(dec), s, &maxctx, &status);
if (status & (MPD_Inexact|MPD_Rounded)) { if (status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) {
/* we want exact results */ /* we want exact results */
mpd_seterror(MPD(dec), MPD_Invalid_operation, &status); mpd_seterror(MPD(dec), MPD_Invalid_operation, &status);
} }
@ -2139,7 +2139,7 @@ PyDecType_FromLongExact(PyTypeObject *type, const PyObject *pylong,
return NULL; return NULL;
} }
if (status & (MPD_Inexact|MPD_Rounded)) { if (status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) {
/* we want exact results */ /* we want exact results */
mpd_seterror(MPD(dec), MPD_Invalid_operation, &status); mpd_seterror(MPD(dec), MPD_Invalid_operation, &status);
} }
@ -2385,8 +2385,8 @@ dectuple_as_str(PyObject *dectuple)
} }
/* coefficient */ /* coefficient */
digits = sequence_as_tuple(PyTuple_GET_ITEM(dectuple, 1), digits = sequence_as_tuple(PyTuple_GET_ITEM(dectuple, 1), PyExc_ValueError,
PyExc_ValueError, "coefficient must be a tuple of digits"); "coefficient must be a tuple of digits");
if (digits == NULL) { if (digits == NULL) {
goto error; goto error;
} }
@ -2435,8 +2435,8 @@ dectuple_as_str(PyObject *dectuple)
if (sign_special[1] == '\0') { if (sign_special[1] == '\0') {
/* not a special number */ /* not a special number */
*cp++ = 'E'; *cp++ = 'E';
n = snprintf(cp, MPD_EXPDIGITS+1, "%" PRI_mpd_ssize_t, exp); n = snprintf(cp, MPD_EXPDIGITS+2, "%" PRI_mpd_ssize_t, exp);
if (n < 0 || n >= MPD_EXPDIGITS+1) { if (n < 0 || n >= MPD_EXPDIGITS+2) {
PyErr_SetString(PyExc_RuntimeError, PyErr_SetString(PyExc_RuntimeError,
"internal error in dec_sequence_as_str"); "internal error in dec_sequence_as_str");
goto error; goto error;
@ -4215,7 +4215,7 @@ dec_hash(PyObject *v)
mpd_uint_t p_data[1] = {2305843009213693951ULL}; mpd_uint_t p_data[1] = {2305843009213693951ULL};
mpd_t p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA, 0, 19, 1, 1, p_data}; mpd_t p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA, 0, 19, 1, 1, p_data};
/* Inverse of 10 modulo p */ /* Inverse of 10 modulo p */
mpd_uint_t inv10_p_data[2] = {2075258708292324556ULL}; mpd_uint_t inv10_p_data[1] = {2075258708292324556ULL};
mpd_t inv10_p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA, mpd_t inv10_p = {MPD_POS|MPD_STATIC|MPD_CONST_DATA,
0, 19, 1, 1, inv10_p_data}; 0, 19, 1, 1, inv10_p_data};
#elif defined(CONFIG_32) && _PyHASH_BITS == 31 #elif defined(CONFIG_32) && _PyHASH_BITS == 31
@ -4934,7 +4934,7 @@ ctx_copy_decimal(PyObject *context, PyObject *v)
PyObject *result; PyObject *result;
CONVERT_OP_RAISE(&result, v, context); CONVERT_OP_RAISE(&result, v, context);
return result; return result;
} }
static PyObject * static PyObject *

View file

@ -302,6 +302,7 @@ def RestrictedDecimal(value):
dec = maxcontext.create_decimal(value) dec = maxcontext.create_decimal(value)
if maxcontext.flags[P.Inexact] or \ if maxcontext.flags[P.Inexact] or \
maxcontext.flags[P.Rounded] or \ maxcontext.flags[P.Rounded] or \
maxcontext.flags[P.Clamped] or \
maxcontext.flags[P.InvalidOperation]: maxcontext.flags[P.InvalidOperation]:
return context.p._raise_error(P.InvalidOperation) return context.p._raise_error(P.InvalidOperation)
if maxcontext.flags[P.FloatOperation]: if maxcontext.flags[P.FloatOperation]:

View file

@ -48,6 +48,7 @@
/* See http://www.python.org/psf/license for licensing details. */ /* See http://www.python.org/psf/license for licensing details. */
#include "Python.h" #include "Python.h"
#include "structmember.h"
#define VERSION "1.0.6" #define VERSION "1.0.6"
@ -229,6 +230,8 @@ typedef struct {
ElementObjectExtra* extra; ElementObjectExtra* extra;
PyObject *weakreflist; /* For tp_weaklistoffset */
} ElementObject; } ElementObject;
static PyTypeObject Element_Type; static PyTypeObject Element_Type;
@ -261,17 +264,26 @@ create_extra(ElementObject* self, PyObject* attrib)
LOCAL(void) LOCAL(void)
dealloc_extra(ElementObject* self) dealloc_extra(ElementObject* self)
{ {
ElementObjectExtra *myextra;
int i; int i;
Py_DECREF(self->extra->attrib); if (!self->extra)
return;
for (i = 0; i < self->extra->length; i++) /* Avoid DECREFs calling into this code again (cycles, etc.)
Py_DECREF(self->extra->children[i]); */
myextra = self->extra;
self->extra = NULL;
if (self->extra->children != self->extra->_children) Py_DECREF(myextra->attrib);
PyObject_Free(self->extra->children);
PyObject_Free(self->extra); for (i = 0; i < myextra->length; i++)
Py_DECREF(myextra->children[i]);
if (myextra->children != myextra->_children)
PyObject_Free(myextra->children);
PyObject_Free(myextra);
} }
/* Convenience internal function to create new Element objects with the given /* Convenience internal function to create new Element objects with the given
@ -308,6 +320,8 @@ create_new_element(PyObject* tag, PyObject* attrib)
Py_INCREF(Py_None); Py_INCREF(Py_None);
self->tail = Py_None; self->tail = Py_None;
self->weakreflist = NULL;
ALLOC(sizeof(ElementObject), "create element"); ALLOC(sizeof(ElementObject), "create element");
PyObject_GC_Track(self); PyObject_GC_Track(self);
return (PyObject*) self; return (PyObject*) self;
@ -328,6 +342,7 @@ element_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
e->tail = Py_None; e->tail = Py_None;
e->extra = NULL; e->extra = NULL;
e->weakreflist = NULL;
} }
return (PyObject *)e; return (PyObject *)e;
} }
@ -576,19 +591,28 @@ element_gc_traverse(ElementObject *self, visitproc visit, void *arg)
static int static int
element_gc_clear(ElementObject *self) element_gc_clear(ElementObject *self)
{ {
PyObject *text = JOIN_OBJ(self->text);
PyObject *tail = JOIN_OBJ(self->tail);
Py_CLEAR(self->tag); Py_CLEAR(self->tag);
Py_CLEAR(text);
Py_CLEAR(tail); /* The following is like Py_CLEAR for self->text and self->tail, but
* written explicitily because the real pointers hide behind access
* macros.
*/
if (self->text) {
PyObject *tmp = JOIN_OBJ(self->text);
self->text = NULL;
Py_DECREF(tmp);
}
if (self->tail) {
PyObject *tmp = JOIN_OBJ(self->tail);
self->tail = NULL;
Py_DECREF(tmp);
}
/* After dropping all references from extra, it's no longer valid anyway, /* After dropping all references from extra, it's no longer valid anyway,
** so fully deallocate it (see also element_clearmethod) * so fully deallocate it.
*/ */
if (self->extra) { dealloc_extra(self);
dealloc_extra(self);
self->extra = NULL;
}
return 0; return 0;
} }
@ -596,6 +620,10 @@ static void
element_dealloc(ElementObject* self) element_dealloc(ElementObject* self)
{ {
PyObject_GC_UnTrack(self); PyObject_GC_UnTrack(self);
if (self->weakreflist != NULL)
PyObject_ClearWeakRefs((PyObject *) self);
/* element_gc_clear clears all references and deallocates extra /* element_gc_clear clears all references and deallocates extra
*/ */
element_gc_clear(self); element_gc_clear(self);
@ -626,10 +654,7 @@ element_clearmethod(ElementObject* self, PyObject* args)
if (!PyArg_ParseTuple(args, ":clear")) if (!PyArg_ParseTuple(args, ":clear"))
return NULL; return NULL;
if (self->extra) { dealloc_extra(self);
dealloc_extra(self);
self->extra = NULL;
}
Py_INCREF(Py_None); Py_INCREF(Py_None);
Py_DECREF(JOIN_OBJ(self->text)); Py_DECREF(JOIN_OBJ(self->text));
@ -1693,7 +1718,7 @@ static PyTypeObject Element_Type = {
(traverseproc)element_gc_traverse, /* tp_traverse */ (traverseproc)element_gc_traverse, /* tp_traverse */
(inquiry)element_gc_clear, /* tp_clear */ (inquiry)element_gc_clear, /* tp_clear */
0, /* tp_richcompare */ 0, /* tp_richcompare */
0, /* tp_weaklistoffset */ offsetof(ElementObject, weakreflist), /* tp_weaklistoffset */
0, /* tp_iter */ 0, /* tp_iter */
0, /* tp_iternext */ 0, /* tp_iternext */
element_methods, /* tp_methods */ element_methods, /* tp_methods */
@ -3009,8 +3034,7 @@ static struct PyModuleDef _elementtreemodule = {
PyMODINIT_FUNC PyMODINIT_FUNC
PyInit__elementtree(void) PyInit__elementtree(void)
{ {
PyObject* m; PyObject *m, *g, *temp;
PyObject* g;
char* bootstrap; char* bootstrap;
/* Initialize object types */ /* Initialize object types */
@ -3042,10 +3066,6 @@ PyInit__elementtree(void)
PyDict_SetItemString(g, "__builtins__", PyEval_GetBuiltins()); PyDict_SetItemString(g, "__builtins__", PyEval_GetBuiltins());
bootstrap = ( bootstrap = (
"from copy import deepcopy\n"
"from xml.etree import ElementPath\n"
"def iter(node, tag=None):\n" /* helper */ "def iter(node, tag=None):\n" /* helper */
" if tag == '*':\n" " if tag == '*':\n"
" tag = None\n" " tag = None\n"
@ -3069,8 +3089,14 @@ PyInit__elementtree(void)
if (!PyRun_String(bootstrap, Py_file_input, g, NULL)) if (!PyRun_String(bootstrap, Py_file_input, g, NULL))
return NULL; return NULL;
elementpath_obj = PyDict_GetItemString(g, "ElementPath"); if (!(temp = PyImport_ImportModule("copy")))
elementtree_deepcopy_obj = PyDict_GetItemString(g, "deepcopy"); return NULL;
elementtree_deepcopy_obj = PyObject_GetAttrString(temp, "deepcopy");
Py_XDECREF(temp);
if (!(elementpath_obj = PyImport_ImportModule("xml.etree.ElementPath")))
return NULL;
elementtree_iter_obj = PyDict_GetItemString(g, "iter"); elementtree_iter_obj = PyDict_GetItemString(g, "iter");
elementtree_itertext_obj = PyDict_GetItemString(g, "itertext"); elementtree_itertext_obj = PyDict_GetItemString(g, "itertext");

View file

@ -2753,6 +2753,34 @@ arrayiter_traverse(arrayiterobject *it, visitproc visit, void *arg)
return 0; return 0;
} }
static PyObject *
arrayiter_reduce(arrayiterobject *it)
{
return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
it->ao, it->index);
}
static PyObject *
arrayiter_setstate(arrayiterobject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (index < 0)
index = 0;
it->index = index;
Py_RETURN_NONE;
}
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef arrayiter_methods[] = {
{"__reduce__", (PyCFunction)arrayiter_reduce, METH_NOARGS,
reduce_doc},
{"__setstate__", (PyCFunction)arrayiter_setstate, METH_O,
setstate_doc},
{NULL, NULL} /* sentinel */
};
static PyTypeObject PyArrayIter_Type = { static PyTypeObject PyArrayIter_Type = {
PyVarObject_HEAD_INIT(NULL, 0) PyVarObject_HEAD_INIT(NULL, 0)
"arrayiterator", /* tp_name */ "arrayiterator", /* tp_name */
@ -2782,7 +2810,7 @@ static PyTypeObject PyArrayIter_Type = {
0, /* tp_weaklistoffset */ 0, /* tp_weaklistoffset */
PyObject_SelfIter, /* tp_iter */ PyObject_SelfIter, /* tp_iter */
(iternextfunc)arrayiter_next, /* tp_iternext */ (iternextfunc)arrayiter_next, /* tp_iternext */
0, /* tp_methods */ arrayiter_methods, /* tp_methods */
}; };
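A minimal sketch of what the new __reduce__/__setstate__ pair on array iterators enables (assuming Python 3.3+): a partially consumed iterator pickles together with its array and its index.

import pickle
from array import array

a = array('i', [1, 2, 3, 4])
it = iter(a)
next(it)                                   # consume 1
it = pickle.loads(pickle.dumps(it))
assert list(it) == [2, 3, 4]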

File diff suppressed because it is too large

View file

@ -3003,7 +3003,7 @@ bytearrayiter_next(bytesiterobject *it)
} }
static PyObject * static PyObject *
bytesarrayiter_length_hint(bytesiterobject *it) bytearrayiter_length_hint(bytesiterobject *it)
{ {
Py_ssize_t len = 0; Py_ssize_t len = 0;
if (it->it_seq) if (it->it_seq)
@ -3014,9 +3014,41 @@ bytesarrayiter_length_hint(bytesiterobject *it)
PyDoc_STRVAR(length_hint_doc, PyDoc_STRVAR(length_hint_doc,
"Private method returning an estimate of len(list(it))."); "Private method returning an estimate of len(list(it)).");
static PyObject *
bytearrayiter_reduce(bytesiterobject *it)
{
if (it->it_seq != NULL) {
return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
it->it_seq, it->it_index);
} else {
PyObject *u = PyUnicode_FromUnicode(NULL, 0);
if (u == NULL)
return NULL;
return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u);
}
}
static PyObject *
bytearrayiter_setstate(bytesiterobject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (index < 0)
index = 0;
it->it_index = index;
Py_RETURN_NONE;
}
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef bytearrayiter_methods[] = { static PyMethodDef bytearrayiter_methods[] = {
{"__length_hint__", (PyCFunction)bytesarrayiter_length_hint, METH_NOARGS, {"__length_hint__", (PyCFunction)bytearrayiter_length_hint, METH_NOARGS,
length_hint_doc}, length_hint_doc},
{"__reduce__", (PyCFunction)bytearrayiter_reduce, METH_NOARGS,
reduce_doc},
{"__setstate__", (PyCFunction)bytearrayiter_setstate, METH_O,
setstate_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };

View file

@ -3074,9 +3074,43 @@ striter_len(striterobject *it)
PyDoc_STRVAR(length_hint_doc, PyDoc_STRVAR(length_hint_doc,
"Private method returning an estimate of len(list(it))."); "Private method returning an estimate of len(list(it)).");
static PyObject *
striter_reduce(striterobject *it)
{
if (it->it_seq != NULL) {
return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
it->it_seq, it->it_index);
} else {
PyObject *u = PyUnicode_FromUnicode(NULL, 0);
if (u == NULL)
return NULL;
return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u);
}
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyObject *
striter_setstate(striterobject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (index < 0)
index = 0;
it->it_index = index;
Py_RETURN_NONE;
}
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef striter_methods[] = { static PyMethodDef striter_methods[] = {
{"__length_hint__", (PyCFunction)striter_len, METH_NOARGS, {"__length_hint__", (PyCFunction)striter_len, METH_NOARGS,
length_hint_doc}, length_hint_doc},
{"__reduce__", (PyCFunction)striter_reduce, METH_NOARGS,
reduce_doc},
{"__setstate__", (PyCFunction)striter_setstate, METH_O,
setstate_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
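bytes, bytearray and str iterators all follow the pattern shown above: __reduce__ names the builtin iter() plus the sequence and the current index, and an exhausted iterator falls back to an iterator over an empty sequence. A small sketch (assuming Python 3.3+):

import pickle

it = iter(b'abc')
next(it)                                   # consume the first byte
assert bytes(pickle.loads(pickle.dumps(it))) == b'bc'

list(it)                                   # exhaust the iterator
assert list(pickle.loads(pickle.dumps(it))) == []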

View file

@ -2323,9 +2323,16 @@ dictiter_len(dictiterobject *di)
PyDoc_STRVAR(length_hint_doc, PyDoc_STRVAR(length_hint_doc,
"Private method returning an estimate of len(list(it))."); "Private method returning an estimate of len(list(it)).");
static PyObject *
dictiter_reduce(dictiterobject *di);
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyMethodDef dictiter_methods[] = { static PyMethodDef dictiter_methods[] = {
{"__length_hint__", (PyCFunction)dictiter_len, METH_NOARGS, {"__length_hint__", (PyCFunction)dictiter_len, METH_NOARGS,
length_hint_doc}, length_hint_doc},
{"__reduce__", (PyCFunction)dictiter_reduce, METH_NOARGS,
reduce_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
@ -2560,6 +2567,52 @@ PyTypeObject PyDictIterItem_Type = {
}; };
static PyObject *
dictiter_reduce(dictiterobject *di)
{
PyObject *list;
dictiterobject tmp;
list = PyList_New(0);
if (!list)
return NULL;
/* copy the iterator state */
tmp = *di;
Py_XINCREF(tmp.di_dict);
/* iterate the temporary into a list */
for(;;) {
PyObject *element = 0;
if (Py_TYPE(di) == &PyDictIterItem_Type)
element = dictiter_iternextitem(&tmp);
else if (Py_TYPE(di) == &PyDictIterKey_Type)
element = dictiter_iternextkey(&tmp);
else if (Py_TYPE(di) == &PyDictIterValue_Type)
element = dictiter_iternextvalue(&tmp);
else
assert(0);
if (element) {
if (PyList_Append(list, element)) {
Py_DECREF(element);
Py_DECREF(list);
Py_XDECREF(tmp.di_dict);
return NULL;
}
Py_DECREF(element);
} else
break;
}
Py_XDECREF(tmp.di_dict);
/* check for error */
if (tmp.di_dict != NULL) {
/* we have an error */
Py_DECREF(list);
return NULL;
}
return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list);
}
/***********************************************/ /***********************************************/
/* View objects for keys(), items(), values(). */ /* View objects for keys(), items(), values(). */
/***********************************************/ /***********************************************/
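dictiter_reduce above works on a copy of the iterator state, walks that copy to the end, and reduces to a plain list iterator over whatever remained, leaving the original iterator untouched. A sketch of the observable behaviour (assuming Python 3.3+), which applies equally to keys(), values() and items() iterators:

import pickle

d = {'a': 1, 'b': 2, 'c': 3}
it = iter(d)
first = next(it)
clone = pickle.loads(pickle.dumps(it))     # list iterator over the remaining keys
assert sorted(clone) == sorted(set(d) - {first})
assert len(list(it)) == 2                  # the original iterator was not consumed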

View file

@ -158,6 +158,22 @@ enum_next(enumobject *en)
return result; return result;
} }
static PyObject *
enum_reduce(enumobject *en)
{
if (en->en_longindex != NULL)
return Py_BuildValue("O(OO)", Py_TYPE(en), en->en_sit, en->en_longindex);
else
return Py_BuildValue("O(On)", Py_TYPE(en), en->en_sit, en->en_index);
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyMethodDef enum_methods[] = {
{"__reduce__", (PyCFunction)enum_reduce, METH_NOARGS, reduce_doc},
{NULL, NULL} /* sentinel */
};
PyDoc_STRVAR(enum_doc, PyDoc_STRVAR(enum_doc,
"enumerate(iterable[, start]) -> iterator for index, value of iterable\n" "enumerate(iterable[, start]) -> iterator for index, value of iterable\n"
"\n" "\n"
@ -197,7 +213,7 @@ PyTypeObject PyEnum_Type = {
0, /* tp_weaklistoffset */ 0, /* tp_weaklistoffset */
PyObject_SelfIter, /* tp_iter */ PyObject_SelfIter, /* tp_iter */
(iternextfunc)enum_next, /* tp_iternext */ (iternextfunc)enum_next, /* tp_iternext */
0, /* tp_methods */ enum_methods, /* tp_methods */
0, /* tp_members */ 0, /* tp_members */
0, /* tp_getset */ 0, /* tp_getset */
0, /* tp_base */ 0, /* tp_base */
@ -319,8 +335,40 @@ reversed_len(reversedobject *ro)
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *
reversed_reduce(reversedobject *ro)
{
if (ro->seq)
return Py_BuildValue("O(O)n", Py_TYPE(ro), ro->seq, ro->index);
else
return Py_BuildValue("O(())", Py_TYPE(ro));
}
static PyObject *
reversed_setstate(reversedobject *ro, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (ro->seq != 0) {
Py_ssize_t n = PySequence_Size(ro->seq);
if (n < 0)
return NULL;
if (index < -1)
index = -1;
else if (index > n-1)
index = n-1;
ro->index = index;
}
Py_RETURN_NONE;
}
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef reversediter_methods[] = { static PyMethodDef reversediter_methods[] = {
{"__length_hint__", (PyCFunction)reversed_len, METH_NOARGS, length_hint_doc}, {"__length_hint__", (PyCFunction)reversed_len, METH_NOARGS, length_hint_doc},
{"__reduce__", (PyCFunction)reversed_reduce, METH_NOARGS, reduce_doc},
{"__setstate__", (PyCFunction)reversed_setstate, METH_O, setstate_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
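A small sketch of the two iterators handled in this file (assuming Python 3.3+): enumerate() carries its running count across a pickle round trip, and reversed() over a sequence without __reversed__ restores its index through __setstate__.

import pickle

en = enumerate('abc', start=10)
next(en)                                   # (10, 'a')
en = pickle.loads(pickle.dumps(en))
assert list(en) == [(11, 'b'), (12, 'c')]

rev = reversed('abc')                      # str has no __reversed__, so this is a reversed object
next(rev)                                  # 'c'
rev = pickle.loads(pickle.dumps(rev))
assert list(rev) == ['b', 'a']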

View file

@ -88,8 +88,38 @@ iter_len(seqiterobject *it)
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *
iter_reduce(seqiterobject *it)
{
if (it->it_seq != NULL)
return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
it->it_seq, it->it_index);
else
return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyObject *
iter_setstate(seqiterobject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (it->it_seq != NULL) {
if (index < 0)
index = 0;
it->it_index = index;
}
Py_RETURN_NONE;
}
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef seqiter_methods[] = { static PyMethodDef seqiter_methods[] = {
{"__length_hint__", (PyCFunction)iter_len, METH_NOARGS, length_hint_doc}, {"__length_hint__", (PyCFunction)iter_len, METH_NOARGS, length_hint_doc},
{"__reduce__", (PyCFunction)iter_reduce, METH_NOARGS, reduce_doc},
{"__setstate__", (PyCFunction)iter_setstate, METH_O, setstate_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
@ -195,6 +225,21 @@ calliter_iternext(calliterobject *it)
return NULL; return NULL;
} }
static PyObject *
calliter_reduce(calliterobject *it)
{
if (it->it_callable != NULL && it->it_sentinel != NULL)
return Py_BuildValue("N(OO)", _PyObject_GetBuiltin("iter"),
it->it_callable, it->it_sentinel);
else
return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
}
static PyMethodDef calliter_methods[] = {
{"__reduce__", (PyCFunction)calliter_reduce, METH_NOARGS, reduce_doc},
{NULL, NULL} /* sentinel */
};
PyTypeObject PyCallIter_Type = { PyTypeObject PyCallIter_Type = {
PyVarObject_HEAD_INIT(&PyType_Type, 0) PyVarObject_HEAD_INIT(&PyType_Type, 0)
"callable_iterator", /* tp_name */ "callable_iterator", /* tp_name */
@ -224,7 +269,7 @@ PyTypeObject PyCallIter_Type = {
0, /* tp_weaklistoffset */ 0, /* tp_weaklistoffset */
PyObject_SelfIter, /* tp_iter */ PyObject_SelfIter, /* tp_iter */
(iternextfunc)calliter_iternext, /* tp_iternext */ (iternextfunc)calliter_iternext, /* tp_iternext */
0, /* tp_methods */ calliter_methods, /* tp_methods */
}; };
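Sentinel iterators get the same treatment: calliter_reduce above rebuilds iter(callable, sentinel), so the pair pickles whenever the callable and the sentinel do. A hedged sketch with an illustrative Countdown callable (run it as a script so pickle can find the class by name):

import pickle

class Countdown:
    # Picklable callable: each call returns the next value counting down.
    def __init__(self, start):
        self.current = start
    def __call__(self):
        self.current -= 1
        return self.current

it = iter(Countdown(5), 0)                 # yields 4, 3, 2, 1, then stops at 0
assert next(it) == 4
clone = pickle.loads(pickle.dumps(it))     # the callable's state travels with it
assert list(clone) == [3, 2, 1]
assert list(it) == [3, 2, 1]               # the original keeps its own Countdown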

View file

@ -2660,11 +2660,18 @@ static void listiter_dealloc(listiterobject *);
static int listiter_traverse(listiterobject *, visitproc, void *); static int listiter_traverse(listiterobject *, visitproc, void *);
static PyObject *listiter_next(listiterobject *); static PyObject *listiter_next(listiterobject *);
static PyObject *listiter_len(listiterobject *); static PyObject *listiter_len(listiterobject *);
static PyObject *listiter_reduce_general(void *_it, int forward);
static PyObject *listiter_reduce(listiterobject *);
static PyObject *listiter_setstate(listiterobject *, PyObject *state);
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it))."); PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef listiter_methods[] = { static PyMethodDef listiter_methods[] = {
{"__length_hint__", (PyCFunction)listiter_len, METH_NOARGS, length_hint_doc}, {"__length_hint__", (PyCFunction)listiter_len, METH_NOARGS, length_hint_doc},
{"__reduce__", (PyCFunction)listiter_reduce, METH_NOARGS, reduce_doc},
{"__setstate__", (PyCFunction)listiter_setstate, METH_O, setstate_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
@ -2771,6 +2778,27 @@ listiter_len(listiterobject *it)
} }
return PyLong_FromLong(0); return PyLong_FromLong(0);
} }
static PyObject *
listiter_reduce(listiterobject *it)
{
return listiter_reduce_general(it, 1);
}
static PyObject *
listiter_setstate(listiterobject *it, PyObject *state)
{
long index = PyLong_AsLong(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (it->it_seq != NULL) {
if (index < 0)
index = 0;
it->it_index = index;
}
Py_RETURN_NONE;
}
/*********************** List Reverse Iterator **************************/ /*********************** List Reverse Iterator **************************/
typedef struct { typedef struct {
@ -2784,9 +2812,13 @@ static void listreviter_dealloc(listreviterobject *);
static int listreviter_traverse(listreviterobject *, visitproc, void *); static int listreviter_traverse(listreviterobject *, visitproc, void *);
static PyObject *listreviter_next(listreviterobject *); static PyObject *listreviter_next(listreviterobject *);
static PyObject *listreviter_len(listreviterobject *); static PyObject *listreviter_len(listreviterobject *);
static PyObject *listreviter_reduce(listreviterobject *);
static PyObject *listreviter_setstate(listreviterobject *, PyObject *);
static PyMethodDef listreviter_methods[] = { static PyMethodDef listreviter_methods[] = {
{"__length_hint__", (PyCFunction)listreviter_len, METH_NOARGS, length_hint_doc}, {"__length_hint__", (PyCFunction)listreviter_len, METH_NOARGS, length_hint_doc},
{"__reduce__", (PyCFunction)listreviter_reduce, METH_NOARGS, reduce_doc},
{"__setstate__", (PyCFunction)listreviter_setstate, METH_O, setstate_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
@ -2883,3 +2915,51 @@ listreviter_len(listreviterobject *it)
len = 0; len = 0;
return PyLong_FromSsize_t(len); return PyLong_FromSsize_t(len);
} }
static PyObject *
listreviter_reduce(listreviterobject *it)
{
return listiter_reduce_general(it, 0);
}
static PyObject *
listreviter_setstate(listreviterobject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (it->it_seq != NULL) {
if (index < -1)
index = -1;
else if (index > PyList_GET_SIZE(it->it_seq) - 1)
index = PyList_GET_SIZE(it->it_seq) - 1;
it->it_index = index;
}
Py_RETURN_NONE;
}
/* common pickling support */
static PyObject *
listiter_reduce_general(void *_it, int forward)
{
PyObject *list;
/* the objects are not the same, index is of different types! */
if (forward) {
listiterobject *it = (listiterobject *)_it;
if (it->it_seq)
return Py_BuildValue("N(O)l", _PyObject_GetBuiltin("iter"),
it->it_seq, it->it_index);
} else {
listreviterobject *it = (listreviterobject *)_it;
if (it->it_seq)
return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("reversed"),
it->it_seq, it->it_index);
}
/* empty iterator, create an empty list */
list = PyList_New(0);
if (list == NULL)
return NULL;
return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list);
}
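listiter_reduce_general above shows the shape shared by most of these patches: the reduce tuple is a builtin (iter or reversed), the sequence, and the current index, so unpickling needs no new types. A sketch of what that looks like from Python (assuming 3.3+):

it = iter([10, 20, 30])
next(it)
func, args, state = it.__reduce__()
print(func.__name__, args, state)          # iter ([10, 20, 30],) 1

rit = reversed([10, 20, 30])
func, args, state = rit.__reduce__()
print(func.__name__, args, state)          # reversed ([10, 20, 30],) 2

list(it)                                   # exhaust it; the list reference is dropped
func, args = it.__reduce__()
print(func.__name__, args)                 # iter ([],)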

View file

@ -1026,6 +1026,19 @@ PyObject_SelfIter(PyObject *obj)
return obj; return obj;
} }
/* Convenience function to get a builtin from its name */
PyObject *
_PyObject_GetBuiltin(const char *name)
{
PyObject *mod, *attr;
mod = PyImport_ImportModule("builtins");
if (mod == NULL)
return NULL;
attr = PyObject_GetAttrString(mod, name);
Py_DECREF(mod);
return attr;
}
/* Helper used when the __next__ method is removed from a type: /* Helper used when the __next__ method is removed from a type:
tp_iternext is never NULL and can be safely called without checking tp_iternext is never NULL and can be safely called without checking
on every iteration. on every iteration.

View file

@ -964,9 +964,59 @@ rangeiter_len(rangeiterobject *r)
PyDoc_STRVAR(length_hint_doc, PyDoc_STRVAR(length_hint_doc,
"Private method returning an estimate of len(list(it))."); "Private method returning an estimate of len(list(it)).");
static PyObject *
rangeiter_reduce(rangeiterobject *r)
{
PyObject *start=NULL, *stop=NULL, *step=NULL;
PyObject *range;
/* create a range object for pickling */
start = PyLong_FromLong(r->start);
if (start == NULL)
goto err;
stop = PyLong_FromLong(r->start + r->len * r->step);
if (stop == NULL)
goto err;
step = PyLong_FromLong(r->step);
if (step == NULL)
goto err;
range = (PyObject*)make_range_object(&PyRange_Type,
start, stop, step);
if (range == NULL)
goto err;
/* return the result */
return Py_BuildValue("N(N)i", _PyObject_GetBuiltin("iter"), range, r->index);
err:
Py_XDECREF(start);
Py_XDECREF(stop);
Py_XDECREF(step);
return NULL;
}
static PyObject *
rangeiter_setstate(rangeiterobject *r, PyObject *state)
{
long index = PyLong_AsLong(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (index < 0 || index >= r->len) {
PyErr_SetString(PyExc_ValueError, "index out of range");
return NULL;
}
r->index = index;
Py_RETURN_NONE;
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef rangeiter_methods[] = { static PyMethodDef rangeiter_methods[] = {
{"__length_hint__", (PyCFunction)rangeiter_len, METH_NOARGS, {"__length_hint__", (PyCFunction)rangeiter_len, METH_NOARGS,
length_hint_doc}, length_hint_doc},
{"__reduce__", (PyCFunction)rangeiter_reduce, METH_NOARGS,
reduce_doc},
{"__setstate__", (PyCFunction)rangeiter_setstate, METH_O,
setstate_doc},
{NULL, NULL} /* sentinel */ {NULL, NULL} /* sentinel */
}; };
@ -1095,9 +1145,51 @@ longrangeiter_len(longrangeiterobject *r, PyObject *no_args)
return PyNumber_Subtract(r->len, r->index); return PyNumber_Subtract(r->len, r->index);
} }
static PyObject *
longrangeiter_reduce(longrangeiterobject *r)
{
PyObject *product, *stop=NULL;
PyObject *range;
/* create a range object for pickling. Must calculate the "stop" value */
product = PyNumber_Multiply(r->len, r->step);
if (product == NULL)
return NULL;
stop = PyNumber_Add(r->start, product);
Py_DECREF(product);
if (stop == NULL)
return NULL;
Py_INCREF(r->start);
Py_INCREF(r->step);
range = (PyObject*)make_range_object(&PyRange_Type,
r->start, stop, r->step);
if (range == NULL) {
Py_DECREF(r->start);
Py_DECREF(stop);
Py_DECREF(r->step);
return NULL;
}
/* return the result */
return Py_BuildValue("N(N)O", _PyObject_GetBuiltin("iter"), range, r->index);
}
static PyObject *
longrangeiter_setstate(longrangeiterobject *r, PyObject *state)
{
Py_CLEAR(r->index);
r->index = state;
Py_INCREF(r->index);
Py_RETURN_NONE;
}
static PyMethodDef longrangeiter_methods[] = {
{"__length_hint__", (PyCFunction)longrangeiter_len, METH_NOARGS,
length_hint_doc},
{"__reduce__", (PyCFunction)longrangeiter_reduce, METH_NOARGS,
reduce_doc},
{"__setstate__", (PyCFunction)longrangeiter_setstate, METH_O,
setstate_doc},
{NULL, NULL} /* sentinel */
};
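
For the long-range iterator, the reducer has to recompute the range's stop value from start, length and step before it can rebuild the object. The same arithmetic in Python, with made-up stand-ins for r->start, r->len and r->step:

start, length, step = 10**20, 5, 7
stop = start + length * step            # what longrangeiter_reduce computes
rng = range(start, stop, step)
print(len(rng) == length)               # True: the rebuilt range has the expected length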


@ -819,8 +819,51 @@ setiter_len(setiterobject *si)
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *setiter_iternext(setiterobject *si);
static PyObject *
setiter_reduce(setiterobject *si)
{
PyObject *list;
setiterobject tmp;
list = PyList_New(0);
if (!list)
return NULL;
/* copy the iterator state */
tmp = *si;
Py_XINCREF(tmp.si_set);
/* iterate the temporary into a list */
for(;;) {
PyObject *element = setiter_iternext(&tmp);
if (element) {
if (PyList_Append(list, element)) {
Py_DECREF(element);
Py_DECREF(list);
Py_XDECREF(tmp.si_set);
return NULL;
}
Py_DECREF(element);
} else
break;
}
Py_XDECREF(tmp.si_set);
/* check for error */
if (tmp.si_set != NULL) {
/* we have an error */
Py_DECREF(list);
return NULL;
}
return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), list);
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyMethodDef setiter_methods[] = {
{"__length_hint__", (PyCFunction)setiter_len, METH_NOARGS, length_hint_doc},
{"__reduce__", (PyCFunction)setiter_reduce, METH_NOARGS, reduce_doc},
{NULL, NULL} /* sentinel */
};
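
Because a set has no stable position that could be restored later, setiter_reduce snapshots the remaining elements into a list and pickles an iterator over that list instead. Roughly, from Python:

import pickle

s = {"a", "b", "c", "d"}
it = iter(s)
next(it)                                # one element already consumed
clone = pickle.loads(pickle.dumps(it))
print(sorted(clone))                    # the three remaining elements, now behind a plain list iterator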
@ -1964,8 +2007,6 @@ done:
return result;
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyObject *
set_sizeof(PySetObject *so)
{


@ -967,8 +967,39 @@ tupleiter_len(tupleiterobject *it)
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *
tupleiter_reduce(tupleiterobject *it)
{
if (it->it_seq)
return Py_BuildValue("N(O)l", _PyObject_GetBuiltin("iter"),
it->it_seq, it->it_index);
else
return Py_BuildValue("N(())", _PyObject_GetBuiltin("iter"));
}
static PyObject *
tupleiter_setstate(tupleiterobject *it, PyObject *state)
{
long index = PyLong_AsLong(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (it->it_seq != NULL) {
if (index < 0)
index = 0;
else if (it->it_seq != NULL && index > PyTuple_GET_SIZE(it->it_seq))
index = PyTuple_GET_SIZE(it->it_seq);
it->it_index = index;
}
Py_RETURN_NONE;
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef tupleiter_methods[] = {
{"__length_hint__", (PyCFunction)tupleiter_len, METH_NOARGS, length_hint_doc},
{"__reduce__", (PyCFunction)tupleiter_reduce, METH_NOARGS, reduce_doc},
{"__setstate__", (PyCFunction)tupleiter_setstate, METH_O, setstate_doc},
{NULL, NULL} /* sentinel */
};


@ -5393,9 +5393,11 @@ PyUnicode_DecodeUTF16(const char *s,
#if (SIZEOF_LONG == 8)
# define FAST_CHAR_MASK         0x8000800080008000L
# define SWAPPED_FAST_CHAR_MASK 0x0080008000800080L
# define STRIPPED_MASK          0x00FF00FF00FF00FFL
#elif (SIZEOF_LONG == 4)
# define FAST_CHAR_MASK         0x80008000L
# define SWAPPED_FAST_CHAR_MASK 0x00800080L
# define STRIPPED_MASK          0x00FF00FFL
#else
# error C 'long' size should be either 4 or 8!
#endif
@ -5497,7 +5499,6 @@ PyUnicode_DecodeUTF16Stateful(const char *s,
        void *data = PyUnicode_DATA(unicode);
        while (_q < aligned_end) {
            unsigned long block = * (unsigned long *) _q;
            Py_UCS4 maxch;
            if (native_ordering) {
                /* Can use buffer directly */
@ -5506,23 +5507,22 @@ PyUnicode_DecodeUTF16Stateful(const char *s,
            }
            else {
                /* Need to byte-swap */
                if (block & SWAPPED_FAST_CHAR_MASK)
                    break;
                block = ((block >> 8) & STRIPPED_MASK) |
                        ((block & STRIPPED_MASK) << 8);
            }
            maxch = (Py_UCS2)(block & 0xFFFF);
#if SIZEOF_LONG == 8
            ch = (Py_UCS2)((block >> 16) & 0xFFFF);
            maxch = Py_MAX(maxch, ch);
            ch = (Py_UCS2)((block >> 32) & 0xFFFF);
            maxch = Py_MAX(maxch, ch);
            ch = (Py_UCS2)(block >> 48);
            maxch = Py_MAX(maxch, ch);
#else
            ch = (Py_UCS2)(block >> 16);
            maxch = Py_MAX(maxch, ch);
#endif
            if (maxch > PyUnicode_MAX_CHAR_VALUE(unicode)) {
                if (unicode_widen(&unicode, maxch) < 0)
@ -5530,11 +5530,24 @@ PyUnicode_DecodeUTF16Stateful(const char *s,
                kind = PyUnicode_KIND(unicode);
                data = PyUnicode_DATA(unicode);
            }
#ifdef BYTEORDER_IS_LITTLE_ENDIAN
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block & 0xFFFF));
#if SIZEOF_LONG == 8
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 16) & 0xFFFF));
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 32) & 0xFFFF));
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 48)));
#else
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block >> 16));
#endif
#else
#if SIZEOF_LONG == 8
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 48)));
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 32) & 0xFFFF));
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)((block >> 16) & 0xFFFF));
#else
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block >> 16));
#endif
            PyUnicode_WRITE(kind, data, outpos++, (Py_UCS2)(block & 0xFFFF));
#endif
            _q += SIZEOF_LONG;
        }
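
The decoder now byte-swaps a whole machine word at a time with a mask-and-shift instead of copying individual bytes through a temporary buffer. The same transformation, written out in Python for a 32-bit word using the STRIPPED_MASK value defined above:

STRIPPED_MASK = 0x00FF00FF              # 32-bit variant of the mask defined above

def swap16_units(block):
    # swap the two bytes inside each 16-bit code unit packed in the word
    return (((block >> 8) & STRIPPED_MASK) | ((block & STRIPPED_MASK) << 8)) & 0xFFFFFFFF

print(hex(swap16_units(0x12345678)))    # 0x34127856 -- each UTF-16 unit byte-swapped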
@ -14382,9 +14395,43 @@ unicodeiter_len(unicodeiterobject *it)
PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
static PyObject *
unicodeiter_reduce(unicodeiterobject *it)
{
if (it->it_seq != NULL) {
return Py_BuildValue("N(O)n", _PyObject_GetBuiltin("iter"),
it->it_seq, it->it_index);
} else {
PyObject *u = PyUnicode_FromUnicode(NULL, 0);
if (u == NULL)
return NULL;
return Py_BuildValue("N(N)", _PyObject_GetBuiltin("iter"), u);
}
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyObject *
unicodeiter_setstate(unicodeiterobject *it, PyObject *state)
{
Py_ssize_t index = PyLong_AsSsize_t(state);
if (index == -1 && PyErr_Occurred())
return NULL;
if (index < 0)
index = 0;
it->it_index = index;
Py_RETURN_NONE;
}
PyDoc_STRVAR(setstate_doc, "Set state information for unpickling.");
static PyMethodDef unicodeiter_methods[] = {
{"__length_hint__", (PyCFunction)unicodeiter_len, METH_NOARGS,
length_hint_doc},
{"__reduce__", (PyCFunction)unicodeiter_reduce, METH_NOARGS,
reduce_doc},
{"__setstate__", (PyCFunction)unicodeiter_setstate, METH_O,
setstate_doc},
{NULL, NULL} /* sentinel */
};


@ -438,6 +438,19 @@ filter_next(filterobject *lz)
    }
}
static PyObject *
filter_reduce(filterobject *lz)
{
return Py_BuildValue("O(OO)", Py_TYPE(lz), lz->func, lz->it);
}
PyDoc_STRVAR(reduce_doc, "Return state information for pickling.");
static PyMethodDef filter_methods[] = {
{"__reduce__", (PyCFunction)filter_reduce, METH_NOARGS, reduce_doc},
{NULL, NULL} /* sentinel */
};
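
filter_reduce reports the object's type together with the wrapped function and source iterator, so a filter object round-trips through pickle as long as both of those are themselves picklable. A hedged sketch of the expected behaviour:

import pickle

f = filter(None, iter([0, 1, "", "spam"]))
clone = pickle.loads(pickle.dumps(f))
print(list(clone))              # expected: [1, 'spam'], rebuilt as filter(None, <restored list_iterator>)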
PyDoc_STRVAR(filter_doc,
"filter(function or None, iterable) --> filter object\n\
\n\
@ -474,7 +487,7 @@ PyTypeObject PyFilter_Type = {
0, /* tp_weaklistoffset */
PyObject_SelfIter, /* tp_iter */
(iternextfunc)filter_next, /* tp_iternext */
filter_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
@ -1054,6 +1067,31 @@ map_next(mapobject *lz)
return result;
}
static PyObject *
map_reduce(mapobject *lz)
{
Py_ssize_t numargs = PyTuple_GET_SIZE(lz->iters);
PyObject *args = PyTuple_New(numargs+1);
Py_ssize_t i;
if (args == NULL)
return NULL;
Py_INCREF(lz->func);
PyTuple_SET_ITEM(args, 0, lz->func);
for (i = 0; i<numargs; i++){
PyObject *it = PyTuple_GET_ITEM(lz->iters, i);
Py_INCREF(it);
PyTuple_SET_ITEM(args, i+1, it);
}
return Py_BuildValue("ON", Py_TYPE(lz), args);
}
static PyMethodDef map_methods[] = {
{"__reduce__", (PyCFunction)map_reduce, METH_NOARGS, reduce_doc},
{NULL, NULL} /* sentinel */
};
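
map_reduce packs the callable and every source iterator back into a single argument tuple, so calling the type with those arguments recreates the object. Checking the shape of the reduce value directly (no pickling involved):

m = map(len, iter(["spam", "eggs"]))
cls, args = m.__reduce__()
print(cls is map)               # True; args is (len, <list_iterator ...>)
print(list(cls(*args)))         # [4, 4]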
PyDoc_STRVAR(map_doc,
"map(func, *iterables) --> map object\n\
\n\
@ -1090,7 +1128,7 @@ PyTypeObject PyMap_Type = {
0, /* tp_weaklistoffset */
PyObject_SelfIter, /* tp_iter */
(iternextfunc)map_next, /* tp_iternext */
map_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */
@ -2238,6 +2276,18 @@ zip_next(zipobject *lz)
return result;
}
static PyObject *
zip_reduce(zipobject *lz)
{
/* Just recreate the zip with the internal iterator tuple */
return Py_BuildValue("OO", Py_TYPE(lz), lz->ittuple);
}
static PyMethodDef zip_methods[] = {
{"__reduce__", (PyCFunction)zip_reduce, METH_NOARGS, reduce_doc},
{NULL, NULL} /* sentinel */
};
PyDoc_STRVAR(zip_doc,
"zip(iter1 [,iter2 [...]]) --> zip object\n\
\n\
@ -2276,7 +2326,7 @@ PyTypeObject PyZip_Type = {
0, /* tp_weaklistoffset */
PyObject_SelfIter, /* tp_iter */
(iternextfunc)zip_next, /* tp_iternext */
zip_methods, /* tp_methods */
0, /* tp_members */
0, /* tp_getset */
0, /* tp_base */


@ -1356,56 +1356,67 @@ parse_syntax_error(PyObject *err, PyObject **message, const char **filename,
    _Py_IDENTIFIER(offset);
    _Py_IDENTIFIER(text);

    *message = NULL;

    /* new style errors.  `err' is an instance */
    *message = _PyObject_GetAttrId(err, &PyId_msg);
    if (!*message)
        goto finally;

    v = _PyObject_GetAttrId(err, &PyId_filename);
    if (!v)
        goto finally;
    if (v == Py_None) {
        Py_DECREF(v);
        *filename = NULL;
    }
    else {
        *filename = _PyUnicode_AsString(v);
        Py_DECREF(v);
        if (!*filename)
            goto finally;
    }

    v = _PyObject_GetAttrId(err, &PyId_lineno);
    if (!v)
        goto finally;
    hold = PyLong_AsLong(v);
    Py_DECREF(v);
    if (hold < 0 && PyErr_Occurred())
        goto finally;
    *lineno = (int)hold;

    v = _PyObject_GetAttrId(err, &PyId_offset);
    if (!v)
        goto finally;
    if (v == Py_None) {
        *offset = -1;
        Py_DECREF(v);
    } else {
        hold = PyLong_AsLong(v);
        Py_DECREF(v);
        if (hold < 0 && PyErr_Occurred())
            goto finally;
        *offset = (int)hold;
    }

    v = _PyObject_GetAttrId(err, &PyId_text);
    if (!v)
        goto finally;
    if (v == Py_None) {
        Py_DECREF(v);
        *text = NULL;
    }
    else {
        *text = _PyUnicode_AsString(v);
        Py_DECREF(v);
        if (!*text)
            goto finally;
    }
    return 1;

finally:
    Py_XDECREF(*message);
    return 0;
}
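
The rewritten helper simply walks the msg, filename, lineno, offset and text attributes of a SyntaxError instance, tolerating None and releasing each attribute object as soon as it is done with it. The same attribute walk in Python, for reference only:

def syntax_error_info(err):
    # the attributes parse_syntax_error() extracts, in the same order
    return (err.msg, err.filename, err.lineno, err.offset, err.text)

try:
    compile("def broken(:\n", "<demo>", "exec")
except SyntaxError as err:
    print(syntax_error_info(err))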


@ -3,34 +3,6 @@
# Usage: abitype.py < old_code > new_code
import re, sys

###### Replacement of PyTypeObject static instances ##############

# classify each token, giving it a one-letter code:
@ -79,7 +51,7 @@ def get_fields(start, real_end):
    while tokens[pos][0] in ('ws', 'comment'):
        pos += 1
    if tokens[pos][1] != 'PyVarObject_HEAD_INIT':
        raise Exception('%s has no PyVarObject_HEAD_INIT' % name)
    while tokens[pos][1] != ')':
        pos += 1
    pos += 1
@ -183,18 +155,48 @@ def make_slots(name, fields):
    return '\n'.join(res)

if __name__ == '__main__':

    ############ Simplistic C scanner ##################################
    tokenizer = re.compile(
        r"(?P<preproc>#.*\n)"
        r"|(?P<comment>/\*.*?\*/)"
        r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
        r"|(?P<ws>[ \t\n]+)"
        r"|(?P<other>.)",
        re.MULTILINE)

    tokens = []
    source = sys.stdin.read()
    pos = 0
    while pos != len(source):
        m = tokenizer.match(source, pos)
        tokens.append([m.lastgroup, m.group()])
        pos += len(tokens[-1][1])
        if tokens[-1][0] == 'preproc':
            # continuation lines are considered
            # only in preprocess statements
            while tokens[-1][1].endswith('\\\n'):
                nl = source.find('\n', pos)
                if nl == -1:
                    line = source[pos:]
                else:
                    line = source[pos:nl+1]
                tokens[-1][1] += line
                pos += len(line)

    # Main loop: replace all static PyTypeObjects until
    # there are none left.
    while 1:
        c = classify()
        m = re.search('(SW)?TWIW?=W?{.*?};', c)
        if not m:
            break
        start = m.start()
        end = m.end()
        name, fields = get_fields(start, m)
        tokens[start:end] = [('',make_slots(name, fields))]

    # Output result to stdout
    for t, v in tokens:
        sys.stdout.write(v)


@ -106,14 +106,16 @@ def check_limit(n, test_func_name):
    else:
        print("Yikes!")

if __name__ == '__main__':

    limit = 1000
    while 1:
        check_limit(limit, "test_recurse")
        check_limit(limit, "test_add")
        check_limit(limit, "test_repr")
        check_limit(limit, "test_init")
        check_limit(limit, "test_getattr")
        check_limit(limit, "test_getitem")
        check_limit(limit, "test_cpickle")
        print("Limit of %d is fine" % limit)
        limit = limit + 100


@ -76,29 +76,31 @@ usage = """Usage: %s [-cd] paths...
-c: recognize Python source files trying to compile them
-d: debug output""" % sys.argv[0]

if __name__ == '__main__':

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'cd')
    except getopt.error as msg:
        print(msg, file=sys.stderr)
        print(usage, file=sys.stderr)
        sys.exit(1)

    is_python = pysource.looks_like_python
    debug = False

    for o, a in opts:
        if o == '-c':
            is_python = pysource.can_be_compiled
        elif o == '-d':
            debug = True

    if not args:
        print(usage, file=sys.stderr)
        sys.exit(1)

    for fullpath in pysource.walk_python_files(args, is_python):
        if debug:
            print("Testing for coding: %s" % fullpath)
        result = needs_declaration(fullpath)
        if result:
            print(fullpath)


@ -292,7 +292,7 @@ def addsubst(substfile):
if not words: continue
if len(words) == 3 and words[0] == 'struct':
    words[:2] = [words[0] + ' ' + words[1]]
elif len(words) != 2:
    err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line))
    continue
if Reverse:


@ -20,7 +20,7 @@ file ... : files to sum; '-' or no files means stdin
import sys
import os
import getopt
from hashlib import md5

def sum(*files):
    sts = 0


@ -13,7 +13,6 @@
"""

import re,sys

entityRE = re.compile('<!ENTITY +(\w+) +CDATA +"([^"]+)" +-- +((?:.|\n)+?) *-->')
@ -45,7 +44,7 @@ def writefile(f,defs):
            charcode = repr(charcode)
        else:
            charcode = repr(charcode)
        comment = ' '.join(comment.split())
        f.write(" '%s':\t%s, \t# %s\n" % (name,charcode,comment))
    f.write('\n}\n')


@ -76,10 +76,9 @@ def process(filename, table):
            nextline = fp.readline()
            if not nextline: break
            line = line[:-1] + nextline
        m_found = m_import.match(line) or m_from.match(line)
        if m_found:
            (a, b), (a1, b1) = m_found.regs[:2]
        else: continue
        words = line[a1:b1].split(',')
        # print '#', line, words
@ -87,6 +86,7 @@ def process(filename, table):
            word = word.strip()
            if word not in list:
                list.append(word)
    fp.close()

# Compute closure (this is in fact totally general)
@ -123,7 +123,7 @@ def closure(table):
def inverse(table):
    inv = {}
    for key in table.keys():
        if key not in inv:
            inv[key] = []
        for item in table[key]:
            store(inv, item, key)