Merge branch 'main' into gh-60055

This commit is contained in:
Łukasz Langa 2023-04-25 02:35:15 +02:00 committed by GitHub
commit d89b199fff
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
113 changed files with 2163 additions and 962 deletions

View file

@ -9,8 +9,8 @@ ENV WASMTIME_HOME=/opt/wasmtime
ENV WASMTIME_VERSION=7.0.0
ENV WASMTIME_CPU_ARCH=x86_64
RUN dnf -y --nodocs install git clang xz python3-blurb dnf-plugins-core && \
dnf -y --nodocs builddep python3 && \
RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \
dnf -y --nodocs --setopt=install_weak_deps=False builddep python3 && \
dnf -y clean all
RUN mkdir ${WASI_SDK_PATH} && \

View file

@ -6,7 +6,7 @@ on:
jobs:
label:
name: DO-NOT-MERGE
name: DO-NOT-MERGE / unresolved review
runs-on: ubuntu-latest
timeout-minutes: 10
@ -15,4 +15,4 @@ jobs:
with:
mode: exactly
count: 0
labels: "DO-NOT-MERGE"
labels: "DO-NOT-MERGE, awaiting changes, awaiting change review"

View file

@ -232,6 +232,15 @@ Type Objects
.. versionadded:: 3.11
.. c:function:: int PyUnstable_Type_AssignVersionTag(PyTypeObject *type)
Attempt to assign a version tag to the given type.
Returns 1 if the type already had a valid version tag or a new one was
assigned, or 0 if a new tag could not be assigned.
.. versionadded:: 3.12
Creating Heap-Allocated Types
.............................

View file

@ -76,6 +76,13 @@ venvdir = os.getenv('VENVDIR')
if venvdir is not None:
exclude_patterns.append(venvdir + '/*')
nitpick_ignore = [
# Do not error nit-picky mode builds when _SubParsersAction.add_parser cannot
# be resolved, as the method is currently undocumented. For context, see
# https://github.com/python/cpython/pull/103289.
('py:meth', '_SubParsersAction.add_parser'),
]
# Disable Docutils smartquotes for several translations
smartquotes_excludes = {
'languages': ['ja', 'fr', 'zh_TW', 'zh_CN'], 'builders': ['man', 'text'],

View file

@ -337,7 +337,7 @@ Here is an example::
}
PyErr_Format(PyExc_AttributeError,
"'%.50s' object has no attribute '%.400s'",
"'%.100s' object has no attribute '%.400s'",
tp->tp_name, name);
return NULL;
}

View file

@ -1,10 +1,12 @@
.. _argparse-tutorial:
*****************
Argparse Tutorial
*****************
:author: Tshepang Mbambo
.. _argparse-tutorial:
.. currentmodule:: argparse
This tutorial is intended to be a gentle introduction to :mod:`argparse`, the
recommended command-line parsing module in the Python standard library.
@ -12,7 +14,7 @@ recommended command-line parsing module in the Python standard library.
.. note::
There are two other modules that fulfill the same task, namely
:mod:`getopt` (an equivalent for :c:func:`getopt` from the C
:mod:`getopt` (an equivalent for ``getopt()`` from the C
language) and the deprecated :mod:`optparse`.
Note also that :mod:`argparse` is based on :mod:`optparse`,
and therefore very similar in terms of usage.
@ -137,13 +139,13 @@ And running the code:
Here is what's happening:
* We've added the :meth:`add_argument` method, which is what we use to specify
* We've added the :meth:`~ArgumentParser.add_argument` method, which is what we use to specify
which command-line options the program is willing to accept. In this case,
I've named it ``echo`` so that it's in line with its function.
* Calling our program now requires us to specify an option.
* The :meth:`parse_args` method actually returns some data from the
* The :meth:`~ArgumentParser.parse_args` method actually returns some data from the
options specified, in this case, ``echo``.
* The variable is some form of 'magic' that :mod:`argparse` performs for free
@ -256,7 +258,7 @@ Here is what is happening:
* To show that the option is actually optional, there is no error when running
the program without it. Note that by default, if an optional argument isn't
used, the relevant variable, in this case :attr:`args.verbosity`, is
used, the relevant variable, in this case ``args.verbosity``, is
given ``None`` as a value, which is the reason it fails the truth
test of the :keyword:`if` statement.
@ -299,7 +301,7 @@ Here is what is happening:
We even changed the name of the option to match that idea.
Note that we now specify a new keyword, ``action``, and give it the value
``"store_true"``. This means that, if the option is specified,
assign the value ``True`` to :data:`args.verbose`.
assign the value ``True`` to ``args.verbose``.
Not specifying it implies ``False``.
* It complains when you specify a value, in true spirit of what flags
@ -698,7 +700,7 @@ Conflicting options
So far, we have been working with two methods of an
:class:`argparse.ArgumentParser` instance. Let's introduce a third one,
:meth:`add_mutually_exclusive_group`. It allows for us to specify options that
:meth:`~ArgumentParser.add_mutually_exclusive_group`. It allows for us to specify options that
conflict with each other. Let's also change the rest of the program so that
the new functionality makes more sense:
we'll introduce the ``--quiet`` option,

View file

@ -585,7 +585,7 @@ arguments will never be treated as file references.
.. versionchanged:: 3.12
:class:`ArgumentParser` changed encoding and errors to read arguments files
from default (e.g. :func:`locale.getpreferredencoding(False)` and
from default (e.g. :func:`locale.getpreferredencoding(False) <locale.getpreferredencoding>` and
``"strict"``) to :term:`filesystem encoding and error handler`.
Arguments file should be encoded in UTF-8 instead of ANSI Codepage on Windows.
@ -1191,7 +1191,7 @@ done downstream after the arguments are parsed.
For example, JSON or YAML conversions have complex error cases that require
better reporting than can be given by the ``type`` keyword. A
:exc:`~json.JSONDecodeError` would not be well formatted and a
:exc:`FileNotFound` exception would not be handled at all.
:exc:`FileNotFoundError` exception would not be handled at all.
Even :class:`~argparse.FileType` has its limitations for use with the ``type``
keyword. If one argument uses *FileType* and then a subsequent argument fails,
@ -1445,7 +1445,7 @@ Action classes
Action classes implement the Action API, a callable which returns a callable
which processes arguments from the command-line. Any object which follows
this API may be passed as the ``action`` parameter to
:meth:`add_argument`.
:meth:`~ArgumentParser.add_argument`.
.. class:: Action(option_strings, dest, nargs=None, const=None, default=None, \
type=None, choices=None, required=False, help=None, \
@ -1723,7 +1723,7 @@ Sub-commands
:class:`ArgumentParser` supports the creation of such sub-commands with the
:meth:`add_subparsers` method. The :meth:`add_subparsers` method is normally
called with no arguments and returns a special action object. This object
has a single method, :meth:`~ArgumentParser.add_parser`, which takes a
has a single method, :meth:`~_SubParsersAction.add_parser`, which takes a
command name and any :class:`ArgumentParser` constructor arguments, and
returns an :class:`ArgumentParser` object that can be modified as usual.
@ -1789,7 +1789,7 @@ Sub-commands
for that particular parser will be printed. The help message will not
include parent parser or sibling parser messages. (A help message for each
subparser command, however, can be given by supplying the ``help=`` argument
to :meth:`add_parser` as above.)
to :meth:`~_SubParsersAction.add_parser` as above.)
::
@ -2157,7 +2157,7 @@ the populated namespace and the list of remaining argument strings.
.. warning::
:ref:`Prefix matching <prefix-matching>` rules apply to
:meth:`parse_known_args`. The parser may consume an option even if it's just
:meth:`~ArgumentParser.parse_known_args`. The parser may consume an option even if it's just
a prefix of one of its known options, instead of leaving it in the remaining
arguments list.
@ -2295,3 +2295,17 @@ A partial upgrade path from :mod:`optparse` to :mod:`argparse`:
* Replace the OptionParser constructor ``version`` argument with a call to
``parser.add_argument('--version', action='version', version='<the version>')``.
Exceptions
----------
.. exception:: ArgumentError
An error from creating or using an argument (optional or positional).
The string value of this exception is the message, augmented with
information about the argument that caused it.
.. exception:: ArgumentTypeError
Raised when something goes wrong converting a command line string to a type.

View file

@ -304,8 +304,15 @@ Functions and classes provided:
This context manager is :ref:`reentrant <reentrant-cms>`.
If the code within the :keyword:`!with` block raises an
:exc:`ExceptionGroup`, suppressed exceptions are removed from the
group. If any exceptions in the group are not suppressed, a group containing them is re-raised.
.. versionadded:: 3.4
.. versionchanged:: 3.12
``suppress`` now supports suppressing exceptions raised as
part of an :exc:`ExceptionGroup`.
.. function:: redirect_stdout(new_target)

View file

@ -28,8 +28,8 @@ Such constructors may be factory functions or class instances.
.. function:: pickle(type, function, constructor_ob=None)
Declares that *function* should be used as a "reduction" function for objects
of type *type*. *function* should return either a string or a tuple
containing two or three elements. See the :attr:`~pickle.Pickler.dispatch_table`
of type *type*. *function* must return either a string or a tuple
containing two or five elements. See the :attr:`~pickle.Pickler.dispatch_table`
for more details on the interface of *function*.
The *constructor_ob* parameter is a legacy feature and is now ignored, but if

View file

@ -12,8 +12,8 @@
--------------
This module provides a decorator and functions for automatically
adding generated :term:`special method`\s such as :meth:`__init__` and
:meth:`__repr__` to user-defined classes. It was originally described
adding generated :term:`special method`\s such as :meth:`~object.__init__` and
:meth:`~object.__repr__` to user-defined classes. It was originally described
in :pep:`557`.
The member variables to use in these generated methods are defined
@ -31,7 +31,7 @@ using :pep:`526` type annotations. For example, this code::
def total_cost(self) -> float:
return self.unit_price * self.quantity_on_hand
will add, among other things, a :meth:`__init__` that looks like::
will add, among other things, a :meth:`~object.__init__` that looks like::
def __init__(self, name: str, unit_price: float, quantity_on_hand: int = 0):
self.name = name
@ -86,86 +86,86 @@ Module contents
The parameters to :func:`dataclass` are:
- ``init``: If true (the default), a :meth:`__init__` method will be
- ``init``: If true (the default), a :meth:`~object.__init__` method will be
generated.
If the class already defines :meth:`__init__`, this parameter is
If the class already defines :meth:`~object.__init__`, this parameter is
ignored.
- ``repr``: If true (the default), a :meth:`__repr__` method will be
- ``repr``: If true (the default), a :meth:`~object.__repr__` method will be
generated. The generated repr string will have the class name and
the name and repr of each field, in the order they are defined in
the class. Fields that are marked as being excluded from the repr
are not included. For example:
``InventoryItem(name='widget', unit_price=3.0, quantity_on_hand=10)``.
If the class already defines :meth:`__repr__`, this parameter is
If the class already defines :meth:`~object.__repr__`, this parameter is
ignored.
- ``eq``: If true (the default), an :meth:`__eq__` method will be
- ``eq``: If true (the default), an :meth:`~object.__eq__` method will be
generated. This method compares the class as if it were a tuple
of its fields, in order. Both instances in the comparison must
be of the identical type.
If the class already defines :meth:`__eq__`, this parameter is
If the class already defines :meth:`~object.__eq__`, this parameter is
ignored.
- ``order``: If true (the default is ``False``), :meth:`__lt__`,
:meth:`__le__`, :meth:`__gt__`, and :meth:`__ge__` methods will be
- ``order``: If true (the default is ``False``), :meth:`~object.__lt__`,
:meth:`~object.__le__`, :meth:`~object.__gt__`, and :meth:`~object.__ge__` methods will be
generated. These compare the class as if it were a tuple of its
fields, in order. Both instances in the comparison must be of the
identical type. If ``order`` is true and ``eq`` is false, a
:exc:`ValueError` is raised.
If the class already defines any of :meth:`__lt__`,
:meth:`__le__`, :meth:`__gt__`, or :meth:`__ge__`, then
If the class already defines any of :meth:`~object.__lt__`,
:meth:`~object.__le__`, :meth:`~object.__gt__`, or :meth:`~object.__ge__`, then
:exc:`TypeError` is raised.
- ``unsafe_hash``: If ``False`` (the default), a :meth:`__hash__` method
- ``unsafe_hash``: If ``False`` (the default), a :meth:`~object.__hash__` method
is generated according to how ``eq`` and ``frozen`` are set.
:meth:`__hash__` is used by built-in :meth:`hash()`, and when objects are
:meth:`~object.__hash__` is used by built-in :func:`hash`, and when objects are
added to hashed collections such as dictionaries and sets. Having a
:meth:`__hash__` implies that instances of the class are immutable.
:meth:`~object.__hash__` implies that instances of the class are immutable.
Mutability is a complicated property that depends on the programmer's
intent, the existence and behavior of :meth:`__eq__`, and the values of
intent, the existence and behavior of :meth:`~object.__eq__`, and the values of
the ``eq`` and ``frozen`` flags in the :func:`dataclass` decorator.
By default, :func:`dataclass` will not implicitly add a :meth:`__hash__`
By default, :func:`dataclass` will not implicitly add a :meth:`~object.__hash__`
method unless it is safe to do so. Neither will it add or change an
existing explicitly defined :meth:`__hash__` method. Setting the class
existing explicitly defined :meth:`~object.__hash__` method. Setting the class
attribute ``__hash__ = None`` has a specific meaning to Python, as
described in the :meth:`__hash__` documentation.
described in the :meth:`~object.__hash__` documentation.
If :meth:`__hash__` is not explicitly defined, or if it is set to ``None``,
then :func:`dataclass` *may* add an implicit :meth:`__hash__` method.
If :meth:`~object.__hash__` is not explicitly defined, or if it is set to ``None``,
then :func:`dataclass` *may* add an implicit :meth:`~object.__hash__` method.
Although not recommended, you can force :func:`dataclass` to create a
:meth:`__hash__` method with ``unsafe_hash=True``. This might be the case
:meth:`~object.__hash__` method with ``unsafe_hash=True``. This might be the case
if your class is logically immutable but can nonetheless be mutated.
This is a specialized use case and should be considered carefully.
Here are the rules governing implicit creation of a :meth:`__hash__`
method. Note that you cannot both have an explicit :meth:`__hash__`
Here are the rules governing implicit creation of a :meth:`~object.__hash__`
method. Note that you cannot both have an explicit :meth:`~object.__hash__`
method in your dataclass and set ``unsafe_hash=True``; this will result
in a :exc:`TypeError`.
If ``eq`` and ``frozen`` are both true, by default :func:`dataclass` will
generate a :meth:`__hash__` method for you. If ``eq`` is true and
``frozen`` is false, :meth:`__hash__` will be set to ``None``, marking it
generate a :meth:`~object.__hash__` method for you. If ``eq`` is true and
``frozen`` is false, :meth:`~object.__hash__` will be set to ``None``, marking it
unhashable (which it is, since it is mutable). If ``eq`` is false,
:meth:`__hash__` will be left untouched meaning the :meth:`__hash__`
:meth:`~object.__hash__` will be left untouched meaning the :meth:`~object.__hash__`
method of the superclass will be used (if the superclass is
:class:`object`, this means it will fall back to id-based hashing).
- ``frozen``: If true (the default is ``False``), assigning to fields will
generate an exception. This emulates read-only frozen instances. If
:meth:`__setattr__` or :meth:`__delattr__` is defined in the class, then
:meth:`~object.__setattr__` or :meth:`~object.__delattr__` is defined in the class, then
:exc:`TypeError` is raised. See the discussion below.
- ``match_args``: If true (the default is ``True``), the
``__match_args__`` tuple will be created from the list of
parameters to the generated :meth:`__init__` method (even if
:meth:`__init__` is not generated, see above). If false, or if
parameters to the generated :meth:`~object.__init__` method (even if
:meth:`~object.__init__` is not generated, see above). If false, or if
``__match_args__`` is already defined in the class, then
``__match_args__`` will not be generated.
@ -173,18 +173,18 @@ Module contents
- ``kw_only``: If true (the default value is ``False``), then all
fields will be marked as keyword-only. If a field is marked as
keyword-only, then the only effect is that the :meth:`__init__`
keyword-only, then the only effect is that the :meth:`~object.__init__`
parameter generated from a keyword-only field must be specified
with a keyword when :meth:`__init__` is called. There is no
with a keyword when :meth:`~object.__init__` is called. There is no
effect on any other aspect of dataclasses. See the
:term:`parameter` glossary entry for details. Also see the
:const:`KW_ONLY` section.
.. versionadded:: 3.10
- ``slots``: If true (the default is ``False``), :attr:`__slots__` attribute
- ``slots``: If true (the default is ``False``), :attr:`~object.__slots__` attribute
will be generated and new class will be returned instead of the original one.
If :attr:`__slots__` is already defined in the class, then :exc:`TypeError`
If :attr:`~object.__slots__` is already defined in the class, then :exc:`TypeError`
is raised.
.. versionadded:: 3.10
@ -215,7 +215,7 @@ Module contents
b: int = 0 # assign a default value for 'b'
In this example, both ``a`` and ``b`` will be included in the added
:meth:`__init__` method, which will be defined as::
:meth:`~object.__init__` method, which will be defined as::
def __init__(self, a: int, b: int = 0):
@ -256,13 +256,13 @@ Module contents
error to specify both ``default`` and ``default_factory``.
- ``init``: If true (the default), this field is included as a
parameter to the generated :meth:`__init__` method.
parameter to the generated :meth:`~object.__init__` method.
- ``repr``: If true (the default), this field is included in the
string returned by the generated :meth:`__repr__` method.
string returned by the generated :meth:`~object.__repr__` method.
- ``hash``: This can be a bool or ``None``. If true, this field is
included in the generated :meth:`__hash__` method. If ``None`` (the
included in the generated :meth:`~object.__hash__` method. If ``None`` (the
default), use the value of ``compare``: this would normally be
the expected behavior. A field should be considered in the hash
if it's used for comparisons. Setting this value to anything
@ -275,8 +275,8 @@ Module contents
is excluded from the hash, it will still be used for comparisons.
- ``compare``: If true (the default), this field is included in the
generated equality and comparison methods (:meth:`__eq__`,
:meth:`__gt__`, et al.).
generated equality and comparison methods (:meth:`~object.__eq__`,
:meth:`~object.__gt__`, et al.).
- ``metadata``: This can be a mapping or None. None is treated as
an empty dict. This value is wrapped in
@ -287,7 +287,7 @@ Module contents
namespace in the metadata.
- ``kw_only``: If true, this field will be marked as keyword-only.
This is used when the generated :meth:`__init__` method's
This is used when the generated :meth:`~object.__init__` method's
parameters are computed.
.. versionadded:: 3.10
@ -435,13 +435,13 @@ Module contents
Class, raises :exc:`TypeError`. If values in ``changes`` do not
specify fields, raises :exc:`TypeError`.
The newly returned object is created by calling the :meth:`__init__`
The newly returned object is created by calling the :meth:`~object.__init__`
method of the dataclass. This ensures that
:meth:`__post_init__`, if present, is also called.
:ref:`__post_init__ <post-init-processing>`, if present, is also called.
Init-only variables without default values, if any exist, must be
specified on the call to :func:`replace` so that they can be passed to
:meth:`__init__` and :meth:`__post_init__`.
:meth:`~object.__init__` and :ref:`__post_init__ <post-init-processing>`.
It is an error for ``changes`` to contain any fields that are
defined as having ``init=False``. A :exc:`ValueError` will be raised
@ -449,7 +449,7 @@ Module contents
Be forewarned about how ``init=False`` fields work during a call to
:func:`replace`. They are not copied from the source object, but
rather are initialized in :meth:`__post_init__`, if they're
rather are initialized in :ref:`__post_init__ <post-init-processing>`, if they're
initialized at all. It is expected that ``init=False`` fields will
be rarely and judiciously used. If they are used, it might be wise
to have alternate class constructors, or perhaps a custom
@ -480,7 +480,7 @@ Module contents
:const:`KW_ONLY` is otherwise completely ignored. This includes the
name of such a field. By convention, a name of ``_`` is used for a
:const:`KW_ONLY` field. Keyword-only fields signify
:meth:`__init__` parameters that must be specified as keywords when
:meth:`~object.__init__` parameters that must be specified as keywords when
the class is instantiated.
In this example, the fields ``y`` and ``z`` will be marked as keyword-only fields::
@ -501,20 +501,22 @@ Module contents
.. exception:: FrozenInstanceError
Raised when an implicitly defined :meth:`__setattr__` or
:meth:`__delattr__` is called on a dataclass which was defined with
Raised when an implicitly defined :meth:`~object.__setattr__` or
:meth:`~object.__delattr__` is called on a dataclass which was defined with
``frozen=True``. It is a subclass of :exc:`AttributeError`.
.. _post-init-processing:
Post-init processing
--------------------
The generated :meth:`__init__` code will call a method named
:meth:`__post_init__`, if :meth:`__post_init__` is defined on the
The generated :meth:`~object.__init__` code will call a method named
:meth:`!__post_init__`, if :meth:`!__post_init__` is defined on the
class. It will normally be called as ``self.__post_init__()``.
However, if any ``InitVar`` fields are defined, they will also be
passed to :meth:`__post_init__` in the order they were defined in the
class. If no :meth:`__init__` method is generated, then
:meth:`__post_init__` will not automatically be called.
passed to :meth:`!__post_init__` in the order they were defined in the
class. If no :meth:`~object.__init__` method is generated, then
:meth:`!__post_init__` will not automatically be called.
Among other uses, this allows for initializing field values that
depend on one or more other fields. For example::
@ -528,10 +530,10 @@ depend on one or more other fields. For example::
def __post_init__(self):
self.c = self.a + self.b
The :meth:`__init__` method generated by :func:`dataclass` does not call base
class :meth:`__init__` methods. If the base class has an :meth:`__init__` method
The :meth:`~object.__init__` method generated by :func:`dataclass` does not call base
class :meth:`~object.__init__` methods. If the base class has an :meth:`~object.__init__` method
that has to be called, it is common to call this method in a
:meth:`__post_init__` method::
:meth:`!__post_init__` method::
@dataclass
class Rectangle:
@ -545,12 +547,12 @@ that has to be called, it is common to call this method in a
def __post_init__(self):
super().__init__(self.side, self.side)
Note, however, that in general the dataclass-generated :meth:`__init__` methods
Note, however, that in general the dataclass-generated :meth:`~object.__init__` methods
don't need to be called, since the derived dataclass will take care of
initializing all fields of any base class that is a dataclass itself.
See the section below on init-only variables for ways to pass
parameters to :meth:`__post_init__`. Also see the warning about how
parameters to :meth:`!__post_init__`. Also see the warning about how
:func:`replace` handles ``init=False`` fields.
Class variables
@ -573,8 +575,8 @@ if the type of a field is of type ``dataclasses.InitVar``. If a field
is an ``InitVar``, it is considered a pseudo-field called an init-only
field. As it is not a true field, it is not returned by the
module-level :func:`fields` function. Init-only fields are added as
parameters to the generated :meth:`__init__` method, and are passed to
the optional :meth:`__post_init__` method. They are not otherwise used
parameters to the generated :meth:`~object.__init__` method, and are passed to
the optional :ref:`__post_init__ <post-init-processing>` method. They are not otherwise used
by dataclasses.
For example, suppose a field will be initialized from a database, if a
@ -601,12 +603,12 @@ Frozen instances
It is not possible to create truly immutable Python objects. However,
by passing ``frozen=True`` to the :meth:`dataclass` decorator you can
emulate immutability. In that case, dataclasses will add
:meth:`__setattr__` and :meth:`__delattr__` methods to the class. These
:meth:`~object.__setattr__` and :meth:`~object.__delattr__` methods to the class. These
methods will raise a :exc:`FrozenInstanceError` when invoked.
There is a tiny performance penalty when using ``frozen=True``:
:meth:`__init__` cannot use simple assignment to initialize fields, and
must use :meth:`object.__setattr__`.
:meth:`~object.__init__` cannot use simple assignment to initialize fields, and
must use :meth:`~object.__setattr__`.
Inheritance
-----------
@ -634,14 +636,14 @@ example::
The final list of fields is, in order, ``x``, ``y``, ``z``. The final
type of ``x`` is ``int``, as specified in class ``C``.
The generated :meth:`__init__` method for ``C`` will look like::
The generated :meth:`~object.__init__` method for ``C`` will look like::
def __init__(self, x: int = 15, y: int = 0, z: int = 10):
Re-ordering of keyword-only parameters in :meth:`__init__`
----------------------------------------------------------
Re-ordering of keyword-only parameters in :meth:`~object.__init__`
------------------------------------------------------------------
After the parameters needed for :meth:`__init__` are computed, any
After the parameters needed for :meth:`~object.__init__` are computed, any
keyword-only parameters are moved to come after all regular
(non-keyword-only) parameters. This is a requirement of how
keyword-only parameters are implemented in Python: they must come
@ -662,7 +664,7 @@ fields, and ``Base.x`` and ``D.z`` are regular fields::
z: int = 10
t: int = field(kw_only=True, default=0)
The generated :meth:`__init__` method for ``D`` will look like::
The generated :meth:`~object.__init__` method for ``D`` will look like::
def __init__(self, x: Any = 15.0, z: int = 10, *, y: int = 0, w: int = 1, t: int = 0):
@ -671,7 +673,7 @@ the list of fields: parameters derived from regular fields are
followed by parameters derived from keyword-only fields.
The relative ordering of keyword-only parameters is maintained in the
re-ordered :meth:`__init__` parameter list.
re-ordered :meth:`~object.__init__` parameter list.
Default factory functions
@ -683,10 +685,10 @@ example, to create a new instance of a list, use::
mylist: list = field(default_factory=list)
If a field is excluded from :meth:`__init__` (using ``init=False``)
If a field is excluded from :meth:`~object.__init__` (using ``init=False``)
and the field also specifies ``default_factory``, then the default
factory function will always be called from the generated
:meth:`__init__` function. This happens because there is no other
:meth:`~object.__init__` function. This happens because there is no other
way to give the field an initial value.
Mutable default values
@ -714,7 +716,7 @@ Using dataclasses, *if* this code was valid::
@dataclass
class D:
x: List = []
x: list = [] # This code raises ValueError
def add(self, element):
self.x += element

View file

@ -1043,7 +1043,7 @@ Other constructors, all class methods:
Return a :class:`.datetime` corresponding to *date_string*, parsed according to
*format*.
This is equivalent to::
If *format* does not contain microseconds or timezone information, this is equivalent to::
datetime(*(time.strptime(date_string, format)[0:6]))
@ -2510,10 +2510,7 @@ Notes:
Because the format depends on the current locale, care should be taken when
making assumptions about the output value. Field orderings will vary (for
example, "month/day/year" versus "day/month/year"), and the output may
contain Unicode characters encoded using the locale's default encoding (for
example, if the current locale is ``ja_JP``, the default encoding could be
any one of ``eucJP``, ``SJIS``, or ``utf-8``; use :meth:`locale.getlocale`
to determine the current locale's encoding).
contain non-ASCII characters.
(2)
The :meth:`strptime` method can parse years in the full [1, 9999] range, but

View file

@ -926,7 +926,7 @@ Each thread has its own current context which is accessed or changed using the
You can also use the :keyword:`with` statement and the :func:`localcontext`
function to temporarily change the active context.
.. function:: localcontext(ctx=None, \*\*kwargs)
.. function:: localcontext(ctx=None, **kwargs)
Return a context manager that will set the current context for the active thread
to a copy of *ctx* on entry to the with-statement and restore the previous context

View file

@ -1036,6 +1036,24 @@ iterations of the loop.
pushed to the stack before the attribute or unbound method respectively.
.. opcode:: LOAD_SUPER_ATTR (namei)
This opcode implements :func:`super` (e.g. ``super().method()`` and
``super().attr``). It works the same as :opcode:`LOAD_ATTR`, except that
``namei`` is shifted left by 2 bits instead of 1, and instead of expecting a
single receiver on the stack, it expects three objects (from top of stack
down): ``self`` (the first argument to the current method), ``cls`` (the
class within which the current method was defined), and the global ``super``.
The low bit of ``namei`` signals to attempt a method load, as with
:opcode:`LOAD_ATTR`.
The second-low bit of ``namei``, if set, means that this was a two-argument
call to :func:`super` (unset means zero-argument).
.. versionadded:: 3.12
.. opcode:: COMPARE_OP (opname)
Performs a Boolean operation. The operation name can be found in

View file

@ -954,7 +954,16 @@ The canonical way to create an :class:`Option` instance is with the
As you can see, most actions involve storing or updating a value somewhere.
:mod:`optparse` always creates a special object for this, conventionally called
``options`` (it happens to be an instance of :class:`optparse.Values`). Option
``options``, which is an instance of :class:`optparse.Values`.
.. class:: Values
An object holding parsed argument names and values as attributes.
Normally created when calling :meth:`OptionParser.parse_args`,
and can be overridden by a custom subclass passed to the *values* argument of
:meth:`OptionParser.parse_args` (as described in :ref:`optparse-parsing-arguments`).
Option
arguments (and various other values) are stored as attributes of this object,
according to the :attr:`~Option.dest` (destination) option attribute.
@ -991,6 +1000,14 @@ one that makes sense for *all* options.
Option attributes
^^^^^^^^^^^^^^^^^
.. class:: Option
A single command line argument,
with various attributes passed by keyword to the constructor.
Normally created with :meth:`OptionParser.add_option` rather than directly,
and can be overridden by a custom class via the *option_class* argument
to :class:`OptionParser`.
The following option attributes may be passed as keyword arguments to
:meth:`OptionParser.add_option`. If you pass an option attribute that is not
relevant to a particular option, or fail to pass a required option attribute,
@ -2035,3 +2052,27 @@ Features of note:
about setting a default value for the option destinations in question; they
can just leave the default as ``None`` and :meth:`ensure_value` will take care of
getting it right when it's needed.
Exceptions
----------
.. exception:: OptionError
Raised if an :class:`Option` instance is created with invalid or
inconsistent arguments.
.. exception:: OptionConflictError
Raised if conflicting options are added to an :class:`OptionParser`.
.. exception:: OptionValueError
Raised if an invalid option value is encountered on the command line.
.. exception:: BadOptionError
Raised if an invalid option is passed on the command line.
.. exception:: AmbiguousOptionError
Raised if an ambiguous option is passed on the command line.

View file

@ -3919,7 +3919,8 @@ to be ignored.
the :envvar:`PATH` variable. The other variants, :func:`execl`, :func:`execle`,
:func:`execv`, and :func:`execve`, will not use the :envvar:`PATH` variable to
locate the executable; *path* must contain an appropriate absolute or relative
path.
path. Relative paths must include at least one slash, even on Windows, as
plain names will not be resolved.
For :func:`execle`, :func:`execlpe`, :func:`execve`, and :func:`execvpe` (note
that these all end in "e"), the *env* parameter must be a mapping which is

View file

@ -82,7 +82,7 @@ support.
This is a backwards compatibility wrapper around
:func:`importlib.util.find_spec` that converts most failures to
:exc:`ImportError` and only returns the loader rather than the full
:class:`ModuleSpec`.
:class:`importlib.machinery.ModuleSpec`.
.. versionchanged:: 3.3
Updated to be based directly on :mod:`importlib` rather than relying

View file

@ -140,9 +140,16 @@ server is the address family.
ForkingUDPServer
ThreadingTCPServer
ThreadingUDPServer
ForkingUnixStreamServer
ForkingUnixDatagramServer
ThreadingUnixStreamServer
ThreadingUnixDatagramServer
These classes are pre-defined using the mix-in classes.
.. versionadded:: 3.12
The ``ForkingUnixStreamServer`` and ``ForkingUnixDatagramServer`` classes
were added.
To implement a service, you must derive a class from :class:`BaseRequestHandler`
and redefine its :meth:`~BaseRequestHandler.handle` method.

View file

@ -919,9 +919,12 @@ Reassigning them to new values is unsupported:
.. attribute:: Popen.returncode
The child return code, set by :meth:`poll` and :meth:`wait` (and indirectly
by :meth:`communicate`). A ``None`` value indicates that the process
hasn't terminated yet.
The child return code. Initially ``None``, :attr:`returncode` is set by
a call to the :meth:`poll`, :meth:`wait`, or :meth:`communicate` methods
if they detect that the process has terminated.
A ``None`` value indicates that the process hadn't yet terminated at the
time of the last method call.
A negative value ``-N`` indicates that the child was terminated by signal
``N`` (POSIX only).

View file

@ -59,7 +59,6 @@ Doc/faq/gui.rst
Doc/faq/library.rst
Doc/faq/programming.rst
Doc/glossary.rst
Doc/howto/argparse.rst
Doc/howto/curses.rst
Doc/howto/descriptor.rst
Doc/howto/enum.rst
@ -78,7 +77,6 @@ Doc/library/__future__.rst
Doc/library/_thread.rst
Doc/library/abc.rst
Doc/library/aifc.rst
Doc/library/argparse.rst
Doc/library/ast.rst
Doc/library/asyncio-dev.rst
Doc/library/asyncio-eventloop.rst
@ -113,7 +111,6 @@ Doc/library/csv.rst
Doc/library/ctypes.rst
Doc/library/curses.ascii.rst
Doc/library/curses.rst
Doc/library/dataclasses.rst
Doc/library/datetime.rst
Doc/library/dbm.rst
Doc/library/decimal.rst
@ -180,7 +177,6 @@ Doc/library/os.rst
Doc/library/ossaudiodev.rst
Doc/library/pickle.rst
Doc/library/pickletools.rst
Doc/library/pkgutil.rst
Doc/library/platform.rst
Doc/library/plistlib.rst
Doc/library/poplib.rst

View file

@ -470,7 +470,7 @@ user's system, including environment variables, system registry settings, and
installed packages. The standard library is included as pre-compiled and
optimized ``.pyc`` files in a ZIP, and ``python3.dll``, ``python37.dll``,
``python.exe`` and ``pythonw.exe`` are all provided. Tcl/tk (including all
dependants, such as Idle), pip and the Python documentation are not included.
dependents, such as Idle), pip and the Python documentation are not included.
.. note::

View file

@ -245,6 +245,8 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config,
/* --- PyInterpreterConfig ------------------------------------ */
typedef struct {
// XXX "allow_object_sharing"? "own_objects"?
int use_main_obmalloc;
int allow_fork;
int allow_exec;
int allow_threads;
@ -254,6 +256,7 @@ typedef struct {
#define _PyInterpreterConfig_INIT \
{ \
.use_main_obmalloc = 0, \
.allow_fork = 0, \
.allow_exec = 0, \
.allow_threads = 1, \
@ -263,6 +266,7 @@ typedef struct {
#define _PyInterpreterConfig_LEGACY_INIT \
{ \
.use_main_obmalloc = 1, \
.allow_fork = 1, \
.allow_exec = 1, \
.allow_threads = 1, \

View file

@ -564,3 +564,10 @@ PyAPI_FUNC(int) PyType_AddWatcher(PyType_WatchCallback callback);
PyAPI_FUNC(int) PyType_ClearWatcher(int watcher_id);
PyAPI_FUNC(int) PyType_Watch(int watcher_id, PyObject *type);
PyAPI_FUNC(int) PyType_Unwatch(int watcher_id, PyObject *type);
/* Attempt to assign a version tag to the given type.
*
* Returns 1 if the type already had a valid version tag or a new one was
* assigned, or 0 if a new tag could not be assigned.
*/
PyAPI_FUNC(int) PyUnstable_Type_AssignVersionTag(PyTypeObject *type);

View file

@ -11,6 +11,10 @@ is available in a given context. For example, forking the process
might not be allowed in the current interpreter (i.e. os.fork() would fail).
*/
/* Set if the interpreter shares obmalloc runtime state
with the main interpreter. */
#define Py_RTFLAGS_USE_MAIN_OBMALLOC (1UL << 5)
/* Set if import should check a module for subinterpreter support. */
#define Py_RTFLAGS_MULTI_INTERP_EXTENSIONS (1UL << 8)

View file

@ -23,11 +23,12 @@ extern "C" {
#include "pycore_function.h" // FUNC_MAX_WATCHERS
#include "pycore_genobject.h" // struct _Py_async_gen_state
#include "pycore_gc.h" // struct _gc_runtime_state
#include "pycore_global_objects.h" // struct _Py_interp_static_objects
#include "pycore_import.h" // struct _import_state
#include "pycore_instruments.h" // PY_MONITORING_EVENTS
#include "pycore_list.h" // struct _Py_list_state
#include "pycore_global_objects.h" // struct _Py_interp_static_objects
#include "pycore_object_state.h" // struct _py_object_state
#include "pycore_obmalloc.h" // struct obmalloc_state
#include "pycore_tuple.h" // struct _Py_tuple_state
#include "pycore_typeobject.h" // struct type_cache
#include "pycore_unicodeobject.h" // struct _Py_unicode_state
@ -82,6 +83,8 @@ struct _is {
int _initialized;
int finalizing;
struct _obmalloc_state obmalloc;
struct _ceval_state ceval;
struct _gc_runtime_state gc;

View file

@ -657,8 +657,12 @@ struct _obmalloc_usage {
#endif /* WITH_PYMALLOC_RADIX_TREE */
struct _obmalloc_state {
struct _obmalloc_global_state {
int dump_debug_stats;
Py_ssize_t interpreter_leaks;
};
struct _obmalloc_state {
struct _obmalloc_pools pools;
struct _obmalloc_mgmt mgmt;
struct _obmalloc_usage usage;
@ -675,7 +679,11 @@ void _PyObject_VirtualFree(void *, size_t size);
/* This function returns the number of allocated memory blocks, regardless of size */
PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void);
extern Py_ssize_t _Py_GetGlobalAllocatedBlocks(void);
#define _Py_GetAllocatedBlocks() \
_Py_GetGlobalAllocatedBlocks()
extern Py_ssize_t _PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *);
extern void _PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *);
#ifdef WITH_PYMALLOC

View file

@ -54,9 +54,13 @@ extern "C" {
# error "NB_SMALL_SIZE_CLASSES should be less than 64"
#endif
#define _obmalloc_state_INIT(obmalloc) \
#define _obmalloc_global_state_INIT \
{ \
.dump_debug_stats = -1, \
}
#define _obmalloc_state_INIT(obmalloc) \
{ \
.pools = { \
.used = _obmalloc_pools_INIT(obmalloc.pools), \
}, \

View file

@ -179,6 +179,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
[LOAD_GLOBAL_BUILTIN] = LOAD_GLOBAL,
[LOAD_GLOBAL_MODULE] = LOAD_GLOBAL,
[LOAD_NAME] = LOAD_NAME,
[LOAD_SUPER_ATTR] = LOAD_SUPER_ATTR,
[MAKE_CELL] = MAKE_CELL,
[MAKE_FUNCTION] = MAKE_FUNCTION,
[MAP_ADD] = MAP_ADD,
@ -236,7 +237,7 @@ const uint8_t _PyOpcode_Deopt[256] = {
#endif // NEED_OPCODE_TABLES
#ifdef Py_DEBUG
static const char *const _PyOpcode_OpName[263] = {
static const char *const _PyOpcode_OpName[266] = {
[CACHE] = "CACHE",
[POP_TOP] = "POP_TOP",
[PUSH_NULL] = "PUSH_NULL",
@ -378,9 +379,9 @@ static const char *const _PyOpcode_OpName[263] = {
[STORE_DEREF] = "STORE_DEREF",
[DELETE_DEREF] = "DELETE_DEREF",
[JUMP_BACKWARD] = "JUMP_BACKWARD",
[STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
[LOAD_SUPER_ATTR] = "LOAD_SUPER_ATTR",
[CALL_FUNCTION_EX] = "CALL_FUNCTION_EX",
[STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[STORE_FAST__STORE_FAST] = "STORE_FAST__STORE_FAST",
[EXTENDED_ARG] = "EXTENDED_ARG",
[LIST_APPEND] = "LIST_APPEND",
[SET_ADD] = "SET_ADD",
@ -390,15 +391,15 @@ static const char *const _PyOpcode_OpName[263] = {
[YIELD_VALUE] = "YIELD_VALUE",
[RESUME] = "RESUME",
[MATCH_CLASS] = "MATCH_CLASS",
[STORE_SUBSCR_DICT] = "STORE_SUBSCR_DICT",
[STORE_SUBSCR_LIST_INT] = "STORE_SUBSCR_LIST_INT",
[UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
[FORMAT_VALUE] = "FORMAT_VALUE",
[BUILD_CONST_KEY_MAP] = "BUILD_CONST_KEY_MAP",
[BUILD_STRING] = "BUILD_STRING",
[UNPACK_SEQUENCE_LIST] = "UNPACK_SEQUENCE_LIST",
[UNPACK_SEQUENCE_TUPLE] = "UNPACK_SEQUENCE_TUPLE",
[UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE",
[SEND_GEN] = "SEND_GEN",
[161] = "<161>",
[LIST_EXTEND] = "LIST_EXTEND",
[SET_UPDATE] = "SET_UPDATE",
[DICT_MERGE] = "DICT_MERGE",
@ -500,11 +501,13 @@ static const char *const _PyOpcode_OpName[263] = {
[JUMP] = "JUMP",
[JUMP_NO_INTERRUPT] = "JUMP_NO_INTERRUPT",
[LOAD_METHOD] = "LOAD_METHOD",
[LOAD_SUPER_METHOD] = "LOAD_SUPER_METHOD",
[LOAD_ZERO_SUPER_METHOD] = "LOAD_ZERO_SUPER_METHOD",
[LOAD_ZERO_SUPER_ATTR] = "LOAD_ZERO_SUPER_ATTR",
};
#endif
#define EXTRA_CASES \
case 161: \
case 166: \
case 167: \
case 168: \

View file

@ -64,6 +64,7 @@ extern void _PyAtExit_Fini(PyInterpreterState *interp);
extern void _PyThread_FiniType(PyInterpreterState *interp);
extern void _Py_Deepfreeze_Fini(void);
extern void _PyArg_Fini(void);
extern void _Py_FinalizeAllocatedBlocks(_PyRuntimeState *);
extern PyStatus _PyGILState_Init(PyInterpreterState *interp);
extern PyStatus _PyGILState_SetTstate(PyThreadState *tstate);

View file

@ -33,6 +33,13 @@ _Py_IsMainInterpreter(PyInterpreterState *interp)
return (interp == _PyInterpreterState_Main());
}
static inline int
_Py_IsMainInterpreterFinalizing(PyInterpreterState *interp)
{
return (_PyRuntimeState_GetFinalizing(interp->runtime) != NULL &&
interp == &interp->runtime->_main_interpreter);
}
static inline const PyConfig *
_Py_GetMainConfig(void)
@ -64,17 +71,14 @@ _Py_ThreadCanHandlePendingCalls(void)
/* Variable and macro for in-line access to current thread
and interpreter state */
static inline PyThreadState*
_PyRuntimeState_GetThreadState(_PyRuntimeState *runtime)
{
return (PyThreadState*)_Py_atomic_load_relaxed(&runtime->tstate_current);
}
#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE)
extern _Py_thread_local PyThreadState *_Py_tss_tstate;
#endif
PyAPI_DATA(PyThreadState *) _PyThreadState_GetCurrent(void);
/* Get the current Python thread state.
Efficient macro reading directly the 'tstate_current' atomic
variable. The macro is unsafe: it does not check for error and it can
return NULL.
This function is unsafe: it does not check for error and it can return NULL.
The caller must hold the GIL.
@ -82,9 +86,20 @@ _PyRuntimeState_GetThreadState(_PyRuntimeState *runtime)
static inline PyThreadState*
_PyThreadState_GET(void)
{
return _PyRuntimeState_GetThreadState(&_PyRuntime);
#if defined(HAVE_THREAD_LOCAL) && !defined(Py_BUILD_CORE_MODULE)
return _Py_tss_tstate;
#else
return _PyThreadState_GetCurrent();
#endif
}
static inline PyThreadState*
_PyRuntimeState_GetThreadState(_PyRuntimeState *Py_UNUSED(runtime))
{
return _PyThreadState_GET();
}
static inline void
_Py_EnsureFuncTstateNotNULL(const char *func, PyThreadState *tstate)
{

View file

@ -21,10 +21,10 @@ extern "C" {
#include "pycore_pymem.h" // struct _pymem_allocators
#include "pycore_pyhash.h" // struct pyhash_runtime_state
#include "pycore_pythread.h" // struct _pythread_runtime_state
#include "pycore_obmalloc.h" // struct obmalloc_state
#include "pycore_signal.h" // struct _signals_runtime_state
#include "pycore_time.h" // struct _time_runtime_state
#include "pycore_tracemalloc.h" // struct _tracemalloc_runtime_state
#include "pycore_typeobject.h" // struct types_runtime_state
#include "pycore_unicodeobject.h" // struct _Py_unicode_runtime_ids
struct _getargs_runtime_state {
@ -87,7 +87,7 @@ typedef struct pyruntimestate {
_Py_atomic_address _finalizing;
struct _pymem_allocators allocators;
struct _obmalloc_state obmalloc;
struct _obmalloc_global_state obmalloc;
struct pyhash_runtime_state pyhash_state;
struct _time_runtime_state time;
struct _pythread_runtime_state threads;
@ -119,9 +119,6 @@ typedef struct pyruntimestate {
unsigned long main_thread;
/* Assuming the current thread holds the GIL, this is the
PyThreadState for the current thread. */
_Py_atomic_address tstate_current;
/* Used for the thread state bound to the current thread. */
Py_tss_t autoTSSkey;
@ -153,13 +150,7 @@ typedef struct pyruntimestate {
struct _py_object_runtime_state object_state;
struct _Py_float_runtime_state float_state;
struct _Py_unicode_runtime_state unicode_state;
struct {
/* Used to set PyTypeObject.tp_version_tag */
// bpo-42745: next_version_tag remains shared by all interpreters
// because of static types.
unsigned int next_version_tag;
} types;
struct _types_runtime_state types;
/* All the objects that are shared by the runtime's interpreters. */
struct _Py_static_objects static_objects;

View file

@ -29,7 +29,7 @@ extern PyTypeObject _PyExc_MemoryError;
_pymem_allocators_debug_INIT, \
_pymem_allocators_obj_arena_INIT, \
}, \
.obmalloc = _obmalloc_state_INIT(runtime.obmalloc), \
.obmalloc = _obmalloc_global_state_INIT, \
.pyhash_state = pyhash_state_INIT, \
.signals = _signals_RUNTIME_INIT, \
.interpreters = { \
@ -93,6 +93,7 @@ extern PyTypeObject _PyExc_MemoryError;
{ \
.id_refcount = -1, \
.imports = IMPORTS_INIT, \
.obmalloc = _obmalloc_state_INIT(INTERP.obmalloc), \
.ceval = { \
.recursion_limit = Py_DEFAULT_RECURSION_LIMIT, \
}, \
@ -112,6 +113,9 @@ extern PyTypeObject _PyExc_MemoryError;
.func_state = { \
.next_version = 1, \
}, \
.types = { \
.next_version_tag = _Py_TYPE_BASE_VERSION_TAG, \
}, \
.static_objects = { \
.singletons = { \
._not_used = 1, \

View file

@ -11,22 +11,17 @@ extern "C" {
#endif
/* runtime lifecycle */
/* state */
extern PyStatus _PyTypes_InitTypes(PyInterpreterState *);
extern void _PyTypes_FiniTypes(PyInterpreterState *);
extern void _PyTypes_Fini(PyInterpreterState *);
#define _Py_TYPE_BASE_VERSION_TAG (2<<16)
#define _Py_MAX_GLOBAL_TYPE_VERSION_TAG (_Py_TYPE_BASE_VERSION_TAG - 1)
/* other API */
/* Length of array of slotdef pointers used to store slots with the
same __name__. There should be at most MAX_EQUIV-1 slotdef entries with
the same __name__, for any __name__. Since that's a static property, it is
appropriate to declare fixed-size arrays for this. */
#define MAX_EQUIV 10
typedef struct wrapperbase pytype_slotdef;
struct _types_runtime_state {
/* Used to set PyTypeObject.tp_version_tag for core static types. */
// bpo-42745: next_version_tag remains shared by all interpreters
// because of static types.
unsigned int next_version_tag;
};
// Type attribute lookup cache: speed up attribute and method lookups,
@ -57,6 +52,36 @@ typedef struct {
PyObject *tp_weaklist;
} static_builtin_state;
struct types_state {
/* Used to set PyTypeObject.tp_version_tag.
It starts at _Py_MAX_GLOBAL_TYPE_VERSION_TAG + 1,
where all those lower numbers are used for core static types. */
unsigned int next_version_tag;
struct type_cache type_cache;
size_t num_builtins_initialized;
static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES];
};
/* runtime lifecycle */
extern PyStatus _PyTypes_InitTypes(PyInterpreterState *);
extern void _PyTypes_FiniTypes(PyInterpreterState *);
extern void _PyTypes_Fini(PyInterpreterState *);
/* other API */
/* Length of array of slotdef pointers used to store slots with the
same __name__. There should be at most MAX_EQUIV-1 slotdef entries with
the same __name__, for any __name__. Since that's a static property, it is
appropriate to declare fixed-size arrays for this. */
#define MAX_EQUIV 10
typedef struct wrapperbase pytype_slotdef;
static inline PyObject **
_PyStaticType_GET_WEAKREFS_LISTPTR(static_builtin_state *state)
{
@ -78,12 +103,6 @@ _PyType_GetModuleState(PyTypeObject *type)
return mod->md_state;
}
struct types_state {
struct type_cache type_cache;
size_t num_builtins_initialized;
static_builtin_state builtins[_Py_MAX_STATIC_BUILTIN_TYPES];
};
extern int _PyStaticType_InitBuiltin(PyTypeObject *type);
extern static_builtin_state * _PyStaticType_GetState(PyTypeObject *);
@ -98,6 +117,9 @@ _Py_type_getattro(PyTypeObject *type, PyObject *name);
PyObject *_Py_slot_tp_getattro(PyObject *self, PyObject *name);
PyObject *_Py_slot_tp_getattr_hook(PyObject *self, PyObject *name);
PyObject *
_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *meth_found);
#ifdef __cplusplus
}
#endif

23
Include/opcode.h generated
View file

@ -95,6 +95,7 @@ extern "C" {
#define STORE_DEREF 138
#define DELETE_DEREF 139
#define JUMP_BACKWARD 140
#define LOAD_SUPER_ATTR 141
#define CALL_FUNCTION_EX 142
#define EXTENDED_ARG 144
#define LIST_APPEND 145
@ -142,7 +143,10 @@ extern "C" {
#define JUMP 260
#define JUMP_NO_INTERRUPT 261
#define LOAD_METHOD 262
#define MAX_PSEUDO_OPCODE 262
#define LOAD_SUPER_METHOD 263
#define LOAD_ZERO_SUPER_METHOD 264
#define LOAD_ZERO_SUPER_ATTR 265
#define MAX_PSEUDO_OPCODE 265
#define BINARY_OP_ADD_FLOAT 6
#define BINARY_OP_ADD_INT 7
#define BINARY_OP_ADD_UNICODE 8
@ -198,18 +202,21 @@ extern "C" {
#define STORE_ATTR_SLOT 111
#define STORE_ATTR_WITH_HINT 112
#define STORE_FAST__LOAD_FAST 113
#define STORE_FAST__STORE_FAST 141
#define STORE_SUBSCR_DICT 143
#define STORE_SUBSCR_LIST_INT 153
#define UNPACK_SEQUENCE_LIST 154
#define UNPACK_SEQUENCE_TUPLE 158
#define UNPACK_SEQUENCE_TWO_TUPLE 159
#define SEND_GEN 160
#define STORE_FAST__STORE_FAST 143
#define STORE_SUBSCR_DICT 153
#define STORE_SUBSCR_LIST_INT 154
#define UNPACK_SEQUENCE_LIST 158
#define UNPACK_SEQUENCE_TUPLE 159
#define UNPACK_SEQUENCE_TWO_TUPLE 160
#define SEND_GEN 161
#define HAS_ARG(op) ((((op) >= HAVE_ARGUMENT) && (!IS_PSEUDO_OPCODE(op)))\
|| ((op) == JUMP) \
|| ((op) == JUMP_NO_INTERRUPT) \
|| ((op) == LOAD_METHOD) \
|| ((op) == LOAD_SUPER_METHOD) \
|| ((op) == LOAD_ZERO_SUPER_METHOD) \
|| ((op) == LOAD_ZERO_SUPER_ATTR) \
)
#define HAS_CONST(op) (false\

View file

@ -662,6 +662,27 @@ extern char * _getpty(int *, int, mode_t, int);
# define WITH_THREAD
#endif
#ifdef WITH_THREAD
# ifdef Py_BUILD_CORE
# ifdef HAVE_THREAD_LOCAL
# error "HAVE_THREAD_LOCAL is already defined"
# endif
# define HAVE_THREAD_LOCAL 1
# ifdef thread_local
# define _Py_thread_local thread_local
# elif __STDC_VERSION__ >= 201112L && !defined(__STDC_NO_THREADS__)
# define _Py_thread_local _Thread_local
# elif defined(_MSC_VER) /* AKA NT_THREADS */
# define _Py_thread_local __declspec(thread)
# elif defined(__GNUC__) /* includes clang */
# define _Py_thread_local __thread
# else
// fall back to the PyThread_tss_*() API, or ignore.
# undef HAVE_THREAD_LOCAL
# endif
# endif
#endif
/* Check that ALT_SOABI is consistent with Py_TRACE_REFS:
./configure --with-trace-refs must be used to define Py_TRACE_REFS
#if defined(ALT_SOABI) && defined(Py_TRACE_REFS)

View file

@ -574,6 +574,8 @@ class Bdb:
line = linecache.getline(filename, lineno, frame.f_globals)
if line:
s += lprefix + line.strip()
else:
s += f'{lprefix}Warning: lineno is None'
return s
# The following methods can be called by clients to use

View file

@ -441,7 +441,16 @@ class suppress(AbstractContextManager):
# exactly reproduce the limitations of the CPython interpreter.
#
# See http://bugs.python.org/issue12029 for more details
return exctype is not None and issubclass(exctype, self._exceptions)
if exctype is None:
return
if issubclass(exctype, self._exceptions):
return True
if issubclass(exctype, ExceptionGroup):
match, rest = excinst.split(self._exceptions)
if rest is None:
return True
raise rest
return False
class _BaseExitStack:

View file

@ -41,6 +41,7 @@ JUMP_BACKWARD = opmap['JUMP_BACKWARD']
FOR_ITER = opmap['FOR_ITER']
SEND = opmap['SEND']
LOAD_ATTR = opmap['LOAD_ATTR']
LOAD_SUPER_ATTR = opmap['LOAD_SUPER_ATTR']
CACHE = opmap["CACHE"]
@ -475,6 +476,10 @@ def _get_instructions_bytes(code, varname_from_oparg=None,
argval, argrepr = _get_name_info(arg//2, get_name)
if (arg & 1) and argrepr:
argrepr = "NULL|self + " + argrepr
elif deop == LOAD_SUPER_ATTR:
argval, argrepr = _get_name_info(arg//4, get_name)
if (arg & 1) and argrepr:
argrepr = "NULL|self + " + argrepr
else:
argval, argrepr = _get_name_info(arg, get_name)
elif deop in hasjabs:

View file

@ -1987,7 +1987,7 @@ def get_address_list(value):
try:
token, value = get_address(value)
address_list.append(token)
except errors.HeaderParseError as err:
except errors.HeaderParseError:
leader = None
if value[0] in CFWS_LEADER:
leader, value = get_cfws(value)
@ -2096,7 +2096,7 @@ def get_msg_id(value):
except errors.HeaderParseError:
try:
token, value = get_no_fold_literal(value)
except errors.HeaderParseError as e:
except errors.HeaderParseError:
try:
token, value = get_domain(value)
msg_id.defects.append(errors.ObsoleteHeaderDefect(
@ -2443,7 +2443,6 @@ def get_parameter(value):
raise errors.HeaderParseError("Parameter not followed by '='")
param.append(ValueTerminal('=', 'parameter-separator'))
value = value[1:]
leader = None
if value and value[0] in CFWS_LEADER:
token, value = get_cfws(value)
param.append(token)
@ -2568,7 +2567,7 @@ def parse_mime_parameters(value):
try:
token, value = get_parameter(value)
mime_parameters.append(token)
except errors.HeaderParseError as err:
except errors.HeaderParseError:
leader = None
if value[0] in CFWS_LEADER:
leader, value = get_cfws(value)
@ -2626,7 +2625,6 @@ def parse_content_type_header(value):
don't do that.
"""
ctype = ContentType()
recover = False
if not value:
ctype.defects.append(errors.HeaderMissingRequiredValue(
"Missing content type specification"))

View file

@ -341,7 +341,6 @@ class Charset:
if not lines and not current_line:
lines.append(None)
else:
separator = (' ' if lines else '')
joined_line = EMPTYSTRING.join(current_line)
header_bytes = _encode(joined_line, codec)
lines.append(encoder(header_bytes))

View file

@ -264,7 +264,7 @@ class FeedParser:
yield NeedMoreData
continue
break
msg = self._pop_message()
self._pop_message()
# We need to pop the EOF matcher in order to tell if we're at
# the end of the current file, not the end of the last block
# of message headers.

View file

@ -14,7 +14,7 @@ from io import BytesIO, StringIO
# Intrapackage imports
from email import utils
from email import errors
from email._policybase import Policy, compat32
from email._policybase import compat32
from email import charset as _charset
from email._encoded_words import decode_b
Charset = _charset.Charset

View file

@ -6,7 +6,6 @@
__all__ = ['MIMEText']
from email.charset import Charset
from email.mime.nonmultipart import MIMENonMultipart
@ -36,6 +35,6 @@ class MIMEText(MIMENonMultipart):
_charset = 'utf-8'
MIMENonMultipart.__init__(self, 'text', _subtype, policy=policy,
**{'charset': str(_charset)})
charset=str(_charset))
self.set_payload(_text, _charset)

View file

@ -328,7 +328,7 @@ class LineNumbersTest(unittest.TestCase):
self.assertEqual(self.linenumber.sidebar_text.index('@0,0'), '11.0')
# Generate a mouse-wheel event and make sure it scrolled up or down.
# The meaning of the "delta" is OS-dependant, so this just checks for
# The meaning of the "delta" is OS-dependent, so this just checks for
# any change.
self.linenumber.sidebar_text.event_generate('<MouseWheel>',
x=0, y=0,
@ -691,7 +691,7 @@ class ShellSidebarTest(unittest.TestCase):
self.assertIsNotNone(text.dlineinfo(text.index(f'{last_lineno}.0')))
# Scroll up using the <MouseWheel> event.
# The meaning delta is platform-dependant.
# The meaning of delta is platform-dependent.
delta = -1 if sys.platform == 'darwin' else 120
sidebar.canvas.event_generate('<MouseWheel>', x=0, y=0, delta=delta)
yield

View file

@ -439,7 +439,8 @@ _code_type = type(_write_atomic.__code__)
# Python 3.12a7 3523 (Convert COMPARE_AND_BRANCH back to COMPARE_OP)
# Python 3.12a7 3524 (Shrink the BINARY_SUBSCR caches)
# Python 3.12b1 3525 (Shrink the CALL caches)
# Python 3.12a7 3526 (Add instrumentation support)
# Python 3.12b1 3526 (Add instrumentation support)
# Python 3.12b1 3527 (Optimize super() calls)
# Python 3.13 will start with 3550
@ -456,7 +457,7 @@ _code_type = type(_write_atomic.__code__)
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
MAGIC_NUMBER = (3526).to_bytes(2, 'little') + b'\r\n'
MAGIC_NUMBER = (3527).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c

View file

@ -196,7 +196,7 @@ hasfree.append(138)
def_op('DELETE_DEREF', 139)
hasfree.append(139)
jrel_op('JUMP_BACKWARD', 140) # Number of words to skip (backwards)
name_op('LOAD_SUPER_ATTR', 141)
def_op('CALL_FUNCTION_EX', 142) # Flags
def_op('EXTENDED_ARG', 144)
@ -264,6 +264,9 @@ pseudo_op('JUMP', 260, ['JUMP_FORWARD', 'JUMP_BACKWARD'])
pseudo_op('JUMP_NO_INTERRUPT', 261, ['JUMP_FORWARD', 'JUMP_BACKWARD_NO_INTERRUPT'])
pseudo_op('LOAD_METHOD', 262, ['LOAD_ATTR'])
pseudo_op('LOAD_SUPER_METHOD', 263, ['LOAD_SUPER_ATTR'])
pseudo_op('LOAD_ZERO_SUPER_METHOD', 264, ['LOAD_SUPER_ATTR'])
pseudo_op('LOAD_ZERO_SUPER_ATTR', 265, ['LOAD_SUPER_ATTR'])
MAX_PSEUDO_OPCODE = MIN_PSEUDO_OPCODE + len(_pseudo_ops) - 1

View file

@ -141,6 +141,8 @@ if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer","UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
if hasattr(os, "fork"):
__all__.extend(["ForkingUnixStreamServer", "ForkingUnixDatagramServer"])
# poll/select have the advantage of not requiring any extra file descriptor,
# contrarily to epoll/kqueue (also, they require a single syscall).
@ -727,6 +729,11 @@ if hasattr(socket, 'AF_UNIX'):
class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
if hasattr(os, "fork"):
class ForkingUnixStreamServer(ForkingMixIn, UnixStreamServer): pass
class ForkingUnixDatagramServer(ForkingMixIn, UnixDatagramServer): pass
class BaseRequestHandler:
"""Base class for request handler classes.

View file

@ -0,0 +1,7 @@
# Test fixture: a module-level class that deliberately shadows the
# builtin `super`.
# NOTE(review): presumably used to verify that a zero-argument super()
# call inside a method still resolves the *shadowed* global — so that
# C().method() returns "truly super" instead of dispatching through the
# real builtin — confirm against the test module that imports this file.
class super:
    msg = "truly super"


class C:
    def method(self):
        # `super()` here instantiates the shadowing class above, then
        # reads its `msg` class attribute.
        return super().msg

View file

@ -0,0 +1,25 @@
class ExceptionIsLikeMixin:
def assertExceptionIsLike(self, exc, template):
"""
Passes when the provided `exc` matches the structure of `template`.
Individual exceptions don't have to be the same objects or even pass
an equality test: they only need to be the same type and contain equal
`exc_obj.args`.
"""
if exc is None and template is None:
return
if template is None:
self.fail(f"unexpected exception: {exc}")
if exc is None:
self.fail(f"expected an exception like {template!r}, got None")
if not isinstance(exc, ExceptionGroup):
self.assertEqual(exc.__class__, template.__class__)
self.assertEqual(exc.args[0], template.args[0])
else:
self.assertEqual(exc.message, template.message)
self.assertEqual(len(exc.exceptions), len(template.exceptions))
for e, t in zip(exc.exceptions, template.exceptions):
self.assertExceptionIsLike(e, t)

View file

@ -1469,19 +1469,19 @@ class ExhaustiveChannelTests(TestBase):
with self.assertRaises(channels.ChannelClosedError):
channels.close(fix.cid, force=True)
else:
run_interp(interp.id, f"""
run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.recv(cid)
""")
run_interp(interp.id, f"""
run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.send(cid, b'spam')
""")
run_interp(interp.id, f"""
run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.close(cid)
""")
run_interp(interp.id, f"""
run_interp(interp.id, """
with helpers.expect_channel_closed():
channels.close(cid, force=True)
""")

View file

@ -798,7 +798,7 @@ class RunStringTests(TestBase):
"""))
shared = {'spam': b'ham'}
script = dedent(f"""
script = dedent("""
ns2 = dict(vars())
del ns2['__builtins__']
""")
@ -902,7 +902,7 @@ class RunStringTests(TestBase):
# XXX Fix this test!
@unittest.skip('blocking forever')
def test_still_running_at_exit(self):
script = dedent(f"""
script = dedent("""
from textwrap import dedent
import threading
import _xxsubinterpreters as _interpreters

View file

@ -1712,11 +1712,11 @@ class PolicyTests(unittest.TestCase):
def create_policy(self):
return asyncio.DefaultEventLoopPolicy()
def test_get_default_child_watcher(self):
@mock.patch('asyncio.unix_events.can_use_pidfd')
def test_get_default_child_watcher(self, m_can_use_pidfd):
m_can_use_pidfd.return_value = False
policy = self.create_policy()
self.assertIsNone(policy._watcher)
unix_events.can_use_pidfd = mock.Mock()
unix_events.can_use_pidfd.return_value = False
with self.assertWarns(DeprecationWarning):
watcher = policy.get_child_watcher()
self.assertIsInstance(watcher, asyncio.ThreadedChildWatcher)
@ -1725,10 +1725,9 @@ class PolicyTests(unittest.TestCase):
with self.assertWarns(DeprecationWarning):
self.assertIs(watcher, policy.get_child_watcher())
m_can_use_pidfd.return_value = True
policy = self.create_policy()
self.assertIsNone(policy._watcher)
unix_events.can_use_pidfd = mock.Mock()
unix_events.can_use_pidfd.return_value = True
with self.assertWarns(DeprecationWarning):
watcher = policy.get_child_watcher()
self.assertIsInstance(watcher, asyncio.PidfdChildWatcher)

View file

@ -1207,7 +1207,8 @@ class IssuesTestCase(BaseTestCase):
class TestRegressions(unittest.TestCase):
def test_format_stack_entry_no_lineno(self):
# See gh-101517
Bdb().format_stack_entry((sys._getframe(), None))
self.assertIn('Warning: lineno is None',
Bdb().format_stack_entry((sys._getframe(), None)))
if __name__ == "__main__":

View file

@ -1211,20 +1211,25 @@ class SubinterpreterTest(unittest.TestCase):
"""
import json
OBMALLOC = 1<<5
EXTENSIONS = 1<<8
THREADS = 1<<10
DAEMON_THREADS = 1<<11
FORK = 1<<15
EXEC = 1<<16
features = ['fork', 'exec', 'threads', 'daemon_threads', 'extensions']
features = ['obmalloc', 'fork', 'exec', 'threads', 'daemon_threads',
'extensions']
kwlist = [f'allow_{n}' for n in features]
kwlist[0] = 'use_main_obmalloc'
kwlist[-1] = 'check_multi_interp_extensions'
# expected to work
for config, expected in {
(True, True, True, True, True):
FORK | EXEC | THREADS | DAEMON_THREADS | EXTENSIONS,
(False, False, False, False, False): 0,
(False, False, True, False, True): THREADS | EXTENSIONS,
(True, True, True, True, True, True):
OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS | EXTENSIONS,
(True, False, False, False, False, False): OBMALLOC,
(False, False, False, True, False, True): THREADS | EXTENSIONS,
}.items():
kwargs = dict(zip(kwlist, config))
expected = {
@ -1246,6 +1251,20 @@ class SubinterpreterTest(unittest.TestCase):
self.assertEqual(settings, expected)
# expected to fail
for config in [
(False, False, False, False, False, False),
]:
kwargs = dict(zip(kwlist, config))
with self.subTest(config):
script = textwrap.dedent(f'''
import _testinternalcapi
_testinternalcapi.get_interp_settings()
raise NotImplementedError('unreachable')
''')
with self.assertRaises(RuntimeError):
support.run_in_subinterp_with_config(script, **kwargs)
@unittest.skipIf(_testsinglephase is None, "test requires _testsinglephase module")
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def test_overridden_setting_extensions_subinterp_check(self):
@ -1257,13 +1276,15 @@ class SubinterpreterTest(unittest.TestCase):
"""
import json
OBMALLOC = 1<<5
EXTENSIONS = 1<<8
THREADS = 1<<10
DAEMON_THREADS = 1<<11
FORK = 1<<15
EXEC = 1<<16
BASE_FLAGS = FORK | EXEC | THREADS | DAEMON_THREADS
BASE_FLAGS = OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS
base_kwargs = {
'use_main_obmalloc': True,
'allow_fork': True,
'allow_exec': True,
'allow_threads': True,
@ -1400,7 +1421,7 @@ class TestThreadState(unittest.TestCase):
@threading_helper.requires_working_threading()
def test_gilstate_ensure_no_deadlock(self):
# See https://github.com/python/cpython/issues/96071
code = textwrap.dedent(f"""
code = textwrap.dedent("""
import _testcapi
def callback():

View file

@ -356,7 +356,7 @@ class CodeTest(unittest.TestCase):
foo.__code__ = foo.__code__.replace(
co_code=b'\xe5' + foo.__code__.co_code[1:])
msg = f"unknown opcode 229"
msg = "unknown opcode 229"
with self.assertRaisesRegex(SystemError, msg):
foo()

View file

@ -277,7 +277,7 @@ class CodeopTests(unittest.TestCase):
def test_warning(self):
# Test that the warning is only returned once.
with warnings_helper.check_warnings(
('"is" with a literal', SyntaxWarning),
('"is" with \'str\' literal', SyntaxWarning),
("invalid escape sequence", SyntaxWarning),
) as w:
compile_command(r"'\e' is 0")

View file

@ -1626,7 +1626,7 @@ class TestCollectionABCs(ABCTestCase):
class SetUsingInstanceFromIterable(MutableSet):
def __init__(self, values, created_by):
if not created_by:
raise ValueError(f'created_by must be specified')
raise ValueError('created_by must be specified')
self.created_by = created_by
self._values = set(values)

View file

@ -10,6 +10,7 @@ import unittest
from contextlib import * # Tests __all__
from test import support
from test.support import os_helper
from test.support.testcase import ExceptionIsLikeMixin
import weakref
@ -1148,7 +1149,7 @@ class TestRedirectStderr(TestRedirectStream, unittest.TestCase):
orig_stream = "stderr"
class TestSuppress(unittest.TestCase):
class TestSuppress(ExceptionIsLikeMixin, unittest.TestCase):
@support.requires_docstrings
def test_instance_docs(self):
@ -1202,6 +1203,30 @@ class TestSuppress(unittest.TestCase):
1/0
self.assertTrue(outer_continued)
def test_exception_groups(self):
eg_ve = lambda: ExceptionGroup(
"EG with ValueErrors only",
[ValueError("ve1"), ValueError("ve2"), ValueError("ve3")],
)
eg_all = lambda: ExceptionGroup(
"EG with many types of exceptions",
[ValueError("ve1"), KeyError("ke1"), ValueError("ve2"), KeyError("ke2")],
)
with suppress(ValueError):
raise eg_ve()
with suppress(ValueError, KeyError):
raise eg_all()
with self.assertRaises(ExceptionGroup) as eg1:
with suppress(ValueError):
raise eg_all()
self.assertExceptionIsLike(
eg1.exception,
ExceptionGroup(
"EG with many types of exceptions",
[KeyError("ke1"), KeyError("ke2")],
),
)
class TestChdir(unittest.TestCase):
def make_relative_path(self, *parts):

View file

@ -2365,15 +2365,15 @@ class OriginTrackingTest(unittest.TestCase):
f"coroutine '{corofn.__qualname__}' was never awaited\n",
"Coroutine created at (most recent call last)\n",
f' File "{a1_filename}", line {a1_lineno}, in a1\n',
f' return corofn() # comment in a1',
" return corofn() # comment in a1",
]))
check(2, "".join([
f"coroutine '{corofn.__qualname__}' was never awaited\n",
"Coroutine created at (most recent call last)\n",
f' File "{a2_filename}", line {a2_lineno}, in a2\n',
f' return a1() # comment in a2\n',
" return a1() # comment in a2\n",
f' File "{a1_filename}", line {a1_lineno}, in a1\n',
f' return corofn() # comment in a1',
" return corofn() # comment in a1",
]))
finally:

View file

@ -757,8 +757,8 @@ class TestCase(unittest.TestCase):
class Subclass(typ): pass
with self.assertRaisesRegex(ValueError,
f"mutable default .*Subclass'>"
' for field z is not allowed'
"mutable default .*Subclass'>"
" for field z is not allowed"
):
@dataclass
class Point:

View file

@ -110,7 +110,7 @@ class EmbeddingTestsMixin:
print(f"--- {cmd} failed ---")
print(f"stdout:\n{out}")
print(f"stderr:\n{err}")
print(f"------")
print("------")
self.assertEqual(p.returncode, returncode,
"bad returncode %d, stderr is %r" %
@ -1656,6 +1656,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase):
api=API_PYTHON, env=env)
def test_init_main_interpreter_settings(self):
OBMALLOC = 1<<5
EXTENSIONS = 1<<8
THREADS = 1<<10
DAEMON_THREADS = 1<<11
@ -1664,7 +1665,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase):
expected = {
# All optional features should be enabled.
'feature_flags':
FORK | EXEC | THREADS | DAEMON_THREADS,
OBMALLOC | FORK | EXEC | THREADS | DAEMON_THREADS,
}
out, err = self.run_embedded_interpreter(
'test_init_main_interpreter_settings',

View file

@ -1,6 +1,7 @@
import sys
import unittest
import textwrap
from test.support.testcase import ExceptionIsLikeMixin
class TestInvalidExceptStar(unittest.TestCase):
def test_mixed_except_and_except_star_is_syntax_error(self):
@ -169,26 +170,7 @@ class TestBreakContinueReturnInExceptStarBlock(unittest.TestCase):
self.assertIsInstance(exc, ExceptionGroup)
class ExceptStarTest(unittest.TestCase):
def assertExceptionIsLike(self, exc, template):
if exc is None and template is None:
return
if template is None:
self.fail(f"unexpected exception: {exc}")
if exc is None:
self.fail(f"expected an exception like {template!r}, got None")
if not isinstance(exc, ExceptionGroup):
self.assertEqual(exc.__class__, template.__class__)
self.assertEqual(exc.args[0], template.args[0])
else:
self.assertEqual(exc.message, template.message)
self.assertEqual(len(exc.exceptions), len(template.exceptions))
for e, t in zip(exc.exceptions, template.exceptions):
self.assertExceptionIsLike(e, t)
class ExceptStarTest(ExceptionIsLikeMixin, unittest.TestCase):
def assertMetadataEqual(self, e1, e2):
if e1 is None or e2 is None:
self.assertTrue(e1 is None and e2 is None)

View file

@ -940,15 +940,13 @@ x = (
"f'{lambda :x}'",
"f'{lambda *arg, :x}'",
"f'{1, lambda:x}'",
"f'{lambda x:}'",
"f'{lambda :}'",
])
# but don't emit the paren warning in general cases
self.assertAllRaise(SyntaxError,
"f-string: expecting a valid expression after '{'",
["f'{lambda x:}'",
"f'{lambda :}'",
"f'{+ lambda:None}'",
])
with self.assertRaisesRegex(SyntaxError, "f-string: expecting a valid expression after '{'"):
eval("f'{+ lambda:None}'")
def test_valid_prefixes(self):
self.assertEqual(F'{1}', "1")

View file

@ -962,7 +962,7 @@ id(42)
cmd = textwrap.dedent('''
class MyList(list):
def __init__(self):
super().__init__() # wrapper_call()
super(*[]).__init__() # wrapper_call()
id("first break point")
l = MyList()

View file

@ -236,12 +236,9 @@ class TokenTests(unittest.TestCase):
check(f"[{num}for x in ()]")
check(f"{num}spam", error=True)
with self.assertWarnsRegex(SyntaxWarning, r'invalid \w+ literal'):
compile(f"{num}is x", "<testcase>", "eval")
with warnings.catch_warnings():
warnings.filterwarnings('ignore', '"is" with a literal',
SyntaxWarning)
with self.assertWarnsRegex(SyntaxWarning,
r'invalid \w+ literal'):
compile(f"{num}is x", "<testcase>", "eval")
warnings.simplefilter('error', SyntaxWarning)
with self.assertRaisesRegex(SyntaxError,
r'invalid \w+ literal'):
@ -1467,14 +1464,22 @@ class GrammarTests(unittest.TestCase):
if 1 < 1 > 1 == 1 >= 1 <= 1 != 1 in 1 not in x is x is not x: pass
def test_comparison_is_literal(self):
def check(test, msg='"is" with a literal'):
def check(test, msg):
self.check_syntax_warning(test, msg)
check('x is 1')
check('x is "thing"')
check('1 is x')
check('x is y is 1')
check('x is not 1', '"is not" with a literal')
check('x is 1', '"is" with \'int\' literal')
check('x is "thing"', '"is" with \'str\' literal')
check('1 is x', '"is" with \'int\' literal')
check('x is y is 1', '"is" with \'int\' literal')
check('x is not 1', '"is not" with \'int\' literal')
check('x is not (1, 2)', '"is not" with \'tuple\' literal')
check('(1, 2) is not x', '"is not" with \'tuple\' literal')
check('None is 1', '"is" with \'int\' literal')
check('1 is None', '"is" with \'int\' literal')
check('x == 3 is y', '"is" with \'int\' literal')
check('x == "thing" is y', '"is" with \'str\' literal')
with warnings.catch_warnings():
warnings.simplefilter('error', SyntaxWarning)
@ -1482,6 +1487,10 @@ class GrammarTests(unittest.TestCase):
compile('x is False', '<testcase>', 'exec')
compile('x is True', '<testcase>', 'exec')
compile('x is ...', '<testcase>', 'exec')
compile('None is x', '<testcase>', 'exec')
compile('False is x', '<testcase>', 'exec')
compile('True is x', '<testcase>', 'exec')
compile('... is x', '<testcase>', 'exec')
def test_warn_missed_comma(self):
def check(test):

View file

@ -1636,7 +1636,12 @@ class SubinterpImportTests(unittest.TestCase):
allow_exec=False,
allow_threads=True,
allow_daemon_threads=False,
# Isolation-related config values aren't included here.
)
ISOLATED = dict(
use_main_obmalloc=False,
)
NOT_ISOLATED = {k: not v for k, v in ISOLATED.items()}
@unittest.skipUnless(hasattr(os, "pipe"), "requires os.pipe()")
def pipe(self):
@ -1669,6 +1674,7 @@ class SubinterpImportTests(unittest.TestCase):
def run_here(self, name, *,
check_singlephase_setting=False,
check_singlephase_override=None,
isolated=False,
):
"""
Try importing the named module in a subinterpreter.
@ -1689,6 +1695,7 @@ class SubinterpImportTests(unittest.TestCase):
kwargs = dict(
**self.RUN_KWARGS,
**(self.ISOLATED if isolated else self.NOT_ISOLATED),
check_multi_interp_extensions=check_singlephase_setting,
)
@ -1699,33 +1706,36 @@ class SubinterpImportTests(unittest.TestCase):
self.assertEqual(ret, 0)
return os.read(r, 100)
def check_compatible_here(self, name, *, strict=False):
def check_compatible_here(self, name, *, strict=False, isolated=False):
# Verify that the named module may be imported in a subinterpreter.
# (See run_here() for more info.)
out = self.run_here(name,
check_singlephase_setting=strict,
isolated=isolated,
)
self.assertEqual(out, b'okay')
def check_incompatible_here(self, name):
def check_incompatible_here(self, name, *, isolated=False):
# Differences from check_compatible_here():
# * verify that import fails
# * "strict" is always True
out = self.run_here(name,
check_singlephase_setting=True,
isolated=isolated,
)
self.assertEqual(
out.decode('utf-8'),
f'ImportError: module {name} does not support loading in subinterpreters',
)
def check_compatible_fresh(self, name, *, strict=False):
def check_compatible_fresh(self, name, *, strict=False, isolated=False):
# Differences from check_compatible_here():
# * subinterpreter in a new process
# * module has never been imported before in that process
# * this tests importing the module for the first time
kwargs = dict(
**self.RUN_KWARGS,
**(self.ISOLATED if isolated else self.NOT_ISOLATED),
check_multi_interp_extensions=strict,
)
_, out, err = script_helper.assert_python_ok('-c', textwrap.dedent(f'''
@ -1743,12 +1753,13 @@ class SubinterpImportTests(unittest.TestCase):
self.assertEqual(err, b'')
self.assertEqual(out, b'okay')
def check_incompatible_fresh(self, name):
def check_incompatible_fresh(self, name, *, isolated=False):
# Differences from check_compatible_fresh():
# * verify that import fails
# * "strict" is always True
kwargs = dict(
**self.RUN_KWARGS,
**(self.ISOLATED if isolated else self.NOT_ISOLATED),
check_multi_interp_extensions=True,
)
_, out, err = script_helper.assert_python_ok('-c', textwrap.dedent(f'''
@ -1854,6 +1865,14 @@ class SubinterpImportTests(unittest.TestCase):
with self.subTest('config: check disabled; override: disabled'):
check_compatible(False, -1)
def test_isolated_config(self):
module = 'threading'
require_pure_python(module)
with self.subTest(f'{module}: strict, not fresh'):
self.check_compatible_here(module, strict=True, isolated=True)
with self.subTest(f'{module}: strict, fresh'):
self.check_compatible_fresh(module, strict=True, isolated=True)
class TestSinglePhaseSnapshot(ModuleSnapshot):
@ -1868,7 +1887,7 @@ class TestSinglePhaseSnapshot(ModuleSnapshot):
self.init_count = mod.initialized_count()
return self
SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent(f'''
SCRIPT_BODY = ModuleSnapshot.SCRIPT_BODY + textwrap.dedent('''
snapshot['module'].update(dict(
int_const=mod.int_const,
str_const=mod.str_const,

View file

@ -394,17 +394,17 @@ class TestLauncher(unittest.TestCase, RunPyMixin):
def test_filter_to_tag(self):
company = "PythonTestSuite"
data = self.run_py([f"-V:3.100"])
data = self.run_py(["-V:3.100"])
self.assertEqual("X.Y.exe", data["LaunchCommand"])
self.assertEqual(company, data["env.company"])
self.assertEqual("3.100", data["env.tag"])
data = self.run_py([f"-V:3.100-32"])
data = self.run_py(["-V:3.100-32"])
self.assertEqual("X.Y-32.exe", data["LaunchCommand"])
self.assertEqual(company, data["env.company"])
self.assertEqual("3.100-32", data["env.tag"])
data = self.run_py([f"-V:3.100-arm64"])
data = self.run_py(["-V:3.100-arm64"])
self.assertEqual("X.Y-arm64.exe -X fake_arg_for_test", data["LaunchCommand"])
self.assertEqual(company, data["env.company"])
self.assertEqual("3.100-arm64", data["env.tag"])
@ -421,7 +421,7 @@ class TestLauncher(unittest.TestCase, RunPyMixin):
def test_filter_with_single_install(self):
company = "PythonTestSuite1"
data = self.run_py(
[f"-V:Nonexistent"],
["-V:Nonexistent"],
env={"PYLAUNCHER_LIMIT_TO_COMPANY": company},
expect_returncode=103,
)
@ -500,7 +500,7 @@ class TestLauncher(unittest.TestCase, RunPyMixin):
data = self.run_py(["--version"], argv=f'{argv0} --version')
self.assertEqual("PythonTestSuite", data["SearchInfo.company"])
self.assertEqual("3.100", data["SearchInfo.tag"])
self.assertEqual(f'X.Y.exe --version', data["stdout"].strip())
self.assertEqual("X.Y.exe --version", data["stdout"].strip())
def test_py_default_in_list(self):
data = self.run_py(["-0"], env=TEST_PY_ENV)
@ -662,7 +662,7 @@ class TestLauncher(unittest.TestCase, RunPyMixin):
self.assertIn("9PJPW5LDXLZ5", cmd)
def test_literal_shebang_absolute(self):
with self.script(f"#! C:/some_random_app -witharg") as script:
with self.script("#! C:/some_random_app -witharg") as script:
data = self.run_py([script])
self.assertEqual(
f"C:\\some_random_app -witharg {script}",
@ -670,7 +670,7 @@ class TestLauncher(unittest.TestCase, RunPyMixin):
)
def test_literal_shebang_relative(self):
with self.script(f"#! ..\\some_random_app -witharg") as script:
with self.script("#! ..\\some_random_app -witharg") as script:
data = self.run_py([script])
self.assertEqual(
f"{script.parent.parent}\\some_random_app -witharg {script}",
@ -678,14 +678,14 @@ class TestLauncher(unittest.TestCase, RunPyMixin):
)
def test_literal_shebang_quoted(self):
with self.script(f'#! "some random app" -witharg') as script:
with self.script('#! "some random app" -witharg') as script:
data = self.run_py([script])
self.assertEqual(
f'"{script.parent}\\some random app" -witharg {script}',
data["stdout"].strip(),
)
with self.script(f'#! some" random "app -witharg') as script:
with self.script('#! some" random "app -witharg') as script:
data = self.run_py([script])
self.assertEqual(
f'"{script.parent}\\some random app" -witharg {script}',
@ -693,7 +693,7 @@ class TestLauncher(unittest.TestCase, RunPyMixin):
)
def test_literal_shebang_quoted_escape(self):
with self.script(f'#! some\\" random "app -witharg') as script:
with self.script('#! some\\" random "app -witharg') as script:
data = self.run_py([script])
self.assertEqual(
f'"{script.parent}\\some\\ random app" -witharg {script}',

View file

@ -1715,8 +1715,8 @@ def test_pdb_issue_gh_101517():
... 'continue'
... ]):
... test_function()
--Return--
> <doctest test.test_pdb.test_pdb_issue_gh_101517[0]>(None)test_function()->None
> <doctest test.test_pdb.test_pdb_issue_gh_101517[0]>(5)test_function()
-> import pdb; pdb.Pdb(nosigint=True, readrc=False).set_trace()
(Pdb) continue
"""

View file

@ -810,7 +810,7 @@ class TestMarkingVariablesAsUnKnown(BytecodeTestCase):
self.assertInBytecode(f, 'LOAD_FAST', "a73")
def test_setting_lineno_no_undefined(self):
code = textwrap.dedent(f"""\
code = textwrap.dedent("""\
def f():
x = y = 2
if not x:
@ -842,7 +842,7 @@ class TestMarkingVariablesAsUnKnown(BytecodeTestCase):
self.assertEqual(f.__code__.co_code, co_code)
def test_setting_lineno_one_undefined(self):
code = textwrap.dedent(f"""\
code = textwrap.dedent("""\
def f():
x = y = 2
if not x:
@ -876,7 +876,7 @@ class TestMarkingVariablesAsUnKnown(BytecodeTestCase):
self.assertEqual(f.__code__.co_code, co_code)
def test_setting_lineno_two_undefined(self):
code = textwrap.dedent(f"""\
code = textwrap.dedent("""\
def f():
x = y = 2
if not x:

View file

@ -47,14 +47,6 @@ def receive(sock, n, timeout=test.support.SHORT_TIMEOUT):
else:
raise RuntimeError("timed out on %r" % (sock,))
if HAVE_UNIX_SOCKETS and HAVE_FORKING:
class ForkingUnixStreamServer(socketserver.ForkingMixIn,
socketserver.UnixStreamServer):
pass
class ForkingUnixDatagramServer(socketserver.ForkingMixIn,
socketserver.UnixDatagramServer):
pass
@test.support.requires_fork()
@contextlib.contextmanager
@ -211,7 +203,7 @@ class SocketServerTest(unittest.TestCase):
@requires_forking
def test_ForkingUnixStreamServer(self):
with simple_subprocess(self):
self.run_server(ForkingUnixStreamServer,
self.run_server(socketserver.ForkingUnixStreamServer,
socketserver.StreamRequestHandler,
self.stream_examine)
@ -247,7 +239,7 @@ class SocketServerTest(unittest.TestCase):
@requires_unix_sockets
@requires_forking
def test_ForkingUnixDatagramServer(self):
self.run_server(ForkingUnixDatagramServer,
self.run_server(socketserver.ForkingUnixDatagramServer,
socketserver.DatagramRequestHandler,
self.dgram_examine)

View file

@ -491,21 +491,21 @@ class RecursiveUseOfCursors(unittest.TestCase):
def test_recursive_cursor_init(self):
conv = lambda x: self.cur.__init__(self.con)
with patch.dict(sqlite.converters, {"INIT": conv}):
self.cur.execute(f'select x as "x [INIT]", x from test')
self.cur.execute('select x as "x [INIT]", x from test')
self.assertRaisesRegex(sqlite.ProgrammingError, self.msg,
self.cur.fetchall)
def test_recursive_cursor_close(self):
conv = lambda x: self.cur.close()
with patch.dict(sqlite.converters, {"CLOSE": conv}):
self.cur.execute(f'select x as "x [CLOSE]", x from test')
self.cur.execute('select x as "x [CLOSE]", x from test')
self.assertRaisesRegex(sqlite.ProgrammingError, self.msg,
self.cur.fetchall)
def test_recursive_cursor_iter(self):
conv = lambda x, l=[]: self.cur.fetchone() if l else l.append(None)
with patch.dict(sqlite.converters, {"ITER": conv}):
self.cur.execute(f'select x as "x [ITER]", x from test')
self.cur.execute('select x as "x [ITER]", x from test')
self.assertRaisesRegex(sqlite.ProgrammingError, self.msg,
self.cur.fetchall)

View file

@ -562,7 +562,7 @@ class WindowFunctionTests(unittest.TestCase):
# callback errors to sqlite3_step(); this implies that OperationalError
# is _not_ raised.
with patch.object(WindowSumInt, "finalize", side_effect=BadWindow):
name = f"exception_in_finalize"
name = "exception_in_finalize"
self.con.create_window_function(name, 1, WindowSumInt)
self.cur.execute(self.query % name)
self.cur.fetchall()

View file

@ -1,6 +1,8 @@
"""Unit tests for zero-argument super() & related machinery."""
import unittest
from unittest.mock import patch
from test import shadowed_super
class A:
@ -283,17 +285,28 @@ class TestSuper(unittest.TestCase):
def test_obscure_super_errors(self):
def f():
super()
self.assertRaises(RuntimeError, f)
with self.assertRaisesRegex(RuntimeError, r"no arguments"):
f()
class C:
def f():
super()
with self.assertRaisesRegex(RuntimeError, r"no arguments"):
C.f()
def f(x):
del x
super()
self.assertRaises(RuntimeError, f, None)
with self.assertRaisesRegex(RuntimeError, r"arg\[0\] deleted"):
f(None)
class X:
def f(x):
nonlocal __class__
del __class__
super()
self.assertRaises(RuntimeError, X().f)
with self.assertRaisesRegex(RuntimeError, r"empty __class__ cell"):
X().f()
def test_cell_as_self(self):
class X:
@ -325,6 +338,78 @@ class TestSuper(unittest.TestCase):
with self.assertRaisesRegex(TypeError, "argument 1 must be a type"):
super(1, int)
def test_shadowed_global(self):
self.assertEqual(shadowed_super.C().method(), "truly super")
def test_shadowed_local(self):
class super:
msg = "quite super"
class C:
def method(self):
return super().msg
self.assertEqual(C().method(), "quite super")
def test_shadowed_dynamic(self):
class MySuper:
msg = "super super"
class C:
def method(self):
return super().msg
with patch("test.test_super.super", MySuper) as m:
self.assertEqual(C().method(), "super super")
def test_shadowed_dynamic_two_arg(self):
call_args = []
class MySuper:
def __init__(self, *args):
call_args.append(args)
msg = "super super"
class C:
def method(self):
return super(1, 2).msg
with patch("test.test_super.super", MySuper) as m:
self.assertEqual(C().method(), "super super")
self.assertEqual(call_args, [(1, 2)])
def test_attribute_error(self):
class C:
def method(self):
return super().msg
with self.assertRaisesRegex(AttributeError, "'super' object has no attribute 'msg'"):
C().method()
def test_bad_first_arg(self):
class C:
def method(self):
return super(1, self).method()
with self.assertRaisesRegex(TypeError, "argument 1 must be a type"):
C().method()
def test_super___class__(self):
class C:
def method(self):
return super().__class__
self.assertEqual(C().method(), super)
def test_super_subclass___class__(self):
class mysuper(super):
pass
class C:
def method(self):
return mysuper(C, self).__class__
self.assertEqual(C().method(), mysuper)
if __name__ == "__main__":
unittest.main()

View file

@ -1343,6 +1343,7 @@ class SubinterpThreadingTests(BaseTestCase):
import test.support
test.support.run_in_subinterp_with_config(
{subinterp_code!r},
use_main_obmalloc=True,
allow_fork=True,
allow_exec=True,
allow_threads={allowed},

View file

@ -802,12 +802,12 @@ class TracebackErrorLocationCaretTestBase:
)()
actual = self.get_exception(f)
expected = [
f"Traceback (most recent call last):",
"Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
f" callable()",
" callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
f" .method",
f" ^^^^^^",
" .method",
" ^^^^^^",
]
self.assertEqual(actual, expected)
@ -818,11 +818,11 @@ class TracebackErrorLocationCaretTestBase:
)()
actual = self.get_exception(f)
expected = [
f"Traceback (most recent call last):",
"Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
f" callable()",
" callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
f" method",
" method",
]
self.assertEqual(actual, expected)
@ -833,12 +833,12 @@ class TracebackErrorLocationCaretTestBase:
)()
actual = self.get_exception(f)
expected = [
f"Traceback (most recent call last):",
"Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
f" callable()",
" callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
f" . method",
f" ^^^^^^",
" . method",
" ^^^^^^",
]
self.assertEqual(actual, expected)
@ -848,11 +848,11 @@ class TracebackErrorLocationCaretTestBase:
actual = self.get_exception(f)
expected = [
f"Traceback (most recent call last):",
"Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
f" callable()",
" callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 1}, in f",
f" ",
" ",
]
self.assertEqual(actual, expected)
@ -864,11 +864,11 @@ class TracebackErrorLocationCaretTestBase:
actual = self.get_exception(f)
expected = [
f"Traceback (most recent call last):",
"Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
f" callable()",
" callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 2}, in f",
f" raise ValueError()",
" raise ValueError()",
]
self.assertEqual(actual, expected)
@ -882,12 +882,12 @@ class TracebackErrorLocationCaretTestBase:
actual = self.get_exception(f)
expected = [
f"Traceback (most recent call last):",
"Traceback (most recent call last):",
f" File \"{__file__}\", line {self.callable_line}, in get_exception",
f" callable()",
" callable()",
f" File \"{__file__}\", line {f.__code__.co_firstlineno + 4}, in f",
f" print(1, (",
f" ^^^^",
" print(1, (",
" ^^^^",
]
self.assertEqual(actual, expected)
@ -2844,26 +2844,26 @@ class TestTracebackException_ExceptionGroups(unittest.TestCase):
formatted = ''.join(teg.format()).split('\n')
expected = [
f' | ExceptionGroup: eg (2 sub-exceptions)',
f' +-+---------------- 1 ----------------',
f' | ExceptionGroup: eg1 (3 sub-exceptions)',
f' +-+---------------- 1 ----------------',
f' | ValueError: 0',
f' +---------------- 2 ----------------',
f' | ValueError: 1',
f' +---------------- ... ----------------',
f' | and 1 more exception',
f' +------------------------------------',
f' +---------------- 2 ----------------',
f' | ExceptionGroup: eg2 (10 sub-exceptions)',
f' +-+---------------- 1 ----------------',
f' | TypeError: 0',
f' +---------------- 2 ----------------',
f' | TypeError: 1',
f' +---------------- ... ----------------',
f' | and 8 more exceptions',
f' +------------------------------------',
f'']
' | ExceptionGroup: eg (2 sub-exceptions)',
' +-+---------------- 1 ----------------',
' | ExceptionGroup: eg1 (3 sub-exceptions)',
' +-+---------------- 1 ----------------',
' | ValueError: 0',
' +---------------- 2 ----------------',
' | ValueError: 1',
' +---------------- ... ----------------',
' | and 1 more exception',
' +------------------------------------',
' +---------------- 2 ----------------',
' | ExceptionGroup: eg2 (10 sub-exceptions)',
' +-+---------------- 1 ----------------',
' | TypeError: 0',
' +---------------- 2 ----------------',
' | TypeError: 1',
' +---------------- ... ----------------',
' | and 8 more exceptions',
' +------------------------------------',
'']
self.assertEqual(formatted, expected)
@ -2876,22 +2876,22 @@ class TestTracebackException_ExceptionGroups(unittest.TestCase):
formatted = ''.join(teg.format()).split('\n')
expected = [
f' | ExceptionGroup: exc (3 sub-exceptions)',
f' +-+---------------- 1 ----------------',
f' | ValueError: -2',
f' +---------------- 2 ----------------',
f' | ExceptionGroup: exc (3 sub-exceptions)',
f' +-+---------------- 1 ----------------',
f' | ValueError: -1',
f' +---------------- 2 ----------------',
f' | ... (max_group_depth is 2)',
f' +---------------- 3 ----------------',
f' | ValueError: 1',
f' +------------------------------------',
f' +---------------- 3 ----------------',
f' | ValueError: 2',
f' +------------------------------------',
f'']
' | ExceptionGroup: exc (3 sub-exceptions)',
' +-+---------------- 1 ----------------',
' | ValueError: -2',
' +---------------- 2 ----------------',
' | ExceptionGroup: exc (3 sub-exceptions)',
' +-+---------------- 1 ----------------',
' | ValueError: -1',
' +---------------- 2 ----------------',
' | ... (max_group_depth is 2)',
' +---------------- 3 ----------------',
' | ValueError: 1',
' +------------------------------------',
' +---------------- 3 ----------------',
' | ValueError: 2',
' +------------------------------------',
'']
self.assertEqual(formatted, expected)

View file

@ -9,6 +9,7 @@ except ImportError:
# Skip this test if the _testcapi module isn't available.
type_get_version = import_helper.import_module('_testcapi').type_get_version
type_assign_version = import_helper.import_module('_testcapi').type_assign_version
@support.cpython_only
@ -42,6 +43,19 @@ class TypeCacheTests(unittest.TestCase):
self.assertEqual(len(set(all_version_tags)), 30,
msg=f"{all_version_tags} contains non-unique versions")
def test_type_assign_version(self):
class C:
x = 5
self.assertEqual(type_assign_version(C), 1)
c_ver = type_get_version(C)
C.x = 6
self.assertEqual(type_get_version(C), 0)
self.assertEqual(type_assign_version(C), 1)
self.assertNotEqual(type_get_version(C), 0)
self.assertNotEqual(type_get_version(C), c_ver)
if __name__ == "__main__":
support.run_unittest(TypeCacheTests)

View file

@ -925,6 +925,35 @@ class UnionTests(unittest.TestCase):
assert typing.Optional[int] | str == typing.Union[int, str, None]
assert typing.Union[int, bool] | str == typing.Union[int, bool, str]
def test_or_type_operator_with_Literal(self):
Literal = typing.Literal
self.assertEqual((Literal[1] | Literal[2]).__args__,
(Literal[1], Literal[2]))
self.assertEqual((Literal[0] | Literal[False]).__args__,
(Literal[0], Literal[False]))
self.assertEqual((Literal[1] | Literal[True]).__args__,
(Literal[1], Literal[True]))
self.assertEqual(Literal[1] | Literal[1], Literal[1])
self.assertEqual(Literal['a'] | Literal['a'], Literal['a'])
import enum
class Ints(enum.IntEnum):
A = 0
B = 1
self.assertEqual(Literal[Ints.A] | Literal[Ints.A], Literal[Ints.A])
self.assertEqual(Literal[Ints.B] | Literal[Ints.B], Literal[Ints.B])
self.assertEqual((Literal[Ints.B] | Literal[Ints.A]).__args__,
(Literal[Ints.B], Literal[Ints.A]))
self.assertEqual((Literal[0] | Literal[Ints.A]).__args__,
(Literal[0], Literal[Ints.A]))
self.assertEqual((Literal[1] | Literal[Ints.B]).__args__,
(Literal[1], Literal[Ints.B]))
def test_or_type_repr(self):
assert repr(int | str) == "int | str"
assert repr((int | str) | list) == "int | str | list"

View file

@ -1805,6 +1805,11 @@ class UnionTests(BaseTestCase):
A = 0
B = 1
self.assertEqual(Union[Literal[Ints.A], Literal[Ints.A]],
Literal[Ints.A])
self.assertEqual(Union[Literal[Ints.B], Literal[Ints.B]],
Literal[Ints.B])
self.assertEqual(Union[Literal[Ints.A], Literal[Ints.B]].__args__,
(Literal[Ints.A], Literal[Ints.B]))

View file

@ -116,6 +116,17 @@ class ReferencesTestCase(TestBase):
del o
repr(wr)
def test_repr_failure_gh99184(self):
class MyConfig(dict):
def __getattr__(self, x):
return self[x]
obj = MyConfig(offset=5)
obj_weakref = weakref.ref(obj)
self.assertIn('MyConfig', repr(obj_weakref))
self.assertIn('MyConfig', str(obj_weakref))
def test_basic_callback(self):
self.check_basic_callback(C)
self.check_basic_callback(create_function)

View file

@ -982,7 +982,7 @@ Makefile Modules/config.c: Makefile.pre \
Modules/Setup.local \
Modules/Setup.bootstrap \
Modules/Setup.stdlib
$(SHELL) $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
$(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
-s Modules \
Modules/Setup.local \
Modules/Setup.stdlib \
@ -1194,7 +1194,7 @@ Tools/build/freeze_modules.py: $(FREEZE_MODULE)
.PHONY: regen-frozen
regen-frozen: Tools/build/freeze_modules.py $(FROZEN_FILES_IN)
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/freeze_modules.py
$(PYTHON_FOR_REGEN) $(srcdir)/Tools/build/freeze_modules.py --frozen-modules
@echo "The Makefile was updated, you may need to re-run make."
############################################################################
@ -2423,12 +2423,12 @@ frameworkinstallextras:
# Build the toplevel Makefile
Makefile.pre: $(srcdir)/Makefile.pre.in config.status
CONFIG_FILES=Makefile.pre CONFIG_HEADERS= $(SHELL) config.status
CONFIG_FILES=Makefile.pre CONFIG_HEADERS= ./config.status
$(MAKE) -f Makefile.pre Makefile
# Run the configure script.
config.status: $(srcdir)/configure
$(SHELL) $(srcdir)/configure $(CONFIG_ARGS)
$(srcdir)/configure $(CONFIG_ARGS)
.PRECIOUS: config.status $(BUILDPYTHON) Makefile Makefile.pre
@ -2453,8 +2453,8 @@ reindent:
# Rerun configure with the same options as it was run last time,
# provided the config.status script exists
recheck:
$(SHELL) config.status --recheck
$(SHELL) config.status
./config.status --recheck
./config.status
# Regenerate configure and pyconfig.h.in
.PHONY: autoconf

View file

@ -160,6 +160,7 @@ Brice Berna
Olivier Bernard
Vivien Bernet-Rollande
Maxwell Bernstein
Jay Berry
Eric Beser
Steven Bethard
Stephen Bevan

View file

@ -0,0 +1 @@
Add a new C-API function to eagerly assign a version tag to a PyTypeObject: ``PyUnstable_Type_AssignVersionTag()``.

View file

@ -0,0 +1,2 @@
Bypass instance attribute access of ``__name__`` in ``repr`` of
:class:`weakref.ref`.

View file

@ -0,0 +1,3 @@
We've replaced our use of ``_PyRuntime.tstate_current`` with a thread-local
variable. This is a fairly low-level implementation detail, and there
should be no change in behavior.

View file

@ -0,0 +1 @@
Add :opcode:`LOAD_SUPER_ATTR` to speed up ``super().meth()`` and ``super().attr`` calls.

View file

@ -0,0 +1 @@
Clarify :exc:`SyntaxWarning` with literal ``is`` comparison by specifying which literal is problematic, since comparisons using ``is`` with e.g. None and bool literals are idiomatic.

View file

@ -0,0 +1 @@
Fix bug in line numbers of instructions emitted for :keyword:`except* <except_star>`.

View file

@ -0,0 +1 @@
Adds three minor linting fixes to the wasm module caught that were caught by ruff.

View file

@ -0,0 +1 @@
Convert private :meth:`_posixsubprocess.fork_exec` to use Argument Clinic.

View file

@ -0,0 +1,2 @@
:mod:`socketserver` gains ``ForkingUnixStreamServer`` and
``ForkingUnixDatagramServer`` classes. Patch by Jay Berry.

View file

@ -0,0 +1,3 @@
:class:`contextlib.suppress` now supports suppressing exceptions raised as
part of an :exc:`ExceptionGroup`. If other exceptions exist on the group, they
are re-raised in a group that does not contain the suppressed exceptions.

View file

@ -75,6 +75,28 @@
static struct PyModuleDef _posixsubprocessmodule;
/*[clinic input]
module _posixsubprocess
[clinic start generated code]*/
/*[clinic end generated code: output=da39a3ee5e6b4b0d input=c62211df27cf7334]*/
/*[python input]
class pid_t_converter(CConverter):
type = 'pid_t'
format_unit = '" _Py_PARSE_PID "'
def parse_arg(self, argname, displayname):
return """
{paramname} = PyLong_AsPid({argname});
if ({paramname} == -1 && PyErr_Occurred()) {{{{
goto exit;
}}}}
""".format(argname=argname, paramname=self.parser_name)
[python start generated code]*/
/*[python end generated code: output=da39a3ee5e6b4b0d input=5af1c116d56cbb5a]*/
#include "clinic/_posixsubprocess.c.h"
/* Convert ASCII to a positive int, no libc call. no overflow. -1 on error. */
static int
_pos_int_from_ascii(const char *name)
@ -744,7 +766,7 @@ do_fork_exec(char *const exec_array[],
assert(preexec_fn == Py_None);
pid = vfork();
if (pid == -1) {
if (pid == (pid_t)-1) {
/* If vfork() fails, fall back to using fork(). When it isn't
* allowed in a process by the kernel, vfork can return -1
* with errno EINVAL. https://bugs.python.org/issue47151. */
@ -784,44 +806,81 @@ do_fork_exec(char *const exec_array[],
return 0; /* Dead code to avoid a potential compiler warning. */
}
/*[clinic input]
_posixsubprocess.fork_exec as subprocess_fork_exec
args as process_args: object
executable_list: object
close_fds: bool
pass_fds as py_fds_to_keep: object(subclass_of='&PyTuple_Type')
cwd as cwd_obj: object
env as env_list: object
p2cread: int
p2cwrite: int
c2pread: int
c2pwrite: int
errread: int
errwrite: int
errpipe_read: int
errpipe_write: int
restore_signals: bool
call_setsid: bool
pgid_to_set: pid_t
gid as gid_object: object
extra_groups as extra_groups_packed: object
uid as uid_object: object
child_umask: int
preexec_fn: object
allow_vfork: bool
/
Spawn a fresh new child process.
Fork a child process, close parent file descriptors as appropriate in the
child and duplicate the few that are needed before calling exec() in the
child process.
If close_fds is True, close file descriptors 3 and higher, except those listed
in the sorted tuple pass_fds.
The preexec_fn, if supplied, will be called immediately before closing file
descriptors and exec.
WARNING: preexec_fn is NOT SAFE if your application uses threads.
It may trigger infrequent, difficult to debug deadlocks.
If an error occurs in the child process before the exec, it is
serialized and written to the errpipe_write fd per subprocess.py.
Returns: the child process's PID.
Raises: Only on an error in the parent process.
[clinic start generated code]*/
static PyObject *
subprocess_fork_exec(PyObject *module, PyObject *args)
subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
PyObject *executable_list, int close_fds,
PyObject *py_fds_to_keep, PyObject *cwd_obj,
PyObject *env_list, int p2cread, int p2cwrite,
int c2pread, int c2pwrite, int errread,
int errwrite, int errpipe_read, int errpipe_write,
int restore_signals, int call_setsid,
pid_t pgid_to_set, PyObject *gid_object,
PyObject *extra_groups_packed,
PyObject *uid_object, int child_umask,
PyObject *preexec_fn, int allow_vfork)
/*[clinic end generated code: output=7ee4f6ee5cf22b5b input=51757287ef266ffa]*/
{
PyObject *gc_module = NULL;
PyObject *executable_list, *py_fds_to_keep;
PyObject *env_list, *preexec_fn;
PyObject *process_args, *converted_args = NULL, *fast_args = NULL;
PyObject *converted_args = NULL, *fast_args = NULL;
PyObject *preexec_fn_args_tuple = NULL;
PyObject *extra_groups_packed;
PyObject *uid_object, *gid_object;
int p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite;
int errpipe_read, errpipe_write, close_fds, restore_signals;
int call_setsid;
pid_t pgid_to_set = -1;
gid_t *extra_groups = NULL;
int child_umask;
PyObject *cwd_obj, *cwd_obj2 = NULL;
const char *cwd;
PyObject *cwd_obj2 = NULL;
const char *cwd = NULL;
pid_t pid = -1;
int need_to_reenable_gc = 0;
char *const *exec_array, *const *argv = NULL, *const *envp = NULL;
Py_ssize_t arg_num, extra_group_size = 0;
char *const *argv = NULL, *const *envp = NULL;
Py_ssize_t extra_group_size = 0;
int need_after_fork = 0;
int saved_errno = 0;
int allow_vfork;
if (!PyArg_ParseTuple(
args, "OOpO!OOiiiiiiiipp" _Py_PARSE_PID "OOOiOp:fork_exec",
&process_args, &executable_list,
&close_fds, &PyTuple_Type, &py_fds_to_keep,
&cwd_obj, &env_list,
&p2cread, &p2cwrite, &c2pread, &c2pwrite,
&errread, &errwrite, &errpipe_read, &errpipe_write,
&restore_signals, &call_setsid, &pgid_to_set,
&gid_object, &extra_groups_packed, &uid_object, &child_umask,
&preexec_fn, &allow_vfork))
return NULL;
PyInterpreterState *interp = PyInterpreterState_Get();
if ((preexec_fn != Py_None) && (interp != PyInterpreterState_Main())) {
@ -844,7 +903,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
need_to_reenable_gc = PyGC_Disable();
}
exec_array = _PySequence_BytesToCharpArray(executable_list);
char *const *exec_array = _PySequence_BytesToCharpArray(executable_list);
if (!exec_array)
goto cleanup;
@ -862,7 +921,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
converted_args = PyTuple_New(num_args);
if (converted_args == NULL)
goto cleanup;
for (arg_num = 0; arg_num < num_args; ++arg_num) {
for (Py_ssize_t arg_num = 0; arg_num < num_args; ++arg_num) {
PyObject *borrowed_arg, *converted_arg;
if (PySequence_Fast_GET_SIZE(fast_args) != num_args) {
PyErr_SetString(PyExc_RuntimeError, "args changed during iteration");
@ -891,8 +950,6 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
if (PyUnicode_FSConverter(cwd_obj, &cwd_obj2) == 0)
goto cleanup;
cwd = PyBytes_AsString(cwd_obj2);
} else {
cwd = NULL;
}
if (extra_groups_packed != Py_None) {
@ -1019,7 +1076,7 @@ subprocess_fork_exec(PyObject *module, PyObject *args)
py_fds_to_keep, preexec_fn, preexec_fn_args_tuple);
/* Parent (original) process */
if (pid == -1) {
if (pid == (pid_t)-1) {
/* Capture errno for the exception. */
saved_errno = errno;
}
@ -1068,47 +1125,17 @@ cleanup:
if (need_to_reenable_gc) {
PyGC_Enable();
}
Py_XDECREF(gc_module);
return pid == -1 ? NULL : PyLong_FromPid(pid);
}
PyDoc_STRVAR(subprocess_fork_exec_doc,
"fork_exec(args, executable_list, close_fds, pass_fds, cwd, env,\n\
p2cread, p2cwrite, c2pread, c2pwrite,\n\
errread, errwrite, errpipe_read, errpipe_write,\n\
restore_signals, call_setsid, pgid_to_set,\n\
gid, extra_groups, uid,\n\
preexec_fn)\n\
\n\
Forks a child process, closes parent file descriptors as appropriate in the\n\
child and dups the few that are needed before calling exec() in the child\n\
process.\n\
\n\
If close_fds is true, close file descriptors 3 and higher, except those listed\n\
in the sorted tuple pass_fds.\n\
\n\
The preexec_fn, if supplied, will be called immediately before closing file\n\
descriptors and exec.\n\
WARNING: preexec_fn is NOT SAFE if your application uses threads.\n\
It may trigger infrequent, difficult to debug deadlocks.\n\
\n\
If an error occurs in the child process before the exec, it is\n\
serialized and written to the errpipe_write fd per subprocess.py.\n\
\n\
Returns: the child process's PID.\n\
\n\
Raises: Only on an error in the parent process.\n\
");
/* module level code ********************************************************/
PyDoc_STRVAR(module_doc,
"A POSIX helper for the subprocess module.");
static PyMethodDef module_methods[] = {
{"fork_exec", subprocess_fork_exec, METH_VARARGS, subprocess_fork_exec_doc},
SUBPROCESS_FORK_EXEC_METHODDEF
{NULL, NULL} /* sentinel */
};

View file

@ -1482,6 +1482,7 @@ static PyObject *
run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
{
const char *code;
int use_main_obmalloc = -1;
int allow_fork = -1;
int allow_exec = -1;
int allow_threads = -1;
@ -1493,6 +1494,7 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
PyCompilerFlags cflags = {0};
static char *kwlist[] = {"code",
"use_main_obmalloc",
"allow_fork",
"allow_exec",
"allow_threads",
@ -1500,12 +1502,17 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
"check_multi_interp_extensions",
NULL};
if (!PyArg_ParseTupleAndKeywords(args, kwargs,
"s$ppppp:run_in_subinterp_with_config", kwlist,
&code, &allow_fork, &allow_exec,
"s$pppppp:run_in_subinterp_with_config", kwlist,
&code, &use_main_obmalloc,
&allow_fork, &allow_exec,
&allow_threads, &allow_daemon_threads,
&check_multi_interp_extensions)) {
return NULL;
}
if (use_main_obmalloc < 0) {
PyErr_SetString(PyExc_ValueError, "missing use_main_obmalloc");
return NULL;
}
if (allow_fork < 0) {
PyErr_SetString(PyExc_ValueError, "missing allow_fork");
return NULL;
@ -1532,6 +1539,7 @@ run_in_subinterp_with_config(PyObject *self, PyObject *args, PyObject *kwargs)
PyThreadState_Swap(NULL);
const _PyInterpreterConfig config = {
.use_main_obmalloc = use_main_obmalloc,
.allow_fork = allow_fork,
.allow_exec = allow_exec,
.allow_threads = allow_threads,
@ -2733,6 +2741,18 @@ type_get_version(PyObject *self, PyObject *type)
}
static PyObject *
type_assign_version(PyObject *self, PyObject *type)
{
if (!PyType_Check(type)) {
PyErr_SetString(PyExc_TypeError, "argument must be a type");
return NULL;
}
int res = PyUnstable_Type_AssignVersionTag((PyTypeObject *)type);
return PyLong_FromLong(res);
}
// Test PyThreadState C API
static PyObject *
test_tstate_capi(PyObject *self, PyObject *Py_UNUSED(args))
@ -3530,6 +3550,7 @@ static PyMethodDef TestMethods[] = {
{"test_py_is_macros", test_py_is_macros, METH_NOARGS},
{"test_py_is_funcs", test_py_is_funcs, METH_NOARGS},
{"type_get_version", type_get_version, METH_O, PyDoc_STR("type->tp_version_tag")},
{"type_assign_version", type_assign_version, METH_O, PyDoc_STR("PyUnstable_Type_AssignVersionTag")},
{"test_tstate_capi", test_tstate_capi, METH_NOARGS, NULL},
{"frame_getlocals", frame_getlocals, METH_O, NULL},
{"frame_getglobals", frame_getglobals, METH_O, NULL},

View file

@ -946,7 +946,7 @@ local_setattro(localobject *self, PyObject *name, PyObject *v)
}
if (r == 1) {
PyErr_Format(PyExc_AttributeError,
"'%.50s' object attribute '%U' is read-only",
"'%.100s' object attribute '%U' is read-only",
Py_TYPE(self)->tp_name, name);
return -1;
}

162
Modules/clinic/_posixsubprocess.c.h generated Normal file
View file

@ -0,0 +1,162 @@
/*[clinic input]
preserve
[clinic start generated code]*/
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
# include "pycore_gc.h" // PyGC_Head
# include "pycore_runtime.h" // _Py_ID()
#endif
PyDoc_STRVAR(subprocess_fork_exec__doc__,
"fork_exec($module, args, executable_list, close_fds, pass_fds, cwd,\n"
" env, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite,\n"
" errpipe_read, errpipe_write, restore_signals, call_setsid,\n"
" pgid_to_set, gid, extra_groups, uid, child_umask, preexec_fn,\n"
" allow_vfork, /)\n"
"--\n"
"\n"
"Spawn a fresh new child process.\n"
"\n"
"Fork a child process, close parent file descriptors as appropriate in the\n"
"child and duplicate the few that are needed before calling exec() in the\n"
"child process.\n"
"\n"
"If close_fds is True, close file descriptors 3 and higher, except those listed\n"
"in the sorted tuple pass_fds.\n"
"\n"
"The preexec_fn, if supplied, will be called immediately before closing file\n"
"descriptors and exec.\n"
"\n"
"WARNING: preexec_fn is NOT SAFE if your application uses threads.\n"
" It may trigger infrequent, difficult to debug deadlocks.\n"
"\n"
"If an error occurs in the child process before the exec, it is\n"
"serialized and written to the errpipe_write fd per subprocess.py.\n"
"\n"
"Returns: the child process\'s PID.\n"
"\n"
"Raises: Only on an error in the parent process.");
#define SUBPROCESS_FORK_EXEC_METHODDEF \
{"fork_exec", _PyCFunction_CAST(subprocess_fork_exec), METH_FASTCALL, subprocess_fork_exec__doc__},
static PyObject *
subprocess_fork_exec_impl(PyObject *module, PyObject *process_args,
PyObject *executable_list, int close_fds,
PyObject *py_fds_to_keep, PyObject *cwd_obj,
PyObject *env_list, int p2cread, int p2cwrite,
int c2pread, int c2pwrite, int errread,
int errwrite, int errpipe_read, int errpipe_write,
int restore_signals, int call_setsid,
pid_t pgid_to_set, PyObject *gid_object,
PyObject *extra_groups_packed,
PyObject *uid_object, int child_umask,
PyObject *preexec_fn, int allow_vfork);
static PyObject *
subprocess_fork_exec(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
{
PyObject *return_value = NULL;
PyObject *process_args;
PyObject *executable_list;
int close_fds;
PyObject *py_fds_to_keep;
PyObject *cwd_obj;
PyObject *env_list;
int p2cread;
int p2cwrite;
int c2pread;
int c2pwrite;
int errread;
int errwrite;
int errpipe_read;
int errpipe_write;
int restore_signals;
int call_setsid;
pid_t pgid_to_set;
PyObject *gid_object;
PyObject *extra_groups_packed;
PyObject *uid_object;
int child_umask;
PyObject *preexec_fn;
int allow_vfork;
if (!_PyArg_CheckPositional("fork_exec", nargs, 23, 23)) {
goto exit;
}
process_args = args[0];
executable_list = args[1];
close_fds = PyObject_IsTrue(args[2]);
if (close_fds < 0) {
goto exit;
}
if (!PyTuple_Check(args[3])) {
_PyArg_BadArgument("fork_exec", "argument 4", "tuple", args[3]);
goto exit;
}
py_fds_to_keep = args[3];
cwd_obj = args[4];
env_list = args[5];
p2cread = _PyLong_AsInt(args[6]);
if (p2cread == -1 && PyErr_Occurred()) {
goto exit;
}
p2cwrite = _PyLong_AsInt(args[7]);
if (p2cwrite == -1 && PyErr_Occurred()) {
goto exit;
}
c2pread = _PyLong_AsInt(args[8]);
if (c2pread == -1 && PyErr_Occurred()) {
goto exit;
}
c2pwrite = _PyLong_AsInt(args[9]);
if (c2pwrite == -1 && PyErr_Occurred()) {
goto exit;
}
errread = _PyLong_AsInt(args[10]);
if (errread == -1 && PyErr_Occurred()) {
goto exit;
}
errwrite = _PyLong_AsInt(args[11]);
if (errwrite == -1 && PyErr_Occurred()) {
goto exit;
}
errpipe_read = _PyLong_AsInt(args[12]);
if (errpipe_read == -1 && PyErr_Occurred()) {
goto exit;
}
errpipe_write = _PyLong_AsInt(args[13]);
if (errpipe_write == -1 && PyErr_Occurred()) {
goto exit;
}
restore_signals = PyObject_IsTrue(args[14]);
if (restore_signals < 0) {
goto exit;
}
call_setsid = PyObject_IsTrue(args[15]);
if (call_setsid < 0) {
goto exit;
}
pgid_to_set = PyLong_AsPid(args[16]);
if (pgid_to_set == -1 && PyErr_Occurred()) {
goto exit;
}
gid_object = args[17];
extra_groups_packed = args[18];
uid_object = args[19];
child_umask = _PyLong_AsInt(args[20]);
if (child_umask == -1 && PyErr_Occurred()) {
goto exit;
}
preexec_fn = args[21];
allow_vfork = PyObject_IsTrue(args[22]);
if (allow_vfork < 0) {
goto exit;
}
return_value = subprocess_fork_exec_impl(module, process_args, executable_list, close_fds, py_fds_to_keep, cwd_obj, env_list, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, errpipe_read, errpipe_write, restore_signals, call_setsid, pgid_to_set, gid_object, extra_groups_packed, uid_object, child_umask, preexec_fn, allow_vfork);
exit:
return return_value;
}
/*[clinic end generated code: output=46d71e86845c93d7 input=a9049054013a1b77]*/

View file

@ -145,7 +145,7 @@ _PyDebug_PrintTotalRefs(void) {
_PyRuntimeState *runtime = &_PyRuntime;
fprintf(stderr,
"[%zd refs, %zd blocks]\n",
get_global_reftotal(runtime), _Py_GetAllocatedBlocks());
get_global_reftotal(runtime), _Py_GetGlobalAllocatedBlocks());
/* It may be helpful to also print the "legacy" reftotal separately.
Likewise for the total for each interpreter. */
}
@ -1033,7 +1033,7 @@ PyObject_GetAttr(PyObject *v, PyObject *name)
}
else {
PyErr_Format(PyExc_AttributeError,
"'%.50s' object has no attribute '%U'",
"'%.100s' object has no attribute '%U'",
tp->tp_name, name);
}
@ -1353,7 +1353,7 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method)
}
PyErr_Format(PyExc_AttributeError,
"'%.50s' object has no attribute '%U'",
"'%.100s' object has no attribute '%U'",
tp->tp_name, name);
set_attribute_error_context(obj, name);
@ -1474,7 +1474,7 @@ _PyObject_GenericGetAttrWithDict(PyObject *obj, PyObject *name,
if (!suppress) {
PyErr_Format(PyExc_AttributeError,
"'%.50s' object has no attribute '%U'",
"'%.100s' object has no attribute '%U'",
tp->tp_name, name);
set_attribute_error_context(obj, name);
@ -1545,7 +1545,7 @@ _PyObject_GenericSetAttrWithDict(PyObject *obj, PyObject *name,
}
else {
PyErr_Format(PyExc_AttributeError,
"'%.50s' object attribute '%U' is read-only",
"'%.100s' object attribute '%U' is read-only",
tp->tp_name, name);
}
goto done;

View file

@ -725,20 +725,51 @@ PyObject_Free(void *ptr)
static int running_on_valgrind = -1;
#endif
typedef struct _obmalloc_state OMState;
#define allarenas (_PyRuntime.obmalloc.mgmt.arenas)
#define maxarenas (_PyRuntime.obmalloc.mgmt.maxarenas)
#define unused_arena_objects (_PyRuntime.obmalloc.mgmt.unused_arena_objects)
#define usable_arenas (_PyRuntime.obmalloc.mgmt.usable_arenas)
#define nfp2lasta (_PyRuntime.obmalloc.mgmt.nfp2lasta)
#define narenas_currently_allocated (_PyRuntime.obmalloc.mgmt.narenas_currently_allocated)
#define ntimes_arena_allocated (_PyRuntime.obmalloc.mgmt.ntimes_arena_allocated)
#define narenas_highwater (_PyRuntime.obmalloc.mgmt.narenas_highwater)
#define raw_allocated_blocks (_PyRuntime.obmalloc.mgmt.raw_allocated_blocks)
static inline int
has_own_state(PyInterpreterState *interp)
{
return (_Py_IsMainInterpreter(interp) ||
!(interp->feature_flags & Py_RTFLAGS_USE_MAIN_OBMALLOC) ||
_Py_IsMainInterpreterFinalizing(interp));
}
static inline OMState *
get_state(void)
{
PyInterpreterState *interp = _PyInterpreterState_GET();
if (!has_own_state(interp)) {
interp = _PyInterpreterState_Main();
}
return &interp->obmalloc;
}
// These macros all rely on a local "state" variable.
#define usedpools (state->pools.used)
#define allarenas (state->mgmt.arenas)
#define maxarenas (state->mgmt.maxarenas)
#define unused_arena_objects (state->mgmt.unused_arena_objects)
#define usable_arenas (state->mgmt.usable_arenas)
#define nfp2lasta (state->mgmt.nfp2lasta)
#define narenas_currently_allocated (state->mgmt.narenas_currently_allocated)
#define ntimes_arena_allocated (state->mgmt.ntimes_arena_allocated)
#define narenas_highwater (state->mgmt.narenas_highwater)
#define raw_allocated_blocks (state->mgmt.raw_allocated_blocks)
Py_ssize_t
_Py_GetAllocatedBlocks(void)
_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *interp)
{
#ifdef Py_DEBUG
assert(has_own_state(interp));
#else
if (!has_own_state(interp)) {
_Py_FatalErrorFunc(__func__,
"the interpreter doesn't have its own allocator");
}
#endif
OMState *state = &interp->obmalloc;
Py_ssize_t n = raw_allocated_blocks;
/* add up allocated blocks for used pools */
for (uint i = 0; i < maxarenas; ++i) {
@ -759,20 +790,100 @@ _Py_GetAllocatedBlocks(void)
return n;
}
void
_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *interp)
{
if (has_own_state(interp)) {
Py_ssize_t leaked = _PyInterpreterState_GetAllocatedBlocks(interp);
assert(has_own_state(interp) || leaked == 0);
interp->runtime->obmalloc.interpreter_leaks += leaked;
}
}
static Py_ssize_t get_num_global_allocated_blocks(_PyRuntimeState *);
/* We preserve the number of blockss leaked during runtime finalization,
so they can be reported if the runtime is initialized again. */
// XXX We don't lose any information by dropping this,
// so we should consider doing so.
static Py_ssize_t last_final_leaks = 0;
void
_Py_FinalizeAllocatedBlocks(_PyRuntimeState *runtime)
{
last_final_leaks = get_num_global_allocated_blocks(runtime);
runtime->obmalloc.interpreter_leaks = 0;
}
static Py_ssize_t
get_num_global_allocated_blocks(_PyRuntimeState *runtime)
{
Py_ssize_t total = 0;
if (_PyRuntimeState_GetFinalizing(runtime) != NULL) {
PyInterpreterState *interp = _PyInterpreterState_Main();
if (interp == NULL) {
/* We are at the very end of runtime finalization.
We can't rely on finalizing->interp since that thread
state is probably already freed, so we don't worry
about it. */
assert(PyInterpreterState_Head() == NULL);
}
else {
assert(interp != NULL);
/* It is probably the last interpreter but not necessarily. */
assert(PyInterpreterState_Next(interp) == NULL);
total += _PyInterpreterState_GetAllocatedBlocks(interp);
}
}
else {
HEAD_LOCK(runtime);
PyInterpreterState *interp = PyInterpreterState_Head();
assert(interp != NULL);
#ifdef Py_DEBUG
int got_main = 0;
#endif
for (; interp != NULL; interp = PyInterpreterState_Next(interp)) {
#ifdef Py_DEBUG
if (_Py_IsMainInterpreter(interp)) {
assert(!got_main);
got_main = 1;
assert(has_own_state(interp));
}
#endif
if (has_own_state(interp)) {
total += _PyInterpreterState_GetAllocatedBlocks(interp);
}
}
HEAD_UNLOCK(runtime);
#ifdef Py_DEBUG
assert(got_main);
#endif
}
total += runtime->obmalloc.interpreter_leaks;
total += last_final_leaks;
return total;
}
Py_ssize_t
_Py_GetGlobalAllocatedBlocks(void)
{
return get_num_global_allocated_blocks(&_PyRuntime);
}
#if WITH_PYMALLOC_RADIX_TREE
/*==========================================================================*/
/* radix tree for tracking arena usage. */
#define arena_map_root (_PyRuntime.obmalloc.usage.arena_map_root)
#define arena_map_root (state->usage.arena_map_root)
#ifdef USE_INTERIOR_NODES
#define arena_map_mid_count (_PyRuntime.obmalloc.usage.arena_map_mid_count)
#define arena_map_bot_count (_PyRuntime.obmalloc.usage.arena_map_bot_count)
#define arena_map_mid_count (state->usage.arena_map_mid_count)
#define arena_map_bot_count (state->usage.arena_map_bot_count)
#endif
/* Return a pointer to a bottom tree node, return NULL if it doesn't exist or
* it cannot be created */
static Py_ALWAYS_INLINE arena_map_bot_t *
arena_map_get(pymem_block *p, int create)
arena_map_get(OMState *state, pymem_block *p, int create)
{
#ifdef USE_INTERIOR_NODES
/* sanity check that IGNORE_BITS is correct */
@ -833,11 +944,12 @@ arena_map_get(pymem_block *p, int create)
/* mark or unmark addresses covered by arena */
static int
arena_map_mark_used(uintptr_t arena_base, int is_used)
arena_map_mark_used(OMState *state, uintptr_t arena_base, int is_used)
{
/* sanity check that IGNORE_BITS is correct */
assert(HIGH_BITS(arena_base) == HIGH_BITS(&arena_map_root));
arena_map_bot_t *n_hi = arena_map_get((pymem_block *)arena_base, is_used);
arena_map_bot_t *n_hi = arena_map_get(
state, (pymem_block *)arena_base, is_used);
if (n_hi == NULL) {
assert(is_used); /* otherwise node should already exist */
return 0; /* failed to allocate space for node */
@ -862,7 +974,8 @@ arena_map_mark_used(uintptr_t arena_base, int is_used)
* must overflow to 0. However, that would mean arena_base was
* "ideal" and we should not be in this case. */
assert(arena_base < arena_base_next);
arena_map_bot_t *n_lo = arena_map_get((pymem_block *)arena_base_next, is_used);
arena_map_bot_t *n_lo = arena_map_get(
state, (pymem_block *)arena_base_next, is_used);
if (n_lo == NULL) {
assert(is_used); /* otherwise should already exist */
n_hi->arenas[i3].tail_hi = 0;
@ -877,9 +990,9 @@ arena_map_mark_used(uintptr_t arena_base, int is_used)
/* Return true if 'p' is a pointer inside an obmalloc arena.
* _PyObject_Free() calls this so it needs to be very fast. */
static int
arena_map_is_used(pymem_block *p)
arena_map_is_used(OMState *state, pymem_block *p)
{
arena_map_bot_t *n = arena_map_get(p, 0);
arena_map_bot_t *n = arena_map_get(state, p, 0);
if (n == NULL) {
return 0;
}
@ -902,7 +1015,7 @@ arena_map_is_used(pymem_block *p)
* `usable_arenas` to the return value.
*/
static struct arena_object*
new_arena(void)
new_arena(OMState *state)
{
struct arena_object* arenaobj;
uint excess; /* number of bytes above pool alignment */
@ -968,7 +1081,7 @@ new_arena(void)
address = _PyObject_Arena.alloc(_PyObject_Arena.ctx, ARENA_SIZE);
#if WITH_PYMALLOC_RADIX_TREE
if (address != NULL) {
if (!arena_map_mark_used((uintptr_t)address, 1)) {
if (!arena_map_mark_used(state, (uintptr_t)address, 1)) {
/* marking arena in radix tree failed, abort */
_PyObject_Arena.free(_PyObject_Arena.ctx, address, ARENA_SIZE);
address = NULL;
@ -1011,9 +1124,9 @@ new_arena(void)
pymalloc. When the radix tree is used, 'poolp' is unused.
*/
static bool
address_in_range(void *p, poolp Py_UNUSED(pool))
address_in_range(OMState *state, void *p, poolp Py_UNUSED(pool))
{
return arena_map_is_used(p);
return arena_map_is_used(state, p);
}
#else
/*
@ -1094,7 +1207,7 @@ extremely desirable that it be this fast.
static bool _Py_NO_SANITIZE_ADDRESS
_Py_NO_SANITIZE_THREAD
_Py_NO_SANITIZE_MEMORY
address_in_range(void *p, poolp pool)
address_in_range(OMState *state, void *p, poolp pool)
{
// Since address_in_range may be reading from memory which was not allocated
// by Python, it is important that pool->arenaindex is read only once, as
@ -1111,8 +1224,6 @@ address_in_range(void *p, poolp pool)
/*==========================================================================*/
#define usedpools (_PyRuntime.obmalloc.pools.used)
// Called when freelist is exhausted. Extend the freelist if there is
// space for a block. Otherwise, remove this pool from usedpools.
static void
@ -1138,7 +1249,7 @@ pymalloc_pool_extend(poolp pool, uint size)
* This function takes new pool and allocate a block from it.
*/
static void*
allocate_from_new_pool(uint size)
allocate_from_new_pool(OMState *state, uint size)
{
/* There isn't a pool of the right size class immediately
* available: use a free pool.
@ -1150,7 +1261,7 @@ allocate_from_new_pool(uint size)
return NULL;
}
#endif
usable_arenas = new_arena();
usable_arenas = new_arena(state);
if (usable_arenas == NULL) {
return NULL;
}
@ -1274,7 +1385,7 @@ allocate_from_new_pool(uint size)
or when the max memory limit has been reached.
*/
static inline void*
pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes)
pymalloc_alloc(OMState *state, void *Py_UNUSED(ctx), size_t nbytes)
{
#ifdef WITH_VALGRIND
if (UNLIKELY(running_on_valgrind == -1)) {
@ -1314,7 +1425,7 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes)
/* There isn't a pool of the right size class immediately
* available: use a free pool.
*/
bp = allocate_from_new_pool(size);
bp = allocate_from_new_pool(state, size);
}
return (void *)bp;
@ -1324,7 +1435,8 @@ pymalloc_alloc(void *Py_UNUSED(ctx), size_t nbytes)
void *
_PyObject_Malloc(void *ctx, size_t nbytes)
{
void* ptr = pymalloc_alloc(ctx, nbytes);
OMState *state = get_state();
void* ptr = pymalloc_alloc(state, ctx, nbytes);
if (LIKELY(ptr != NULL)) {
return ptr;
}
@ -1343,7 +1455,8 @@ _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize)
assert(elsize == 0 || nelem <= (size_t)PY_SSIZE_T_MAX / elsize);
size_t nbytes = nelem * elsize;
void* ptr = pymalloc_alloc(ctx, nbytes);
OMState *state = get_state();
void* ptr = pymalloc_alloc(state, ctx, nbytes);
if (LIKELY(ptr != NULL)) {
memset(ptr, 0, nbytes);
return ptr;
@ -1358,7 +1471,7 @@ _PyObject_Calloc(void *ctx, size_t nelem, size_t elsize)
static void
insert_to_usedpool(poolp pool)
insert_to_usedpool(OMState *state, poolp pool)
{
assert(pool->ref.count > 0); /* else the pool is empty */
@ -1374,7 +1487,7 @@ insert_to_usedpool(poolp pool)
}
static void
insert_to_freepool(poolp pool)
insert_to_freepool(OMState *state, poolp pool)
{
poolp next = pool->nextpool;
poolp prev = pool->prevpool;
@ -1457,7 +1570,7 @@ insert_to_freepool(poolp pool)
#if WITH_PYMALLOC_RADIX_TREE
/* mark arena region as not under control of obmalloc */
arena_map_mark_used(ao->address, 0);
arena_map_mark_used(state, ao->address, 0);
#endif
/* Free the entire arena. */
@ -1544,7 +1657,7 @@ insert_to_freepool(poolp pool)
Return 1 if it was freed.
Return 0 if the block was not allocated by pymalloc_alloc(). */
static inline int
pymalloc_free(void *Py_UNUSED(ctx), void *p)
pymalloc_free(OMState *state, void *Py_UNUSED(ctx), void *p)
{
assert(p != NULL);
@ -1555,7 +1668,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p)
#endif
poolp pool = POOL_ADDR(p);
if (UNLIKELY(!address_in_range(p, pool))) {
if (UNLIKELY(!address_in_range(state, p, pool))) {
return 0;
}
/* We allocated this address. */
@ -1579,7 +1692,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p)
* targets optimal filling when several pools contain
* blocks of the same size class.
*/
insert_to_usedpool(pool);
insert_to_usedpool(state, pool);
return 1;
}
@ -1596,7 +1709,7 @@ pymalloc_free(void *Py_UNUSED(ctx), void *p)
* previously freed pools will be allocated later
* (being not referenced, they are perhaps paged out).
*/
insert_to_freepool(pool);
insert_to_freepool(state, pool);
return 1;
}
@ -1609,7 +1722,8 @@ _PyObject_Free(void *ctx, void *p)
return;
}
if (UNLIKELY(!pymalloc_free(ctx, p))) {
OMState *state = get_state();
if (UNLIKELY(!pymalloc_free(state, ctx, p))) {
/* pymalloc didn't allocate this address */
PyMem_RawFree(p);
raw_allocated_blocks--;
@ -1627,7 +1741,8 @@ _PyObject_Free(void *ctx, void *p)
Return 0 if pymalloc didn't allocated p. */
static int
pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes)
pymalloc_realloc(OMState *state, void *ctx,
void **newptr_p, void *p, size_t nbytes)
{
void *bp;
poolp pool;
@ -1643,7 +1758,7 @@ pymalloc_realloc(void *ctx, void **newptr_p, void *p, size_t nbytes)
#endif
pool = POOL_ADDR(p);
if (!address_in_range(p, pool)) {
if (!address_in_range(state, p, pool)) {
/* pymalloc is not managing this block.
If nbytes <= SMALL_REQUEST_THRESHOLD, it's tempting to try to take
@ -1696,7 +1811,8 @@ _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes)
return _PyObject_Malloc(ctx, nbytes);
}
if (pymalloc_realloc(ctx, &ptr2, ptr, nbytes)) {
OMState *state = get_state();
if (pymalloc_realloc(state, ctx, &ptr2, ptr, nbytes)) {
return ptr2;
}
@ -1710,11 +1826,29 @@ _PyObject_Realloc(void *ctx, void *ptr, size_t nbytes)
* only be used by extensions that are compiled with pymalloc enabled. */
Py_ssize_t
_Py_GetAllocatedBlocks(void)
_PyInterpreterState_GetAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp))
{
return 0;
}
Py_ssize_t
_Py_GetGlobalAllocatedBlocks(void)
{
return 0;
}
void
_PyInterpreterState_FinalizeAllocatedBlocks(PyInterpreterState *Py_UNUSED(interp))
{
return;
}
void
_Py_FinalizeAllocatedBlocks(_PyRuntimeState *Py_UNUSED(runtime))
{
return;
}
#endif /* WITH_PYMALLOC */
@ -2289,6 +2423,7 @@ _PyObject_DebugMallocStats(FILE *out)
if (!_PyMem_PymallocEnabled()) {
return 0;
}
OMState *state = get_state();
uint i;
const uint numclasses = SMALL_REQUEST_THRESHOLD >> ALIGNMENT_SHIFT;

View file

@ -45,7 +45,9 @@ class object "PyObject *" "&PyBaseObject_Type"
PyUnicode_IS_READY(name) && \
(PyUnicode_GET_LENGTH(name) <= MCACHE_MAX_ATTR_SIZE)
#define next_version_tag (_PyRuntime.types.next_version_tag)
#define NEXT_GLOBAL_VERSION_TAG _PyRuntime.types.next_version_tag
#define NEXT_VERSION_TAG(interp) \
(interp)->types.next_version_tag
typedef struct PySlot_Offset {
short subslot_offset;
@ -332,7 +334,7 @@ _PyType_ClearCache(PyInterpreterState *interp)
// use Py_SETREF() rather than using slower Py_XSETREF().
type_cache_clear(cache, Py_None);
return next_version_tag - 1;
return NEXT_VERSION_TAG(interp) - 1;
}
@ -401,7 +403,7 @@ PyType_ClearWatcher(int watcher_id)
return 0;
}
static int assign_version_tag(PyTypeObject *type);
static int assign_version_tag(PyInterpreterState *interp, PyTypeObject *type);
int
PyType_Watch(int watcher_id, PyObject* obj)
@ -416,7 +418,7 @@ PyType_Watch(int watcher_id, PyObject* obj)
return -1;
}
// ensure we will get a callback on the next modification
assign_version_tag(type);
assign_version_tag(interp, type);
type->tp_watched |= (1 << watcher_id);
return 0;
}
@ -549,7 +551,9 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) {
}
}
return;
clear:
assert(!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN));
type->tp_flags &= ~Py_TPFLAGS_VALID_VERSION_TAG;
type->tp_version_tag = 0; /* 0 is not a valid version tag */
if (PyType_HasFeature(type, Py_TPFLAGS_HEAPTYPE)) {
@ -560,7 +564,7 @@ type_mro_modified(PyTypeObject *type, PyObject *bases) {
}
static int
assign_version_tag(PyTypeObject *type)
assign_version_tag(PyInterpreterState *interp, PyTypeObject *type)
{
/* Ensure that the tp_version_tag is valid and set
Py_TPFLAGS_VALID_VERSION_TAG. To respect the invariant, this
@ -574,24 +578,42 @@ assign_version_tag(PyTypeObject *type)
return 0;
}
if (next_version_tag == 0) {
/* We have run out of version numbers */
return 0;
if (type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) {
/* static types */
if (NEXT_GLOBAL_VERSION_TAG > _Py_MAX_GLOBAL_TYPE_VERSION_TAG) {
/* We have run out of version numbers */
return 0;
}
type->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++;
assert (type->tp_version_tag <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG);
}
else {
/* heap types */
if (NEXT_VERSION_TAG(interp) == 0) {
/* We have run out of version numbers */
return 0;
}
type->tp_version_tag = NEXT_VERSION_TAG(interp)++;
assert (type->tp_version_tag != 0);
}
type->tp_version_tag = next_version_tag++;
assert (type->tp_version_tag != 0);
PyObject *bases = type->tp_bases;
Py_ssize_t n = PyTuple_GET_SIZE(bases);
for (Py_ssize_t i = 0; i < n; i++) {
PyObject *b = PyTuple_GET_ITEM(bases, i);
if (!assign_version_tag(_PyType_CAST(b)))
if (!assign_version_tag(interp, _PyType_CAST(b)))
return 0;
}
type->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG;
return 1;
}
int PyUnstable_Type_AssignVersionTag(PyTypeObject *type)
{
PyInterpreterState *interp = _PyInterpreterState_GET();
return assign_version_tag(interp, type);
}
static PyMemberDef type_members[] = {
{"__basicsize__", T_PYSSIZET, offsetof(PyTypeObject,tp_basicsize),READONLY},
@ -2341,7 +2363,15 @@ mro_internal(PyTypeObject *type, PyObject **p_old_mro)
from the custom MRO */
type_mro_modified(type, type->tp_bases);
PyType_Modified(type);
// XXX Expand this to Py_TPFLAGS_IMMUTABLETYPE?
if (!(type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN)) {
PyType_Modified(type);
}
else {
/* For static builtin types, this is only called during init
before the method cache has been populated. */
assert(_PyType_HasFeature(type, Py_TPFLAGS_VALID_VERSION_TAG));
}
if (p_old_mro != NULL)
*p_old_mro = old_mro; /* transfer the ownership */
@ -4176,6 +4206,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name)
{
PyObject *res;
int error;
PyInterpreterState *interp = _PyInterpreterState_GET();
unsigned int h = MCACHE_HASH_METHOD(type, name);
struct type_cache *cache = get_type_cache();
@ -4210,7 +4241,7 @@ _PyType_Lookup(PyTypeObject *type, PyObject *name)
return NULL;
}
if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(type)) {
if (MCACHE_CACHEABLE_NAME(name) && assign_version_tag(interp, type)) {
h = MCACHE_HASH_METHOD(type, name);
struct type_cache_entry *entry = &cache->hashtable[h];
entry->version = type->tp_version_tag;
@ -4328,7 +4359,7 @@ _Py_type_getattro_impl(PyTypeObject *type, PyObject *name, int * suppress_missin
/* Give up */
if (suppress_missing_attribute == NULL) {
PyErr_Format(PyExc_AttributeError,
"type object '%.50s' has no attribute '%U'",
"type object '%.100s' has no attribute '%U'",
type->tp_name, name);
} else {
// signal the caller we have not set an PyExc_AttributeError and gave up
@ -6671,8 +6702,11 @@ type_ready_mro(PyTypeObject *type)
assert(type->tp_mro != NULL);
assert(PyTuple_Check(type->tp_mro));
/* All bases of statically allocated type should be statically allocated */
/* All bases of statically allocated type should be statically allocated,
and static builtin types must have static builtin bases. */
if (!(type->tp_flags & Py_TPFLAGS_HEAPTYPE)) {
assert(type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE);
int isbuiltin = type->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN;
PyObject *mro = type->tp_mro;
Py_ssize_t n = PyTuple_GET_SIZE(mro);
for (Py_ssize_t i = 0; i < n; i++) {
@ -6684,6 +6718,7 @@ type_ready_mro(PyTypeObject *type)
type->tp_name, base->tp_name);
return -1;
}
assert(!isbuiltin || (base->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN));
}
}
return 0;
@ -6995,7 +7030,11 @@ PyType_Ready(PyTypeObject *type)
int
_PyStaticType_InitBuiltin(PyTypeObject *self)
{
self->tp_flags = self->tp_flags | _Py_TPFLAGS_STATIC_BUILTIN;
self->tp_flags |= _Py_TPFLAGS_STATIC_BUILTIN;
assert(NEXT_GLOBAL_VERSION_TAG <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG);
self->tp_version_tag = NEXT_GLOBAL_VERSION_TAG++;
self->tp_flags |= Py_TPFLAGS_VALID_VERSION_TAG;
static_builtin_state_init(self);
@ -9341,26 +9380,20 @@ super_repr(PyObject *self)
su->type ? su->type->tp_name : "NULL");
}
// if `method` is non-NULL, we are looking for a method descriptor,
// and setting `*method` to 1 means we found one.
static PyObject *
super_getattro(PyObject *self, PyObject *name)
do_super_lookup(superobject *su, PyTypeObject *su_type, PyObject *su_obj,
PyTypeObject *su_obj_type, PyObject *name, int *method)
{
superobject *su = (superobject *)self;
PyTypeObject *starttype;
PyObject *mro;
PyObject *mro, *res;
Py_ssize_t i, n;
int temp_su = 0;
starttype = su->obj_type;
if (starttype == NULL)
if (su_obj_type == NULL)
goto skip;
/* We want __class__ to return the class of the super object
(i.e. super, or a subclass), not the class of su->obj. */
if (PyUnicode_Check(name) &&
PyUnicode_GET_LENGTH(name) == 9 &&
_PyUnicode_Equal(name, &_Py_ID(__class__)))
goto skip;
mro = starttype->tp_mro;
mro = su_obj_type->tp_mro;
if (mro == NULL)
goto skip;
@ -9369,14 +9402,14 @@ super_getattro(PyObject *self, PyObject *name)
/* No need to check the last one: it's gonna be skipped anyway. */
for (i = 0; i+1 < n; i++) {
if ((PyObject *)(su->type) == PyTuple_GET_ITEM(mro, i))
if ((PyObject *)(su_type) == PyTuple_GET_ITEM(mro, i))
break;
}
i++; /* skip su->type (if any) */
if (i >= n)
goto skip;
/* keep a strong reference to mro because starttype->tp_mro can be
/* keep a strong reference to mro because su_obj_type->tp_mro can be
replaced during PyDict_GetItemWithError(dict, name) */
Py_INCREF(mro);
do {
@ -9384,19 +9417,23 @@ super_getattro(PyObject *self, PyObject *name)
PyObject *dict = _PyType_CAST(obj)->tp_dict;
assert(dict != NULL && PyDict_Check(dict));
PyObject *res = PyDict_GetItemWithError(dict, name);
res = PyDict_GetItemWithError(dict, name);
if (res != NULL) {
Py_INCREF(res);
descrgetfunc f = Py_TYPE(res)->tp_descr_get;
if (f != NULL) {
PyObject *res2;
res2 = f(res,
/* Only pass 'obj' param if this is instance-mode super
(See SF ID #743627) */
(su->obj == (PyObject *)starttype) ? NULL : su->obj,
(PyObject *)starttype);
Py_SETREF(res, res2);
if (method && _PyType_HasFeature(Py_TYPE(res), Py_TPFLAGS_METHOD_DESCRIPTOR)) {
*method = 1;
}
else {
descrgetfunc f = Py_TYPE(res)->tp_descr_get;
if (f != NULL) {
PyObject *res2;
res2 = f(res,
/* Only pass 'obj' param if this is instance-mode super
(See SF ID #743627) */
(su_obj == (PyObject *)su_obj_type) ? NULL : su_obj,
(PyObject *)su_obj_type);
Py_SETREF(res, res2);
}
}
Py_DECREF(mro);
@ -9412,7 +9449,34 @@ super_getattro(PyObject *self, PyObject *name)
Py_DECREF(mro);
skip:
return PyObject_GenericGetAttr(self, name);
if (su == NULL) {
PyObject *args[] = {(PyObject *)su_type, su_obj};
su = (superobject *)PyObject_Vectorcall((PyObject *)&PySuper_Type, args, 2, NULL);
if (su == NULL) {
return NULL;
}
temp_su = 1;
}
res = PyObject_GenericGetAttr((PyObject *)su, name);
if (temp_su) {
Py_DECREF(su);
}
return res;
}
static PyObject *
super_getattro(PyObject *self, PyObject *name)
{
superobject *su = (superobject *)self;
/* We want __class__ to return the class of the super object
(i.e. super, or a subclass), not the class of su->obj. */
if (PyUnicode_Check(name) &&
PyUnicode_GET_LENGTH(name) == 9 &&
_PyUnicode_Equal(name, &_Py_ID(__class__)))
return PyObject_GenericGetAttr(self, name);
return do_super_lookup(su, su->type, su->obj, su->obj_type, name, NULL);
}
static PyTypeObject *
@ -9468,6 +9532,18 @@ supercheck(PyTypeObject *type, PyObject *obj)
return NULL;
}
PyObject *
_PySuper_Lookup(PyTypeObject *su_type, PyObject *su_obj, PyObject *name, int *method)
{
PyTypeObject *su_obj_type = supercheck(su_type, su_obj);
if (su_obj_type == NULL) {
return NULL;
}
PyObject *res = do_super_lookup(NULL, su_type, su_obj, su_obj_type, name, method);
Py_DECREF(su_obj_type);
return res;
}
static PyObject *
super_descr_get(PyObject *self, PyObject *obj, PyObject *type)
{

View file

@ -170,10 +170,7 @@ weakref_repr(PyWeakReference *self)
}
Py_INCREF(obj);
if (_PyObject_LookupAttr(obj, &_Py_ID(__name__), &name) < 0) {
Py_DECREF(obj);
return NULL;
}
name = _PyObject_LookupSpecial(obj, &_Py_ID(__name__));
if (name == NULL || !PyUnicode_Check(name)) {
repr = PyUnicode_FromFormat(
"<weakref at %p; to '%s' at %p>",

View file

@ -2481,19 +2481,21 @@ tok_get_fstring_mode(struct tok_state *tok, tokenizer_mode* current_tok, struct
// If we start with a bracket, we defer to the normal mode as there is nothing for us to tokenize
// before it.
int start_char = tok_nextc(tok);
int peek1 = tok_nextc(tok);
tok_backup(tok, peek1);
tok_backup(tok, start_char);
if ((start_char == '{' && peek1 != '{') || (start_char == '}' && peek1 != '}')) {
if (start_char == '{') {
if (start_char == '{') {
int peek1 = tok_nextc(tok);
tok_backup(tok, peek1);
tok_backup(tok, start_char);
if (peek1 != '{') {
current_tok->curly_bracket_expr_start_depth++;
if (current_tok->curly_bracket_expr_start_depth >= MAX_EXPR_NESTING) {
return MAKE_TOKEN(syntaxerror(tok, "f-string: expressions nested too deeply"));
}
TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
return tok_get_normal_mode(tok, current_tok, token);
}
TOK_GET_MODE(tok)->kind = TOK_REGULAR_MODE;
return tok_get_normal_mode(tok, current_tok, token);
}
else {
tok_backup(tok, start_char);
}
// Check if we are at the end of the string

View file

@ -25,6 +25,7 @@
#include "pycore_sliceobject.h" // _PyBuildSlice_ConsumeRefs
#include "pycore_sysmodule.h" // _PySys_Audit()
#include "pycore_tuple.h" // _PyTuple_ITEMS()
#include "pycore_typeobject.h" // _PySuper_Lookup()
#include "pycore_emscripten_signal.h" // _Py_CHECK_EMSCRIPTEN_SIGNALS
#include "pycore_dict.h"
@ -1553,6 +1554,36 @@ dummy_func(
PREDICT(JUMP_BACKWARD);
}
inst(LOAD_SUPER_ATTR, (global_super, class, self -- res2 if (oparg & 1), res)) {
PyObject *name = GETITEM(frame->f_code->co_names, oparg >> 2);
if (global_super == (PyObject *)&PySuper_Type && PyType_Check(class)) {
int method = 0;
Py_DECREF(global_super);
res = _PySuper_Lookup((PyTypeObject *)class, self, name, oparg & 1 ? &method : NULL);
Py_DECREF(class);
if (res == NULL) {
Py_DECREF(self);
ERROR_IF(true, error);
}
// Works with CALL, pushes two values: either `meth | self` or `NULL | meth`.
if (method) {
res2 = res;
res = self; // transfer ownership
} else {
res2 = NULL;
Py_DECREF(self);
}
} else {
PyObject *stack[] = {class, self};
PyObject *super = PyObject_Vectorcall(global_super, stack, oparg & 2, NULL);
DECREF_INPUTS();
ERROR_IF(super == NULL, error);
res = PyObject_GetAttr(super, name);
Py_DECREF(super);
ERROR_IF(res == NULL, error);
}
}
family(load_attr, INLINE_CACHE_ENTRIES_LOAD_ATTR) = {
LOAD_ATTR,
LOAD_ATTR_INSTANCE_VALUE,

Some files were not shown because too many files have changed in this diff Show more