Merge branch 'main' into imaplib-append

Author: Hugo van Kemenade, 2025-04-18 15:42:40 +03:00 (committed by GitHub)
Commit: c31d47d313
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
3222 changed files with 466606 additions and 212026 deletions


@ -1,11 +1,11 @@
trigger: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7']
trigger: ['main', '3.*']
jobs:
- job: Prebuild
displayName: Pre-build checks
pool:
vmImage: ubuntu-22.04
vmImage: ubuntu-24.04
steps:
- template: ./prebuild-checks.yml


@ -1,27 +0,0 @@
#!/bin/sh
apt-get update
apt-get -yq install \
build-essential \
zlib1g-dev \
libbz2-dev \
liblzma-dev \
libncurses5-dev \
libreadline6-dev \
libsqlite3-dev \
libssl-dev \
libgdbm-dev \
tk-dev \
lzma \
lzma-dev \
liblzma-dev \
libffi-dev \
uuid-dev \
xvfb
if [ ! -z "$1" ]
then
echo "##vso[task.prependpath]$PWD/multissl/openssl/$1"
echo "##vso[task.setvariable variable=OPENSSL_DIR]$PWD/multissl/openssl/$1"
python3 Tools/ssl/multissltests.py --steps=library --base-directory $PWD/multissl --openssl $1 --system Linux
fi
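(Editor's note, not part of the diff.) The quoted "##vso[...]" strings above are Azure Pipelines logging commands: task.prependpath puts the freshly built OpenSSL on PATH for subsequent steps, and task.setvariable exposes OPENSSL_DIR to them. A minimal sketch of a follow-up step that would consume those values; the step itself is illustrative, not from this pipeline:

- script: |
    echo "Using OpenSSL from: $OPENSSL_DIR"
    openssl version   # resolved via the prepended PATH
  displayName: 'Show selected OpenSSL (illustrative)'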


@ -1,26 +0,0 @@
steps:
- checkout: self
clean: true
fetchDepth: 5
# Work around a known issue affecting Ubuntu VMs on Pipelines
- script: sudo setfacl -Rb /home/vsts
displayName: 'Workaround ACL issue'
- script: sudo ./.azure-pipelines/posix-deps-apt.sh $(openssl_version)
displayName: 'Install dependencies'
- script: ./configure --with-pydebug
displayName: 'Configure CPython (debug)'
- script: make -j4
displayName: 'Build CPython'
- script: make pythoninfo
displayName: 'Display build info'
- script: |
git fetch origin
./python Tools/patchcheck/patchcheck.py --ci true
displayName: 'Run patchcheck.py'
condition: and(succeeded(), eq(variables['Build.Reason'], 'PullRequest'))


@ -1,28 +0,0 @@
pr: ['main', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7']
jobs:
- job: Prebuild
displayName: Pre-build checks
pool:
vmImage: ubuntu-22.04
steps:
- template: ./prebuild-checks.yml
- job: Ubuntu_Patchcheck
displayName: Ubuntu patchcheck
dependsOn: Prebuild
condition: and(succeeded(), eq(dependencies.Prebuild.outputs['tests.run'], 'true'))
pool:
vmImage: ubuntu-22.04
variables:
testRunTitle: '$(system.pullRequest.TargetBranch)-linux'
testRunPlatform: linux
openssl_version: 1.1.1u
steps:
- template: ./posix-steps.yml


@ -1,29 +0,0 @@
# gh-91960: Job disabled since Python is out of free credit (September 2023):
# https://discuss.python.org/t/freebsd-gets-a-new-cirrus-ci-github-action-job-and-a-new-buildbot/33122/26
freebsd_task:
freebsd_instance:
matrix:
- image: freebsd-13-2-release-amd64
# Turn off TCP and UDP blackhole. It is not enabled by default in FreeBSD,
# but it is in the FreeBSD GCE images as used by Cirrus-CI. It causes even
# local connections to fail with ETIMEDOUT instead of ECONNREFUSED.
# For more information see https://reviews.freebsd.org/D41751 and
# https://github.com/cirruslabs/cirrus-ci-docs/issues/483.
sysctl_script:
- sysctl net.inet.tcp.blackhole=0
- sysctl net.inet.udp.blackhole=0
configure_script:
- mkdir build
- cd build
- ../configure --with-pydebug
build_script:
- cd build
- make -j$(sysctl -n hw.ncpu)
pythoninfo_script:
- cd build
- make pythoninfo
test_script:
- cd build
# dtrace fails to build on FreeBSD - see gh-73263
- make buildbottest TESTOPTS="-j0 -x test_dtrace --timeout=600"


@ -1,24 +0,0 @@
FROM docker.io/library/fedora:37
ENV CC=clang
ENV WASI_SDK_VERSION=20
ENV WASI_SDK_PATH=/opt/wasi-sdk
ENV WASMTIME_HOME=/opt/wasmtime
ENV WASMTIME_VERSION=14.0.4
ENV WASMTIME_CPU_ARCH=x86_64
RUN dnf -y --nodocs --setopt=install_weak_deps=False install /usr/bin/{blurb,clang,curl,git,ln,tar,xz} 'dnf-command(builddep)' && \
dnf -y --nodocs --setopt=install_weak_deps=False builddep python3 && \
dnf -y clean all
RUN mkdir ${WASI_SDK_PATH} && \
curl --location https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-linux.tar.gz | \
tar --strip-components 1 --directory ${WASI_SDK_PATH} --extract --gunzip
RUN mkdir --parents ${WASMTIME_HOME} && \
curl --location "https://github.com/bytecodealliance/wasmtime/releases/download/v${WASMTIME_VERSION}/wasmtime-v${WASMTIME_VERSION}-${WASMTIME_CPU_ARCH}-linux.tar.xz" | \
xz --decompress | \
tar --strip-components 1 --directory ${WASMTIME_HOME} -x && \
ln -s ${WASMTIME_HOME}/wasmtime /usr/local/bin


@ -1,7 +1,5 @@
{
"build": {
"dockerfile": "Dockerfile"
},
"image": "ghcr.io/python/devcontainer:2024.09.25.11038928730",
"onCreateCommand": [
// Install common tooling.
"dnf",


@ -1,6 +1,6 @@
root = true
[*.{py,c,cpp,h,rst,md,yml}]
[*.{py,c,cpp,h,js,rst,md,yml,yaml}]
trim_trailing_whitespace = true
insert_final_newline = true
indent_style = space
@ -11,5 +11,5 @@ indent_size = 4
[*.rst]
indent_size = 3
[*.yml]
[*.{js,yml,yaml}]
indent_size = 2

.gitattributes

@ -27,8 +27,6 @@ Lib/test/cjkencodings/* noeol
Lib/test/tokenizedata/coding20731.py noeol
Lib/test/decimaltestdata/*.decTest noeol
Lib/test/test_email/data/*.txt noeol
Lib/test/test_importlib/resources/data01/* noeol
Lib/test/test_importlib/resources/namespacedata01/* noeol
Lib/test/xmltestdata/* noeol
# Shell scripts should have LF even on Windows because of Cygwin
@ -77,11 +75,14 @@ Include/internal/pycore_opcode.h generated
Include/internal/pycore_opcode_metadata.h generated
Include/internal/pycore_*_generated.h generated
Include/internal/pycore_uop_ids.h generated
Include/internal/pycore_uop_metadata.h generated
Include/opcode.h generated
Include/opcode_ids.h generated
Include/token.h generated
Lib/_opcode_metadata.py generated
Lib/keyword.py generated
Lib/test/certdata/*.pem generated
Lib/test/certdata/*.0 generated
Lib/test/levenshtein_examples.json generated
Lib/test/test_stable_abi_ctypes.py generated
Lib/token.py generated
@ -94,6 +95,7 @@ Programs/test_frozenmain.h generated
Python/Python-ast.c generated
Python/executor_cases.c.h generated
Python/generated_cases.c.h generated
Python/optimizer_cases.c.h generated
Python/opcode_targets.h generated
Python/stdlib_module_names.h generated
Tools/peg_generator/pegen/grammar_parser.py generated

.github/CODEOWNERS

@ -5,22 +5,28 @@
# https://git-scm.com/docs/gitignore#_pattern_format
# GitHub
.github/** @ezio-melotti @hugovk
.github/** @ezio-melotti @hugovk @AA-Turner
# pre-commit
.pre-commit-config.yaml @hugovk @AlexWaygood
.ruff.toml @hugovk @AlexWaygood
.ruff.toml @hugovk @AlexWaygood @AA-Turner
# Build system
configure* @erlend-aasland @corona10
Makefile.pre.in @erlend-aasland
Modules/Setup* @erlend-aasland
# argparse
**/*argparse* @savannahostrowski
# asyncio
**/*asyncio* @1st1 @asvetlov @gvanrossum @kumaraditya303 @willingc
**/*asyncio* @1st1 @asvetlov @kumaraditya303 @willingc
# Core
**/*context* @1st1
**/*genobject* @markshannon
**/*hamt* @1st1
**/*jit* @brandtbucher @savannahostrowski
Objects/set* @rhettinger
Objects/dict* @methane @markshannon
Objects/typevarobject.c @JelleZijlstra
@ -28,41 +34,94 @@ Objects/type* @markshannon
Objects/codeobject.c @markshannon
Objects/frameobject.c @markshannon
Objects/call.c @markshannon
Python/ceval*.c @markshannon @gvanrossum
Python/ceval*.h @markshannon @gvanrossum
Python/ceval*.c @markshannon
Python/ceval*.h @markshannon
Python/codegen.c @markshannon @iritkatriel
Python/compile.c @markshannon @iritkatriel
Python/assemble.c @markshannon @iritkatriel
Python/flowgraph.c @markshannon @iritkatriel
Python/ast_opt.c @isidentical
Python/bytecodes.c @markshannon @gvanrossum
Python/optimizer*.c @markshannon @gvanrossum
Python/instruction_sequence.c @iritkatriel
Python/bytecodes.c @markshannon
Python/optimizer*.c @markshannon
Python/optimizer_analysis.c @Fidget-Spinner
Python/optimizer_bytecodes.c @Fidget-Spinner
Python/symtable.c @JelleZijlstra @carljm
Lib/_pyrepl/* @pablogsal @lysnikolaou @ambv
Lib/test/test_patma.py @brandtbucher
Lib/test/test_peepholer.py @brandtbucher
Lib/test/test_type_*.py @JelleZijlstra
Lib/test/test_capi/test_misc.py @markshannon @gvanrossum
Lib/test/test_capi/test_misc.py @markshannon
Lib/test/test_pyrepl/* @pablogsal @lysnikolaou @ambv
Tools/c-analyzer/ @ericsnowcurrently
# Exceptions
Lib/traceback.py @iritkatriel
Lib/test/test_except*.py @iritkatriel
Lib/test/test_traceback.py @iritkatriel
Objects/exceptions.c @iritkatriel
Python/traceback.c @iritkatriel
# dbm
**/*dbm* @corona10 @erlend-aasland @serhiy-storchaka
# Hashing
**/*hashlib* @tiran
**/*pyhash* @tiran
**/*sha* @tiran
**/*md5* @tiran
**/*blake* @tiran
/Modules/_blake2/** @tiran
/Modules/_sha3/** @tiran
# Doc/ tools
Doc/conf.py @AA-Turner @hugovk
Doc/Makefile @AA-Turner @hugovk
Doc/make.bat @AA-Turner @hugovk
Doc/requirements.txt @AA-Turner @hugovk
Doc/_static/** @AA-Turner @hugovk
Doc/tools/** @AA-Turner @hugovk
# runtime state/lifecycle
**/*pylifecycle* @ericsnowcurrently
**/*pystate* @ericsnowcurrently
**/*preconfig* @ericsnowcurrently
**/*initconfig* @ericsnowcurrently
**/*pathconfig* @ericsnowcurrently
**/*sysmodule* @ericsnowcurrently
**/*bltinmodule* @ericsnowcurrently
**/*gil* @ericsnowcurrently
Include/internal/pycore_runtime.h @ericsnowcurrently
Include/internal/pycore_interp.h @ericsnowcurrently
Include/internal/pycore_tstate.h @ericsnowcurrently
Include/internal/pycore_*_state.h @ericsnowcurrently
Include/internal/pycore_*_init.h @ericsnowcurrently
Include/internal/pycore_atexit.h @ericsnowcurrently
Include/internal/pycore_freelist.h @ericsnowcurrently
Include/internal/pycore_global_objects.h @ericsnowcurrently
Include/internal/pycore_obmalloc.h @ericsnowcurrently
Include/internal/pycore_pymem.h @ericsnowcurrently
Include/internal/pycore_stackref.h @Fidget-Spinner
Modules/main.c @ericsnowcurrently
Programs/_bootstrap_python.c @ericsnowcurrently
Programs/python.c @ericsnowcurrently
Tools/build/generate_global_objects.py @ericsnowcurrently
# Initialization
Doc/library/sys_path_init.rst @FFY00
Doc/c-api/init_config.rst @FFY00
# getpath
**/*getpath* @FFY00
# site
**/*site.py @FFY00
Doc/library/site.rst @FFY00
# Exceptions
Lib/test/test_except*.py @iritkatriel
Objects/exceptions.c @iritkatriel
# Hashing & cryptographic primitives
**/*hashlib* @gpshead @tiran @picnixz
**/*hashopenssl* @gpshead @tiran @picnixz
**/*pyhash* @gpshead @tiran @picnixz
Modules/*blake* @gpshead @tiran @picnixz
Modules/*md5* @gpshead @tiran @picnixz
Modules/*sha* @gpshead @tiran @picnixz
Modules/_hacl/** @gpshead @picnixz
**/*hmac* @gpshead @picnixz
# libssl
**/*ssl* @gpshead @picnixz
# logging
**/*logging* @vsajip
# venv
**/*venv* @vsajip
**/*venv* @vsajip @FFY00
# Launcher
/PC/launcher.c @vsajip
@ -76,8 +135,21 @@ Python/traceback.c @iritkatriel
# Import (including importlib).
**/*import* @brettcannon @ericsnowcurrently @ncoghlan @warsaw
/Python/import.c @kumaraditya303
**/*importlib/resources/* @jaraco @warsaw @FFY00
**/importlib/metadata/* @jaraco @warsaw
Python/dynload_*.c @ericsnowcurrently
**/*freeze* @ericsnowcurrently
**/*frozen* @ericsnowcurrently
**/*modsupport* @ericsnowcurrently
**/*modulefinder* @ericsnowcurrently
**/*moduleobject* @ericsnowcurrently
**/*multiphase* @ericsnowcurrently
**/*pkgutil* @ericsnowcurrently
**/*pythonrun* @ericsnowcurrently
**/*runpy* @ericsnowcurrently
**/*singlephase* @ericsnowcurrently
Lib/test/test_module/ @ericsnowcurrently
Doc/c-api/module.rst @ericsnowcurrently
**/*importlib/resources/* @jaraco @warsaw @FFY00
**/*importlib/metadata/* @jaraco @warsaw
# Dates and times
**/*datetime* @pganssle @abalkin
@ -108,18 +180,24 @@ Include/internal/pycore_time.h @pganssle @abalkin
/Lib/test/test_tokenize.py @pablogsal @lysnikolaou
# Code generator
/Tools/cases_generator/ @gvanrossum
/Tools/cases_generator/ @markshannon
# AST
Python/ast.c @isidentical
Parser/asdl.py @isidentical
Parser/asdl_c.py @isidentical
Lib/ast.py @isidentical
Python/ast.c @isidentical @JelleZijlstra @eclips4
Python/ast_opt.c @isidentical @eclips4
Parser/asdl.py @isidentical @JelleZijlstra @eclips4
Parser/asdl_c.py @isidentical @JelleZijlstra @eclips4
Lib/ast.py @isidentical @JelleZijlstra @eclips4
Lib/_ast_unparse.py @isidentical @JelleZijlstra @eclips4
Lib/test/test_ast/ @eclips4
# Mock
/Lib/unittest/mock.py @cjw296
/Lib/test/test_unittest/testmock/* @cjw296
# multiprocessing
**/*multiprocessing* @gpshead
# SQLite 3
**/*sqlite* @berkerpeksag @erlend-aasland
@ -128,6 +206,10 @@ Lib/ast.py @isidentical
/Lib/test/test_subprocess.py @gpshead
/Modules/*subprocess* @gpshead
# debugger
**/*pdb* @gaogaotiantian
**/*bdb* @gaogaotiantian
# Limited C API & stable ABI
Tools/build/stable_abi.py @encukou
Misc/stable_abi.toml @encukou
@ -149,18 +231,19 @@ Doc/c-api/stable.rst @encukou
**/*itertools* @rhettinger
**/*collections* @rhettinger
**/*random* @rhettinger
**/*queue* @rhettinger
**/*bisect* @rhettinger
**/*heapq* @rhettinger
**/*functools* @rhettinger
**/*decimal* @rhettinger
**/*dataclasses* @ericvsmith
**/*ensurepip* @pfmoore @pradyunsg
/Doc/library/idle.rst @terryjreedy
**/*idlelib* @terryjreedy
**/*turtledemo* @terryjreedy
**/*annotationlib* @JelleZijlstra
**/*typing* @JelleZijlstra @AlexWaygood
**/*ftplib @giampaolo
@ -187,18 +270,49 @@ Doc/c-api/stable.rst @encukou
**/*zipfile/_path/* @jaraco
# Argument Clinic
/Tools/clinic/** @erlend-aasland @AlexWaygood
/Lib/test/test_clinic.py @erlend-aasland @AlexWaygood
/Tools/clinic/** @erlend-aasland
/Lib/test/test_clinic.py @erlend-aasland
Doc/howto/clinic.rst @erlend-aasland
# Subinterpreters
**/*interpreteridobject.* @ericsnowcurrently
**/*crossinterp* @ericsnowcurrently
Lib/test/support/interpreters/ @ericsnowcurrently
Modules/_xx*interp*module.c @ericsnowcurrently
Modules/_interp*module.c @ericsnowcurrently
Lib/test/test_interpreters/ @ericsnowcurrently
# Android
**/*Android* @mhsmith @freakboy3742
**/*android* @mhsmith @freakboy3742
# iOS (but not termios)
**/iOS* @freakboy3742
**/ios* @freakboy3742
**/*_iOS* @freakboy3742
**/*_ios* @freakboy3742
**/*-iOS* @freakboy3742
**/*-ios* @freakboy3742
# WebAssembly
/Tools/wasm/ @brettcannon
/Tools/wasm/ @brettcannon @freakboy3742
# SBOM
/Misc/externals.spdx.json @sethmlarson
/Misc/sbom.spdx.json @sethmlarson
/Tools/build/generate_sbom.py @sethmlarson
# Config Parser
Lib/configparser.py @jaraco
Lib/test/test_configparser.py @jaraco
# Doc sections
Doc/reference/ @willingc @AA-Turner
**/*weakref* @kumaraditya303
# Colorize
Lib/_colorize.py @hugovk
Lib/test/test__colorize.py @hugovk
# Fuzzing
Modules/_xxtestfuzz/ @ammaraskar


@ -34,12 +34,12 @@ body:
label: "CPython versions tested on:"
multiple: true
options:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
- "3.12"
- "3.13"
- "3.14"
- "CPython main branch"
validations:
required: true


@ -27,11 +27,12 @@ body:
label: "CPython versions tested on:"
multiple: true
options:
- "3.8"
- "3.9"
- "3.10"
- "3.11"
- "3.12"
- "3.13"
- "3.14"
- "CPython main branch"
validations:
required: true


@ -7,10 +7,10 @@ Please read this comment in its entirety. It's quite important.
It should be in the following format:
```
gh-NNNNN: Summary of the changes made
gh-NNNNNN: Summary of the changes made
```
Where: gh-NNNNN refers to the GitHub issue number.
Where: gh-NNNNNN refers to the GitHub issue number.
Most PRs will require an issue number. Trivial changes, like fixing a typo, do not need an issue.
@ -20,11 +20,11 @@ If this is a backport PR (PR made against branches other than `main`),
please ensure that the PR title is in the following format:
```
[X.Y] <title from the original PR> (GH-NNNN)
[X.Y] <title from the original PR> (GH-NNNNNN)
```
Where: [X.Y] is the branch name, e.g. [3.6].
Where: [X.Y] is the branch name, for example: [3.13].
GH-NNNN refers to the PR number from `main`.
GH-NNNNNN refers to the PR number from `main`.
-->
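For instance (issue and PR numbers are made up): a PR against main might be titled "gh-123456: Handle literals in imaplib APPEND", and its 3.13 backport "[3.13] gh-123456: Handle literals in imaplib APPEND (GH-123457)".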

.github/actionlint.yaml (new file)

@ -0,0 +1,11 @@
self-hosted-runner:
# Pending https://github.com/rhysd/actionlint/issues/533
labels: ["windows-11-arm"]
config-variables: null
paths:
.github/workflows/**/*.yml:
ignore:
- 1st argument of function call is not assignable
- SC2(015|038|086|091|097|098|129|155)


@ -18,6 +18,7 @@ jobs:
runs-on: ubuntu-latest
permissions:
issues: write
timeout-minutes: 5
steps:
- uses: actions/github-script@v7
with:


@ -1,26 +1,15 @@
name: Tests
# gh-84728: "paths-ignore" is not used to skip documentation-only PRs, because
# it prevents marking a job as mandatory. A PR cannot be merged if a job is
# mandatory but not scheduled because of "paths-ignore".
on:
workflow_dispatch:
push:
branches:
- 'main'
- '3.12'
- '3.11'
- '3.10'
- '3.9'
- '3.8'
- '3.*'
pull_request:
branches:
- 'main'
- '3.12'
- '3.11'
- '3.10'
- '3.9'
- '3.8'
- '3.*'
permissions:
contents: read
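(Editor's note, not part of the diff.) To make the gh-84728 comment above concrete: instead of "paths-ignore", a change-detection job publishes boolean outputs, downstream jobs gate themselves with "if:", and one always-running aggregate job serves as the required branch-protection check (in this workflow those roles are played by build-context and all-required-green). A minimal, self-contained sketch with illustrative job names:

jobs:
  detect:
    runs-on: ubuntu-latest
    outputs:
      run-tests: ${{ steps.filter.outputs.run-tests }}
    steps:
      - id: filter
        run: echo "run-tests=true" >> "$GITHUB_OUTPUT"  # real logic inspects the changed files
  tests:
    needs: detect
    if: fromJSON(needs.detect.outputs.run-tests)
    runs-on: ubuntu-latest
    steps:
      - run: echo "the test suite would run here"
  required-check:  # the only job marked as required in branch protection
    needs: [detect, tests]
    if: always()
    runs-on: ubuntu-latest
    steps:
      - run: echo "evaluate the needs results here (e.g. with re-actors/alls-green)"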
@ -29,136 +18,113 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}-reusable
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
check_source:
name: 'Check for source changes'
runs-on: ubuntu-latest
timeout-minutes: 10
outputs:
run-docs: ${{ steps.docs-changes.outputs.run-docs || false }}
run_tests: ${{ steps.check.outputs.run_tests }}
run_hypothesis: ${{ steps.check.outputs.run_hypothesis }}
run_cifuzz: ${{ steps.check.outputs.run_cifuzz }}
config_hash: ${{ steps.config_hash.outputs.hash }}
steps:
- uses: actions/checkout@v4
- name: Check for source changes
id: check
run: |
if [ -z "$GITHUB_BASE_REF" ]; then
echo "run_tests=true" >> $GITHUB_OUTPUT
else
git fetch origin $GITHUB_BASE_REF --depth=1
# git diff "origin/$GITHUB_BASE_REF..." (3 dots) may be more
# reliable than git diff "origin/$GITHUB_BASE_REF.." (2 dots),
# but it requires downloading more commits (this job uses
# "git fetch --depth=1").
#
# git diff "origin/$GITHUB_BASE_REF..." (3 dots) works with Git
# 2.26, but Git 2.28 is stricter and fails with "no merge base".
#
# git diff "origin/$GITHUB_BASE_REF.." (2 dots) should be enough on
# GitHub, since GitHub starts by merging origin/$GITHUB_BASE_REF
# into the PR branch anyway.
#
# https://github.com/python/core-workflow/issues/373
git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qvE '(\.rst$|^Doc|^Misc|^\.pre-commit-config\.yaml$|\.ruff\.toml$)' && echo "run_tests=true" >> $GITHUB_OUTPUT || true
fi
# Check if we should run hypothesis tests
GIT_BRANCH=${GITHUB_BASE_REF:-${GITHUB_REF#refs/heads/}}
echo $GIT_BRANCH
if $(echo "$GIT_BRANCH" | grep -q -w '3\.\(8\|9\|10\|11\)'); then
echo "Branch too old for hypothesis tests"
echo "run_hypothesis=false" >> $GITHUB_OUTPUT
else
echo "Run hypothesis tests"
echo "run_hypothesis=true" >> $GITHUB_OUTPUT
fi
# oss-fuzz maintains a configuration for fuzzing the main branch of
# CPython, so CIFuzz should be run only for code that is likely to be
# merged into the main branch; compatibility with older branches may
# be broken.
FUZZ_RELEVANT_FILES='(\.c$|\.h$|\.cpp$|^configure$|^\.github/workflows/build\.yml$|^Modules/_xxtestfuzz)'
if [ "$GITHUB_BASE_REF" = "main" ] && [ "$(git diff --name-only origin/$GITHUB_BASE_REF.. | grep -qE $FUZZ_RELEVANT_FILES; echo $?)" -eq 0 ]; then
# The tests are pretty slow so they are executed only for PRs
# changing relevant files.
echo "Run CIFuzz tests"
echo "run_cifuzz=true" >> $GITHUB_OUTPUT
else
echo "Branch too old for CIFuzz tests; or no C files were changed"
echo "run_cifuzz=false" >> $GITHUB_OUTPUT
fi
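(Editor's note, not part of the diff.) A short illustration of the two-dot / three-dot comment above, assuming the same shallow fetch this step performs:

git fetch origin "$GITHUB_BASE_REF" --depth=1
# Two dots: compare the fetched tip with HEAD directly; works with a shallow fetch.
git diff --name-only "origin/$GITHUB_BASE_REF"..HEAD
# Three dots: compare HEAD with the merge-base of the two branches; with --depth=1
# newer Git versions cannot find a merge base and fail (see the comment above).
git diff --name-only "origin/$GITHUB_BASE_REF"...HEAD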
- name: Compute hash for config cache key
id: config_hash
run: |
echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> $GITHUB_OUTPUT
- name: Get a list of the changed documentation-related files
if: github.event_name == 'pull_request'
id: changed-docs-files
uses: Ana06/get-changed-files@v2.2.0
with:
filter: |
Doc/**
Misc/**
.github/workflows/reusable-docs.yml
format: csv # works for paths with spaces
- name: Check for docs changes
if: >-
github.event_name == 'pull_request'
&& steps.changed-docs-files.outputs.added_modified_renamed != ''
id: docs-changes
run: |
echo "run-docs=true" >> "${GITHUB_OUTPUT}"
build-context:
name: Change detection
# To use boolean outputs from this job, parse them as JSON.
# Here are some examples:
#
# if: fromJSON(needs.build-context.outputs.run-docs)
#
# ${{
# fromJSON(needs.build-context.outputs.run-tests)
# && 'truthy-branch'
# || 'falsy-branch'
# }}
#
uses: ./.github/workflows/reusable-context.yml
check-docs:
name: Docs
needs: check_source
if: fromJSON(needs.check_source.outputs.run-docs)
needs: build-context
if: fromJSON(needs.build-context.outputs.run-docs)
uses: ./.github/workflows/reusable-docs.yml
check_generated_files:
name: 'Check if generated files are up to date'
check-autoconf-regen:
name: 'Check if Autoconf files are up to date'
# Don't use ubuntu-latest but a specific version to make the job
# reproducible: to get the same tool versions (autoconf, aclocal, ...)
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
container:
image: ghcr.io/python/autoconf:2025.01.02.12581854023
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
steps:
- name: Install Git
run: |
apt update && apt install git -yq
git config --global --add safe.directory "$GITHUB_WORKSPACE"
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: '3.x'
- name: Restore config.cache
uses: actions/cache@v3
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}-${{ env.pythonLocation }}
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Add ccache to PATH
run: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
fetch-depth: 1
persist-credentials: false
- name: Check Autoconf and aclocal versions
run: |
grep "Generated by GNU Autoconf 2.71" configure
grep "Generated by GNU Autoconf 2.72" configure
grep "aclocal 1.16.5" aclocal.m4
grep -q "runstatedir" configure
grep -q "PKG_PROG_PKG_CONFIG" aclocal.m4
- name: Regenerate autoconf files
# Same command used by Tools/build/regen-configure.sh ($AUTORECONF)
run: autoreconf -ivf -Werror
- name: Check for changes
run: |
git add -u
changes=$(git status --porcelain)
# Check for changes in regenerated files
if test -n "$changes"; then
echo "Generated files not up to date."
echo "Perhaps you forgot to run make regen-configure ;)"
echo "configure files must be regenerated with a specific version of autoconf."
echo "$changes"
echo ""
git diff --staged || true
exit 1
fi
check-generated-files:
name: 'Check if generated files are up to date'
# Don't use ubuntu-latest but a specific version to make the job
# reproducible: to get the same tool versions (autoconf, aclocal, ...)
runs-on: ubuntu-24.04
timeout-minutes: 60
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ needs.build-context.outputs.config-hash }}-${{ env.pythonLocation }}
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Add ccache to PATH
run: echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: false
- name: Configure CPython
run: |
# Build Python with the libpython dynamic library
./configure --config-cache --with-pydebug --enable-shared
- name: Regenerate autoconf files
# Same command used by Tools/build/regen-configure.sh ($AUTORECONF)
run: autoreconf -ivf -Werror
- name: Build CPython
run: |
make -j4 regen-all
make regen-stdlib-module-names
make regen-stdlib-module-names regen-sbom regen-unicodedata
- name: Check for changes
run: |
git add -u
@ -181,74 +147,125 @@ jobs:
if: github.event_name == 'pull_request' # $GITHUB_EVENT_NAME
run: make check-c-globals
build_windows:
name: 'Windows'
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
uses: ./.github/workflows/reusable-windows.yml
build_windows_free_threaded:
name: 'Windows (free-threaded)'
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
uses: ./.github/workflows/reusable-windows.yml
with:
free-threaded: true
build_macos:
name: 'macOS'
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
uses: ./.github/workflows/reusable-macos.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
build_macos_free_threaded:
name: 'macOS (free-threaded)'
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
uses: ./.github/workflows/reusable-macos.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
free-threaded: true
build_ubuntu:
name: 'Ubuntu'
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
uses: ./.github/workflows/reusable-ubuntu.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
options: |
../cpython-ro-srcdir/configure \
--config-cache \
--with-pydebug \
--with-openssl=$OPENSSL_DIR
build_ubuntu_free_threaded:
name: 'Ubuntu (free-threaded)'
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
uses: ./.github/workflows/reusable-ubuntu.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
options: |
../cpython-ro-srcdir/configure \
--config-cache \
--with-pydebug \
--with-openssl=$OPENSSL_DIR \
--disable-gil
build_ubuntu_ssltests:
name: 'Ubuntu SSL tests with OpenSSL'
runs-on: ubuntu-20.04
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
build-windows:
name: >-
Windows
${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
needs: build-context
if: fromJSON(needs.build-context.outputs.run-windows-tests)
strategy:
fail-fast: false
matrix:
openssl_ver: [1.1.1w, 3.0.11, 3.1.3]
arch:
- x64
- Win32
- arm64
free-threading:
- false
- true
exclude:
# Skip Win32 on free-threaded builds
- { arch: Win32, free-threading: true }
uses: ./.github/workflows/reusable-windows.yml
with:
arch: ${{ matrix.arch }}
free-threading: ${{ matrix.free-threading }}
build-windows-msi:
name: >- # ${{ '' } is a hack to nest jobs under the same sidebar category
Windows MSI${{ '' }}
needs: build-context
if: fromJSON(needs.build-context.outputs.run-windows-msi)
strategy:
fail-fast: false
matrix:
arch:
- x86
- x64
- arm64
uses: ./.github/workflows/reusable-windows-msi.yml
with:
arch: ${{ matrix.arch }}
build-macos:
name: >-
macOS
${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
strategy:
fail-fast: false
matrix:
# Cirrus and macos-14 are M1, macos-13 is default GHA Intel.
# macOS 13 only runs tests against the GIL-enabled CPython.
# Cirrus is used for upstream, macos-14 for forks.
os:
- ghcr.io/cirruslabs/macos-runner:sonoma
- macos-14
- macos-13
is-fork: # only used for the exclusion trick
- ${{ github.repository_owner != 'python' }}
free-threading:
- false
- true
exclude:
- os: ghcr.io/cirruslabs/macos-runner:sonoma
is-fork: true
- os: macos-14
is-fork: false
- os: macos-13
free-threading: true
uses: ./.github/workflows/reusable-macos.yml
with:
config_hash: ${{ needs.build-context.outputs.config-hash }}
free-threading: ${{ matrix.free-threading }}
os: ${{ matrix.os }}
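(Editor's note, not part of the diff.) The "is-fork" axis above is a single-value matrix axis used only for exclusions: its one value is the expression github.repository_owner != 'python', so the exclude rules can select different runners for forks and for the upstream repo. A stripped-down sketch of the same pattern with illustrative names:

jobs:
  demo:
    strategy:
      matrix:
        runner: [upstream-runner, fork-runner]
        is-fork:
          - ${{ github.repository_owner != 'python' }}  # single value: true on forks, false upstream
        exclude:
          - { runner: upstream-runner, is-fork: true }   # forks never use the upstream runner
          - { runner: fork-runner, is-fork: false }      # upstream never uses the fork runner
    runs-on: ubuntu-latest
    steps:
      - run: echo "selected runner axis - ${{ matrix.runner }}"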
build-ubuntu:
name: >-
Ubuntu
${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
${{ fromJSON(matrix.bolt) && '(bolt)' || '' }}
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
strategy:
fail-fast: false
matrix:
bolt:
- false
- true
free-threading:
- false
- true
os:
- ubuntu-24.04
- ubuntu-24.04-arm
exclude:
# Do not test BOLT with free-threading, to conserve resources
- bolt: true
free-threading: true
# BOLT currently crashes during instrumentation on aarch64
- os: ubuntu-24.04-arm
bolt: true
uses: ./.github/workflows/reusable-ubuntu.yml
with:
config_hash: ${{ needs.build-context.outputs.config-hash }}
bolt-optimizations: ${{ matrix.bolt }}
free-threading: ${{ matrix.free-threading }}
os: ${{ matrix.os }}
build-ubuntu-ssltests:
name: 'Ubuntu SSL tests with OpenSSL'
runs-on: ${{ matrix.os }}
timeout-minutes: 60
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
strategy:
fail-fast: false
matrix:
os: [ubuntu-24.04]
openssl_ver: [3.0.15, 3.1.7, 3.2.3, 3.3.2, 3.4.0]
# See Tools/ssl/make_ssl_data.py for notes on adding a new version
env:
OPENSSL_VER: ${{ matrix.openssl_ver }}
MULTISSL_DIR: ${{ github.workspace }}/multissl
@ -256,36 +273,42 @@ jobs:
LD_LIBRARY_PATH: ${{ github.workspace }}/multissl/openssl/${{ matrix.openssl_ver }}/lib
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ needs.build-context.outputs.config-hash }}
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install Dependencies
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> "$GITHUB_ENV"
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> "$GITHUB_ENV"
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> "$GITHUB_ENV"
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory "$MULTISSL_DIR" --openssl "$OPENSSL_VER" --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: false
- name: Configure CPython
run: ./configure --config-cache --with-pydebug --with-openssl=$OPENSSL_DIR
run: ./configure CFLAGS="-fdiagnostics-format=json" --config-cache --enable-slower-safety --with-pydebug --with-openssl="$OPENSSL_DIR"
- name: Build CPython
run: make -j4
- name: Display build info
@ -293,60 +316,75 @@ jobs:
- name: SSL tests
run: ./python Lib/test/ssltests.py
test_hypothesis:
build-wasi:
name: 'WASI'
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
uses: ./.github/workflows/reusable-wasi.yml
with:
config_hash: ${{ needs.build-context.outputs.config-hash }}
test-hypothesis:
name: "Hypothesis tests on Ubuntu"
runs-on: ubuntu-20.04
runs-on: ubuntu-24.04
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true' && needs.check_source.outputs.run_hypothesis == 'true'
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
env:
OPENSSL_VER: 3.0.11
OPENSSL_VER: 3.0.15
PYTHONSTRICTEXTENSIONBUILD: 1
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install Dependencies
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> "$GITHUB_ENV"
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> "$GITHUB_ENV"
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> "$GITHUB_ENV"
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory "$MULTISSL_DIR" --openssl "$OPENSSL_VER" --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: false
- name: Setup directory envs for out-of-tree builds
run: |
echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV
echo "CPYTHON_BUILDDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-builddir)" >> $GITHUB_ENV
echo "CPYTHON_RO_SRCDIR=$(realpath -m "${GITHUB_WORKSPACE}"/../cpython-ro-srcdir)" >> "$GITHUB_ENV"
echo "CPYTHON_BUILDDIR=$(realpath -m "${GITHUB_WORKSPACE}"/../cpython-builddir)" >> "$GITHUB_ENV"
- name: Create directories for read-only out-of-tree builds
run: mkdir -p $CPYTHON_RO_SRCDIR $CPYTHON_BUILDDIR
run: mkdir -p "$CPYTHON_RO_SRCDIR" "$CPYTHON_BUILDDIR"
- name: Bind mount sources read-only
run: sudo mount --bind -o ro $GITHUB_WORKSPACE $CPYTHON_RO_SRCDIR
run: sudo mount --bind -o ro "$GITHUB_WORKSPACE" "$CPYTHON_RO_SRCDIR"
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ${{ env.CPYTHON_BUILDDIR }}/config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ needs.build-context.outputs.config-hash }}
- name: Configure CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: |
../cpython-ro-srcdir/configure \
--config-cache \
--with-pydebug \
--with-openssl=$OPENSSL_DIR
--enable-slower-safety \
--with-openssl="$OPENSSL_DIR"
- name: Build CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make -j4
@ -355,26 +393,26 @@ jobs:
run: make pythoninfo
- name: Remount sources writable for tests
# some tests write to srcdir, lack of pyc files slows down testing
run: sudo mount $CPYTHON_RO_SRCDIR -oremount,rw
run: sudo mount "$CPYTHON_RO_SRCDIR" -oremount,rw
- name: Setup directory envs for out-of-tree builds
run: |
echo "CPYTHON_BUILDDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-builddir)" >> $GITHUB_ENV
echo "CPYTHON_BUILDDIR=$(realpath -m "${GITHUB_WORKSPACE}"/../cpython-builddir)" >> "$GITHUB_ENV"
- name: "Create hypothesis venv"
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: |
VENV_LOC=$(realpath -m .)/hypovenv
VENV_PYTHON=$VENV_LOC/bin/python
echo "HYPOVENV=${VENV_LOC}" >> $GITHUB_ENV
echo "VENV_PYTHON=${VENV_PYTHON}" >> $GITHUB_ENV
./python -m venv $VENV_LOC && $VENV_PYTHON -m pip install -r ${GITHUB_WORKSPACE}/Tools/requirements-hypothesis.txt
echo "HYPOVENV=${VENV_LOC}" >> "$GITHUB_ENV"
echo "VENV_PYTHON=${VENV_PYTHON}" >> "$GITHUB_ENV"
./python -m venv "$VENV_LOC" && "$VENV_PYTHON" -m pip install -r "${GITHUB_WORKSPACE}/Tools/requirements-hypothesis.txt"
- name: 'Restore Hypothesis database'
id: cache-hypothesis-database
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ./hypothesis
path: ${{ env.CPYTHON_BUILDDIR }}/.hypothesis/
key: hypothesis-database-${{ github.head_ref || github.run_id }}
restore-keys: |
- hypothesis-database-
hypothesis-database-
- name: "Run tests"
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: |
@ -382,7 +420,7 @@ jobs:
#
# (GH-104097) test_sysconfig is skipped because it has tests that are
# failing when executed from inside a virtual environment.
${{ env.VENV_PYTHON }} -m test \
"${VENV_PYTHON}" -m test \
-W \
-o \
-j4 \
@ -395,33 +433,40 @@ jobs:
-x test_subprocess \
-x test_signal \
-x test_sysconfig
- uses: actions/upload-artifact@v3
- uses: actions/upload-artifact@v4
if: always()
with:
name: hypothesis-example-db
path: .hypothesis/examples/
path: ${{ env.CPYTHON_BUILDDIR }}/.hypothesis/examples/
build_asan:
build-asan:
name: 'Address sanitizer'
runs-on: ubuntu-20.04
runs-on: ${{ matrix.os }}
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
strategy:
fail-fast: false
matrix:
os: [ubuntu-24.04]
env:
OPENSSL_VER: 3.0.11
OPENSSL_VER: 3.0.15
PYTHONSTRICTEXTENSIONBUILD: 1
ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ needs.check_source.outputs.config_hash }}
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ needs.build-context.outputs.config-hash }}
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install Dependencies
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Set up GCC-10 for ASAN
uses: egor-tensin/setup-gcc@v1
@ -429,23 +474,26 @@ jobs:
version: 10
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> "$GITHUB_ENV"
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> "$GITHUB_ENV"
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> "$GITHUB_ENV"
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory "$MULTISSL_DIR" --openssl "$OPENSSL_VER" --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Configure CPython
run: ./configure --config-cache --with-address-sanitizer --without-pymalloc
- name: Build CPython
@ -453,15 +501,72 @@ jobs:
- name: Display build info
run: make pythoninfo
- name: Tests
run: xvfb-run make test
run: xvfb-run make ci
build-tsan:
name: >-
Thread sanitizer
${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
strategy:
fail-fast: false
matrix:
free-threading:
- false
- true
uses: ./.github/workflows/reusable-tsan.yml
with:
config_hash: ${{ needs.build-context.outputs.config-hash }}
free-threading: ${{ matrix.free-threading }}
cross-build-linux:
name: Cross build Linux
runs-on: ubuntu-latest
timeout-minutes: 60
needs: build-context
if: needs.build-context.outputs.run-tests == 'true'
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ needs.build-context.outputs.config-hash }}
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Set build dir
run:
# an absolute path outside of the working directory
echo "BUILD_DIR=$(realpath ${{ github.workspace }}/../build)" >> "$GITHUB_ENV"
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Configure host build
run: ./configure --prefix="$BUILD_DIR/host-python"
- name: Install host Python
run: make -j8 install
- name: Run test subset with host build
run: |
"$BUILD_DIR/host-python/bin/python3" -m test test_sysconfig test_site test_embed
- name: Configure cross build
run: ./configure --prefix="$BUILD_DIR/cross-python" --with-build-python="$BUILD_DIR/host-python/bin/python3"
- name: Install cross Python
run: make -j8 install
- name: Run test subset with cross build
run: |
"$BUILD_DIR/cross-python/bin/python3" -m test test_sysconfig test_site test_embed
# CIFuzz job based on https://google.github.io/oss-fuzz/getting-started/continuous-integration/
cifuzz:
name: CIFuzz
runs-on: ubuntu-latest
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_cifuzz == 'true'
needs: build-context
if: needs.build-context.outputs.run-ci-fuzz == 'true'
permissions:
security-events: write
strategy:
@ -483,82 +588,85 @@ jobs:
output-sarif: true
sanitizer: ${{ matrix.sanitizer }}
- name: Upload crash
uses: actions/upload-artifact@v3
if: failure() && steps.build.outcome == 'success'
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.sanitizer }}-artifacts
path: ./out/artifacts
- name: Upload SARIF
if: always() && steps.build.outcome == 'success'
uses: github/codeql-action/upload-sarif@v2
uses: github/codeql-action/upload-sarif@v3
with:
sarif_file: cifuzz-sarif/results.sarif
checkout_path: cifuzz-sarif
all-required-green: # This job does nothing and is only used for the branch protection
name: All required checks pass
if: always()
needs:
- check_source # Transitive dependency, needed to access `run_tests` value
- check-docs
- check_generated_files
- build_macos
- build_macos_free_threaded
- build_ubuntu
- build_ubuntu_free_threaded
- build_ubuntu_ssltests
- build_windows
- build_windows_free_threaded
- test_hypothesis
- build_asan
- cifuzz
runs-on: ubuntu-latest
timeout-minutes: 5
needs:
- build-context # Transitive dependency, needed to access `run-tests` value
- check-docs
- check-autoconf-regen
- check-generated-files
- build-windows
- build-windows-msi
- build-macos
- build-ubuntu
- build-ubuntu-ssltests
- build-wasi
- test-hypothesis
- build-asan
- build-tsan
- cross-build-linux
- cifuzz
if: always()
steps:
- name: Check whether the needed jobs succeeded or failed
uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe
with:
allowed-failures: >-
build_ubuntu_ssltests,
build-windows-msi,
build-ubuntu-ssltests,
test-hypothesis,
cifuzz,
test_hypothesis,
allowed-skips: >-
${{
!fromJSON(needs.check_source.outputs.run-docs)
!fromJSON(needs.build-context.outputs.run-docs)
&& '
check-docs,
'
|| ''
}}
${{
needs.check_source.outputs.run_tests != 'true'
needs.build-context.outputs.run-tests != 'true'
&& '
check_generated_files,
build_macos,
build_macos_free_threaded,
build_ubuntu,
build_ubuntu_free_threaded,
build_ubuntu_ssltests,
build_windows,
build_windows_free_threaded,
build_asan,
check-autoconf-regen,
check-generated-files,
build-macos,
build-ubuntu,
build-ubuntu-ssltests,
build-wasi,
test-hypothesis,
build-asan,
build-tsan,
cross-build-linux,
'
|| ''
}}
${{
!fromJSON(needs.check_source.outputs.run_cifuzz)
!fromJSON(needs.build-context.outputs.run-windows-tests)
&& '
build-windows,
'
|| ''
}}
${{
!fromJSON(needs.build-context.outputs.run-ci-fuzz)
&& '
cifuzz,
'
|| ''
}}
${{
!fromJSON(needs.check_source.outputs.run_hypothesis)
&& '
test_hypothesis,
'
|| ''
}}
jobs: ${{ toJSON(needs) }}


@ -1,38 +0,0 @@
name: TestsMSI
on:
workflow_dispatch:
push:
branches:
- 'main'
- '3.*'
paths:
- 'Tools/msi/**'
- '.github/workflows/build_msi.yml'
pull_request:
branches:
- 'main'
- '3.*'
paths:
- 'Tools/msi/**'
- '.github/workflows/build_msi.yml'
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
jobs:
build:
name: Windows Installer
runs-on: windows-latest
timeout-minutes: 60
strategy:
matrix:
type: [x86, x64, arm64]
steps:
- uses: actions/checkout@v4
- name: Build CPython installer
run: .\Tools\msi\build.bat --doc -${{ matrix.type }}


@ -10,9 +10,6 @@ on:
- 'Doc/**'
- '.github/workflows/doc.yml'
permissions:
pull-requests: write
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
@ -20,6 +17,10 @@ concurrency:
jobs:
documentation-links:
runs-on: ubuntu-latest
permissions:
pull-requests: write
timeout-minutes: 5
steps:
- uses: readthedocs/actions/preview@v1
with:

.github/workflows/jit.yml (new file)

@ -0,0 +1,154 @@
name: JIT
on:
pull_request:
paths:
- '**jit**'
- 'Python/bytecodes.c'
- 'Python/optimizer*.c'
- '!Python/perf_jit_trampoline.c'
- '!**/*.md'
- '!**/*.ini'
push:
paths:
- '**jit**'
- 'Python/bytecodes.c'
- 'Python/optimizer*.c'
- '!Python/perf_jit_trampoline.c'
- '!**/*.md'
- '!**/*.ini'
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
interpreter:
name: Interpreter (Debug)
runs-on: ubuntu-24.04
timeout-minutes: 90
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build tier two interpreter
run: |
./configure --enable-experimental-jit=interpreter --with-pydebug
make all --jobs 4
- name: Test tier two interpreter
run: |
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
jit:
name: ${{ matrix.target }} (${{ matrix.debug && 'Debug' || 'Release' }})
needs: interpreter
runs-on: ${{ matrix.runner }}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
target:
- i686-pc-windows-msvc/msvc
- x86_64-pc-windows-msvc/msvc
- aarch64-pc-windows-msvc/msvc
- x86_64-apple-darwin/clang
- aarch64-apple-darwin/clang
- x86_64-unknown-linux-gnu/gcc
- aarch64-unknown-linux-gnu/gcc
debug:
- true
- false
llvm:
- 19
include:
- target: i686-pc-windows-msvc/msvc
architecture: Win32
runner: windows-latest
- target: x86_64-pc-windows-msvc/msvc
architecture: x64
runner: windows-latest
- target: aarch64-pc-windows-msvc/msvc
architecture: ARM64
runner: windows-11-arm
- target: x86_64-apple-darwin/clang
architecture: x86_64
runner: macos-13
- target: aarch64-apple-darwin/clang
architecture: aarch64
runner: macos-14
- target: x86_64-unknown-linux-gnu/gcc
architecture: x86_64
runner: ubuntu-24.04
- target: aarch64-unknown-linux-gnu/gcc
architecture: aarch64
runner: ubuntu-24.04-arm
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Windows
if: runner.os == 'Windows'
run: |
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0
./PCbuild/build.bat --experimental-jit ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }}
./PCbuild/rt.bat ${{ matrix.debug && '-d' || '' }} -p ${{ matrix.architecture }} -q --multiprocess 0 --timeout 4500 --verbose2 --verbose3
# The `find` line is required as a result of https://github.com/actions/runner-images/issues/9966.
# This is a bug in the macOS runner image where the pre-installed Python is installed in the same
# directory as the Homebrew Python, which causes the build to fail for macos-13. This line removes
# the symlink to the pre-installed Python so that the Homebrew Python is used instead.
- name: macOS
if: runner.os == 'macOS'
run: |
brew update
find /usr/local/bin -lname '*/Library/Frameworks/Python.framework/*' -delete
brew install llvm@${{ matrix.llvm }}
export SDKROOT="$(xcrun --show-sdk-path)"
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '' }}
make all --jobs 4
./python.exe -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
- name: Linux
if: runner.os == 'Linux'
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
./configure --enable-experimental-jit ${{ matrix.debug && '--with-pydebug' || '' }}
make all --jobs 4
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
jit-with-disabled-gil:
name: Free-Threaded (Debug)
needs: interpreter
runs-on: ubuntu-24.04
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
llvm:
- 19
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Build with JIT enabled and GIL disabled
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
./configure --enable-experimental-jit --with-pydebug --disable-gil
make all --jobs 4
- name: Run tests
run: |
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3


@ -20,7 +20,9 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.x"
- uses: pre-commit/action@v3.0.0
- uses: pre-commit/action@v3.0.1


@ -8,10 +8,12 @@ on:
pull_request:
paths:
- ".github/workflows/mypy.yml"
- "Lib/_pyrepl/**"
- "Lib/test/libregrtest/**"
- "Tools/build/generate_sbom.py"
- "Tools/cases_generator/**"
- "Tools/clinic/**"
- "Tools/jit/**"
- "Tools/peg_generator/**"
- "Tools/requirements-dev.txt"
- "Tools/wasm/**"
@ -31,25 +33,31 @@ concurrency:
jobs:
mypy:
strategy:
matrix:
target: [
"Lib/test/libregrtest",
"Tools/build/",
"Tools/cases_generator",
"Tools/clinic",
"Tools/peg_generator",
"Tools/wasm",
]
name: Run mypy on ${{ matrix.target }}
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
target: [
"Lib/_pyrepl",
"Lib/test/libregrtest",
"Tools/build",
"Tools/cases_generator",
"Tools/clinic",
"Tools/jit",
"Tools/peg_generator",
"Tools/wasm",
]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: "3.11"
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.13"
cache: pip
cache-dependency-path: Tools/requirements-dev.txt
- run: pip install -r Tools/requirements-dev.txt
- run: python3 Misc/mypy/make_symlinks.py --symlink
- run: mypy --config-file ${{ matrix.target }}/mypy.ini


@ -1,11 +1,9 @@
#!/bin/sh
apt-get update
# autoconf-archive is needed by autoreconf (check_generated_files job)
apt-get -yq install \
build-essential \
pkg-config \
autoconf-archive \
ccache \
gdb \
lcov \


@ -15,6 +15,7 @@ jobs:
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
# if an issue has any of these labels, it will be added
@ -23,7 +24,7 @@ jobs:
- { project: 32, label: sprint }
steps:
- uses: actions/add-to-project@v0.1.0
- uses: actions/add-to-project@v1.0.0
with:
project-url: https://github.com/orgs/python/projects/${{ matrix.project }}
github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}

View file

@ -4,19 +4,58 @@ on:
pull_request:
types: [opened, reopened, labeled, unlabeled, synchronize]
permissions:
issues: write
pull-requests: write
jobs:
label:
name: DO-NOT-MERGE / unresolved review
label-dnm:
name: DO-NOT-MERGE
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
permissions:
pull-requests: read
timeout-minutes: 10
steps:
- uses: mheap/github-action-required-labels@v5
- name: Check there's no DO-NOT-MERGE
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 0
labels: "DO-NOT-MERGE, awaiting changes, awaiting change review"
labels: |
DO-NOT-MERGE
label-reviews:
name: Unresolved review
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
permissions:
pull-requests: read
timeout-minutes: 10
steps:
# Check that the PR is not awaiting changes from the author due to previous review.
- name: Check there are no required changes
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 0
labels: |
awaiting changes
awaiting change review
- id: is-feature
name: Check whether this PR is a feature (contains a "type-feature" label)
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 1
labels: |
type-feature
exit_type: success # don't fail the check if the PR is not a feature, just record the result
# In case of a feature PR, check for a complete review (contains an "awaiting merge" label).
- id: awaiting-merge
if: steps.is-feature.outputs.status == 'success'
name: Check for complete review
uses: mheap/github-action-required-labels@v5
with:
mode: exactly
count: 1
labels: |
awaiting merge

.github/workflows/reusable-context.yml (new file)

@ -0,0 +1,104 @@
name: Reusable build context
on: # yamllint disable-line rule:truthy
workflow_call:
outputs:
# Every referenced step MUST always set its output variable,
# either via ``Tools/build/compute-changes.py`` or in this workflow file.
# Boolean outputs (generally prefixed ``run-``) can then later be used
# safely through the following idiom in job conditionals and other
# expressions. Here are some examples:
#
# if: fromJSON(needs.build-context.outputs.run-tests)
#
# ${{
# fromJSON(needs.build-context.outputs.run-tests)
# && 'truthy-branch'
# || 'falsy-branch'
# }}
#
config-hash:
description: Config hash value for use in cache keys
value: ${{ jobs.compute-changes.outputs.config-hash }} # str
run-docs:
description: Whether to build the docs
value: ${{ jobs.compute-changes.outputs.run-docs }} # bool
run-tests:
description: Whether to run the regular tests
value: ${{ jobs.compute-changes.outputs.run-tests }} # bool
run-windows-tests:
description: Whether to run the Windows tests
value: ${{ jobs.compute-changes.outputs.run-windows-tests }} # bool
run-windows-msi:
description: Whether to run the MSI installer smoke tests
value: ${{ jobs.compute-changes.outputs.run-windows-msi }} # bool
run-ci-fuzz:
description: Whether to run the CIFuzz job
value: ${{ jobs.compute-changes.outputs.run-ci-fuzz }} # bool
jobs:
compute-changes:
name: Create context from changed files
runs-on: ubuntu-latest
timeout-minutes: 10
outputs:
config-hash: ${{ steps.config-hash.outputs.hash }}
run-ci-fuzz: ${{ steps.changes.outputs.run-ci-fuzz }}
run-docs: ${{ steps.changes.outputs.run-docs }}
run-tests: ${{ steps.changes.outputs.run-tests }}
run-windows-msi: ${{ steps.changes.outputs.run-windows-msi }}
run-windows-tests: ${{ steps.changes.outputs.run-windows-tests }}
steps:
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3"
- run: >-
echo '${{ github.event_name }}'
- uses: actions/checkout@v4
with:
persist-credentials: false
ref: >-
${{
github.event_name == 'pull_request'
&& github.event.pull_request.head.sha
|| ''
}}
# Adapted from https://github.com/actions/checkout/issues/520#issuecomment-1167205721
- name: Fetch commits to get branch diff
if: github.event_name == 'pull_request'
run: |
set -eux
# Fetch enough history to find a common ancestor commit (aka merge-base):
git fetch origin "${refspec_pr}" --depth=$(( commits + 1 )) \
--no-tags --prune --no-recurse-submodules
# This should get the oldest commit in the local fetched history (which may not be the commit the PR branched from):
COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 "${branch_pr}" )
DATE=$( git log --date=iso8601 --format=%cd "${COMMON_ANCESTOR}" )
# Get all commits since that commit date from the base branch (eg: main):
git fetch origin "${refspec_base}" --shallow-since="${DATE}" \
--no-tags --prune --no-recurse-submodules
env:
branch_pr: 'origin/${{ github.event.pull_request.head.ref }}'
commits: ${{ github.event.pull_request.commits }}
refspec_base: '+${{ github.event.pull_request.base.sha }}:remotes/origin/${{ github.event.pull_request.base.ref }}'
refspec_pr: '+${{ github.event.pull_request.head.sha }}:remotes/origin/${{ github.event.pull_request.head.ref }}'
# We only want to run tests on PRs when related files are changed,
# or when someone triggers a manual workflow run.
- name: Compute changed files
id: changes
run: python Tools/build/compute-changes.py
env:
GITHUB_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
- name: Compute hash for config cache key
id: config-hash
run: |
echo "hash=${{ hashFiles('configure', 'configure.ac', '.github/workflows/build.yml') }}" >> "$GITHUB_OUTPUT"

View file

@@ -1,4 +1,4 @@
name: Docs
name: Reusable Docs
on:
workflow_call:
@@ -11,37 +11,48 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
build_doc:
build-doc:
name: 'Docs'
runs-on: ubuntu-latest
timeout-minutes: 60
env:
branch_base: 'origin/${{ github.event.pull_request.base.ref }}'
branch_pr: 'origin/${{ github.event.pull_request.head.ref }}'
commits: ${{ github.event.pull_request.commits }}
refspec_base: '+${{ github.event.pull_request.base.sha }}:remotes/origin/${{ github.event.pull_request.base.ref }}'
refspec_pr: '+${{ github.event.pull_request.head.sha }}:remotes/origin/${{ github.event.pull_request.head.ref }}'
steps:
- name: 'Check out latest PR branch commit'
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
persist-credentials: false
ref: >-
${{
github.event_name == 'pull_request'
&& github.event.pull_request.head.sha
|| ''
}}
# Adapted from https://github.com/actions/checkout/issues/520#issuecomment-1167205721
- name: 'Fetch commits to get branch diff'
if: github.event_name == 'pull_request'
run: |
# Fetch enough history to find a common ancestor commit (aka merge-base):
git fetch origin ${{ env.refspec_pr }} --depth=$(( ${{ github.event.pull_request.commits }} + 1 )) \
git fetch origin "${refspec_pr}" --depth=$(( commits + 1 )) \
--no-tags --prune --no-recurse-submodules
# This should get the oldest commit in the local fetched history (which may not be the commit the PR branched from):
COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 ${{ env.branch_pr }} )
COMMON_ANCESTOR=$( git rev-list --first-parent --max-parents=0 --max-count=1 "${branch_pr}" )
DATE=$( git log --date=iso8601 --format=%cd "${COMMON_ANCESTOR}" )
# Get all commits since that commit date from the base branch (eg: master or main):
git fetch origin ${{ env.refspec_base }} --shallow-since="${DATE}" \
git fetch origin "${refspec_base}" --shallow-since="${DATE}" \
--no-tags --prune --no-recurse-submodules
- name: 'Set up Python'
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: '3'
cache: 'pip'
@@ -54,42 +65,27 @@ jobs:
continue-on-error: true
run: |
set -Eeuo pipefail
# Build docs with the '-n' (nit-picky) option; write warnings to file
make -C Doc/ PYTHON=../python SPHINXOPTS="-q -n -W --keep-going -w sphinx-warnings.txt" html
# Build docs with the nit-picky option; write warnings to file
make -C Doc/ PYTHON=../python SPHINXOPTS="--quiet --nitpicky --fail-on-warning --keep-going --warning-file sphinx-warnings.txt" html
- name: 'Check warnings'
if: github.event_name == 'pull_request'
run: |
python Doc/tools/check-warnings.py \
--annotate-diff '${{ env.branch_base }}' '${{ env.branch_pr }}' \
--annotate-diff "${branch_base}" "${branch_pr}" \
--fail-if-regression \
--fail-if-improved
# This build doesn't use problem matchers or check annotations
build_doc_oldest_supported_sphinx:
name: 'Docs (Oldest Sphinx)'
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
- name: 'Set up Python'
uses: actions/setup-python@v4
with:
python-version: '3.11' # known to work with Sphinx 4.2
cache: 'pip'
cache-dependency-path: 'Doc/requirements-oldest-sphinx.txt'
- name: 'Install build dependencies'
run: make -C Doc/ venv REQUIREMENTS="requirements-oldest-sphinx.txt"
- name: 'Build HTML documentation'
run: make -C Doc/ SPHINXOPTS="-q" SPHINXERRORHANDLING="-W --keep-going" html
--fail-if-improved \
--fail-if-new-news-nit
# Run "doctest" on HEAD as new syntax doesn't exist in the latest stable release
doctest:
name: 'Doctest'
runs-on: ubuntu-latest
runs-on: ubuntu-24.04
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
- uses: actions/cache@v3
with:
persist-credentials: false
- uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ubuntu-doc-${{ hashFiles('Doc/requirements.txt') }}
@@ -105,4 +101,4 @@ jobs:
run: make -C Doc/ PYTHON=../python venv
# Use "xvfb-run" since some doctest tests open GUI windows
- name: 'Run documentation doctest'
run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="-W --keep-going" doctest
run: xvfb-run make -C Doc/ PYTHON=../python SPHINXERRORHANDLING="--fail-on-warning --keep-going" doctest

View file

@@ -1,46 +1,81 @@
name: Reusable macOS
on:
workflow_call:
inputs:
config_hash:
required: true
type: string
free-threaded:
free-threading:
required: false
type: boolean
default: false
os:
description: OS to run the job
required: true
type: string
env:
FORCE_COLOR: 1
jobs:
build_macos:
name: 'build and test'
runs-on: macos-latest
build-macos:
name: build and test (${{ inputs.os }})
runs-on: ${{ inputs.os }}
timeout-minutes: 60
env:
HOMEBREW_NO_ANALYTICS: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
PYTHONSTRICTEXTENSIONBUILD: 1
TERM: linux
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ inputs.config_hash }}
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ inputs.config_hash }}
- name: Install Homebrew dependencies
run: brew install pkg-config openssl@3.0 xz gdbm tcl-tk
run: |
brew install pkg-config openssl@3.0 xz gdbm tcl-tk@8 make
# Because alternate versions are not symlinked into place by default:
brew link --overwrite tcl-tk@8
- name: Configure CPython
run: |
MACOSX_DEPLOYMENT_TARGET=10.15 \
GDBM_CFLAGS="-I$(brew --prefix gdbm)/include" \
GDBM_LIBS="-L$(brew --prefix gdbm)/lib -lgdbm" \
./configure \
--config-cache \
--with-pydebug \
${{ inputs.free-threaded && '--disable-gil' || '' }} \
--enable-slower-safety \
--enable-safety \
${{ inputs.free-threading && '--disable-gil' || '' }} \
--prefix=/opt/python-dev \
--with-openssl="$(brew --prefix openssl@3.0)"
- name: Build CPython
run: make -j4
if : ${{ inputs.free-threading || inputs.os != 'macos-13' }}
run: gmake -j8
- name: Build CPython for compiler warning check
if : ${{ !inputs.free-threading && inputs.os == 'macos-13' }}
run: set -o pipefail; gmake -j8 --output-sync 2>&1 | tee compiler_output_macos.txt
- name: Display build info
run: make pythoninfo
- name: Check compiler warnings
if : ${{ !inputs.free-threading && inputs.os == 'macos-13' }}
run: >-
python3 Tools/build/check_warnings.py
--compiler-output-file-path=compiler_output_macos.txt
--warning-ignore-file-path=Tools/build/.warningignore_macos
--compiler-output-type=clang
--fail-on-regression
--fail-on-improvement
--path-prefix="./"
- name: Tests
run: make test
run: make ci
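For local experimentation, the Homebrew and configure steps above boil down to roughly the following. This is only a sketch: it assumes Homebrew is installed, and it omits the CI-only caching, install prefix, and compiler-warning checks.
```sh
brew install pkg-config openssl@3.0 xz gdbm tcl-tk@8 make
brew link --overwrite tcl-tk@8    # alternate versions are not symlinked by default
MACOSX_DEPLOYMENT_TARGET=10.15 \
GDBM_CFLAGS="-I$(brew --prefix gdbm)/include" \
GDBM_LIBS="-L$(brew --prefix gdbm)/lib -lgdbm" \
./configure --config-cache --with-pydebug --with-openssl="$(brew --prefix openssl@3.0)"
gmake -j8
```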

94
.github/workflows/reusable-tsan.yml vendored Normal file
View file

@@ -0,0 +1,94 @@
name: Reusable Thread Sanitizer
on:
workflow_call:
inputs:
config_hash:
required: true
type: string
free-threading:
description: Whether to use free-threaded mode
required: false
type: boolean
default: false
env:
FORCE_COLOR: 1
jobs:
build-tsan-reusable:
name: 'Thread sanitizer'
runs-on: ubuntu-24.04
timeout-minutes: 60
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ inputs.config_hash }}
- name: Install dependencies
run: |
sudo ./.github/workflows/posix-deps-apt.sh
# Install clang-17
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 17 # gh-121946: llvm-18 package is temporarily broken
sudo update-alternatives --install /usr/bin/clang clang /usr/bin/clang-17 100
sudo update-alternatives --set clang /usr/bin/clang-17
sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-17 100
sudo update-alternatives --set clang++ /usr/bin/clang++-17
# Reduce ASLR to avoid TSAN crashing
sudo sysctl -w vm.mmap_rnd_bits=28
- name: TSAN option setup
run: |
echo "TSAN_OPTIONS=log_path=${GITHUB_WORKSPACE}/tsan_log suppressions=${GITHUB_WORKSPACE}/Tools/tsan/suppressions${{
fromJSON(inputs.free-threading)
&& '_free_threading'
|| ''
}}.txt handle_segv=0" >> "$GITHUB_ENV"
echo "CC=clang" >> "$GITHUB_ENV"
echo "CXX=clang++" >> "$GITHUB_ENV"
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Configure CPython
run: >-
./configure
--config-cache
--with-thread-sanitizer
--with-pydebug
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
- name: Build CPython
run: make -j4
- name: Display build info
run: make pythoninfo
- name: Tests
run: ./python -m test --tsan -j4
- name: Parallel tests
if: fromJSON(inputs.free-threading)
run: ./python -m test --tsan-parallel --parallel-threads=4 -j4
- name: Display TSAN logs
if: always()
run: find "${GITHUB_WORKSPACE}" -name 'tsan_log.*' | xargs head -n 1000
- name: Archive TSAN logs
if: always()
uses: actions/upload-artifact@v4
with:
name: >-
tsan-logs-${{
fromJSON(inputs.free-threading)
&& 'free-threading'
|| 'default'
}}
path: tsan_log.*
if-no-files-found: ignore
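A rough local equivalent of the sanitizer steps above, for reference. This is a sketch only: it assumes a recent Clang is already on `PATH` and uses the default (non-free-threaded) suppressions file from the checkout.
```sh
export CC=clang CXX=clang++
export TSAN_OPTIONS="log_path=$PWD/tsan_log suppressions=$PWD/Tools/tsan/suppressions.txt handle_segv=0"
./configure --config-cache --with-thread-sanitizer --with-pydebug
make -j4
./python -m test --tsan -j4
```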

View file

@@ -1,71 +1,127 @@
name: Reusable Ubuntu
on:
workflow_call:
inputs:
config_hash:
required: true
type: string
options:
required: true
type: string
bolt-optimizations:
description: Whether to enable BOLT optimizations
required: false
type: boolean
default: false
free-threading:
description: Whether to use free-threaded mode
required: false
type: boolean
default: false
os:
description: OS to run the job
required: true
type: string
env:
FORCE_COLOR: 1
jobs:
build_ubuntu_reusable:
name: 'build and test'
build-ubuntu-reusable:
name: build and test (${{ inputs.os }})
runs-on: ${{ inputs.os }}
timeout-minutes: 60
runs-on: ubuntu-20.04
env:
OPENSSL_VER: 3.0.11
OPENSSL_VER: 3.0.15
PYTHONSTRICTEXTENSIONBUILD: 1
TERM: linux
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Install Clang and BOLT
if: ${{ fromJSON(inputs.bolt-optimizations) }}
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh 19
sudo apt-get install bolt-19
echo PATH="$(llvm-config-19 --bindir):$PATH" >> $GITHUB_ENV
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> "$GITHUB_ENV"
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> "$GITHUB_ENV"
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> "$GITHUB_ENV"
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
key: ${{ inputs.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory "$MULTISSL_DIR" --openssl "$OPENSSL_VER" --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Setup directory envs for out-of-tree builds
run: |
echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV
echo "CPYTHON_BUILDDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-builddir)" >> $GITHUB_ENV
echo "CPYTHON_RO_SRCDIR=$(realpath -m "${GITHUB_WORKSPACE}"/../cpython-ro-srcdir)" >> "$GITHUB_ENV"
echo "CPYTHON_BUILDDIR=$(realpath -m "${GITHUB_WORKSPACE}"/../cpython-builddir)" >> "$GITHUB_ENV"
- name: Create directories for read-only out-of-tree builds
run: mkdir -p $CPYTHON_RO_SRCDIR $CPYTHON_BUILDDIR
run: mkdir -p "$CPYTHON_RO_SRCDIR" "$CPYTHON_BUILDDIR"
- name: Bind mount sources read-only
run: sudo mount --bind -o ro $GITHUB_WORKSPACE $CPYTHON_RO_SRCDIR
run: sudo mount --bind -o ro "$GITHUB_WORKSPACE" "$CPYTHON_RO_SRCDIR"
- name: Runner image version
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: Restore config.cache
uses: actions/cache@v3
uses: actions/cache@v4
with:
path: ${{ env.CPYTHON_BUILDDIR }}/config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ inputs.config_hash }}
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ inputs.config_hash }}
- name: Configure CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: ${{ inputs.options }}
# `test_unpickle_module_race` writes to the source directory, which is
# read-only during builds — so we exclude it from profiling with BOLT.
run: >-
PROFILE_TASK='-m test --pgo --ignore test_unpickle_module_race'
../cpython-ro-srcdir/configure
--config-cache
--with-pydebug
--enable-slower-safety
--enable-safety
--with-openssl="$OPENSSL_DIR"
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
${{ fromJSON(inputs.bolt-optimizations) && '--enable-bolt' || '' }}
- name: Build CPython out-of-tree
if: ${{ inputs.free-threading }}
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make -j4
run: make -j
- name: Build CPython out-of-tree (for compiler warning check)
if: ${{ !inputs.free-threading }}
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: set -o pipefail; make -j --output-sync 2>&1 | tee compiler_output_ubuntu.txt
- name: Display build info
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make pythoninfo
- name: Check compiler warnings
if: ${{ !inputs.free-threading }}
run: >-
python Tools/build/check_warnings.py
--compiler-output-file-path="${CPYTHON_BUILDDIR}/compiler_output_ubuntu.txt"
--warning-ignore-file-path "${GITHUB_WORKSPACE}/Tools/build/.warningignore_ubuntu"
--compiler-output-type=gcc
--fail-on-regression
--fail-on-improvement
--path-prefix="../cpython-ro-srcdir/"
- name: Remount sources writable for tests
# some tests write to srcdir, lack of pyc files slows down testing
run: sudo mount $CPYTHON_RO_SRCDIR -oremount,rw
run: sudo mount "$CPYTHON_RO_SRCDIR" -oremount,rw
- name: Tests
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: xvfb-run make test
run: xvfb-run make ci
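For a local approximation of the out-of-tree build above (a sketch only: the source directory name is an example, and the read-only bind mount, OpenSSL override, and BOLT/PGO options are left out):
```sh
mkdir -p ../cpython-builddir
cd ../cpython-builddir
../cpython/configure --config-cache --with-pydebug --enable-safety   # path to your checkout
make -j
make pythoninfo
```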

84
.github/workflows/reusable-wasi.yml vendored Normal file
View file

@@ -0,0 +1,84 @@
name: Reusable WASI
on:
workflow_call:
inputs:
config_hash:
required: true
type: string
env:
FORCE_COLOR: 1
jobs:
build-wasi-reusable:
name: 'build and test'
runs-on: ubuntu-24.04
timeout-minutes: 60
env:
WASMTIME_VERSION: 22.0.0
WASI_SDK_VERSION: 24
WASI_SDK_PATH: /opt/wasi-sdk
CROSS_BUILD_PYTHON: cross-build/build
CROSS_BUILD_WASI: cross-build/wasm32-wasip1
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
# No problem matcher registered as one doesn't currently exist for Clang.
- name: "Install wasmtime"
uses: bytecodealliance/actions/wasmtime/setup@v1
with:
version: ${{ env.WASMTIME_VERSION }}
- name: "Restore WASI SDK"
id: cache-wasi-sdk
uses: actions/cache@v4
with:
path: ${{ env.WASI_SDK_PATH }}
key: ${{ runner.os }}-wasi-sdk-${{ env.WASI_SDK_VERSION }}
- name: "Install WASI SDK" # Hard-coded to x64.
if: steps.cache-wasi-sdk.outputs.cache-hit != 'true'
run: |
mkdir "${WASI_SDK_PATH}" && \
curl -s -S --location "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-${WASI_SDK_VERSION}/wasi-sdk-${WASI_SDK_VERSION}.0-x86_64-linux.tar.gz" | \
tar --strip-components 1 --directory "${WASI_SDK_PATH}" --extract --gunzip
- name: "Configure ccache action"
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: "Add ccache to PATH"
run: echo "PATH=/usr/lib/ccache:$PATH" >> "$GITHUB_ENV"
- name: "Install Python"
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: "Runner image version"
run: echo "IMAGE_OS_VERSION=${ImageOS}-${ImageVersion}" >> "$GITHUB_ENV"
- name: "Restore Python build config.cache"
uses: actions/cache@v4
with:
path: ${{ env.CROSS_BUILD_PYTHON }}/config.cache
# Include env.pythonLocation in key to avoid changes in environment when setup-python updates Python.
# Include the hash of `Tools/wasm/wasi.py` as it may change the environment variables.
# (Make sure to keep the key in sync with the other config.cache step below.)
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }}
- name: "Configure build Python"
run: python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug
- name: "Make build Python"
run: python3 Tools/wasm/wasi.py make-build-python
- name: "Restore host config.cache"
uses: actions/cache@v4
with:
path: ${{ env.CROSS_BUILD_WASI }}/config.cache
# Should be kept in sync with the other config.cache step above.
key: ${{ github.job }}-${{ env.IMAGE_OS_VERSION }}-${{ env.WASI_SDK_VERSION }}-${{ env.WASMTIME_VERSION }}-${{ inputs.config_hash }}-${{ hashFiles('Tools/wasm/wasi.py') }}-${{ env.pythonLocation }}
- name: "Configure host"
# `--with-pydebug` inferred from configure-build-python
run: python3 Tools/wasm/wasi.py configure-host -- --config-cache
- name: "Make host"
run: python3 Tools/wasm/wasi.py make-host
- name: "Display build info"
run: make --directory "${CROSS_BUILD_WASI}" pythoninfo
- name: "Test"
run: make --directory "${CROSS_BUILD_WASI}" test
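Outside CI, the same cross-build can be driven locally through `Tools/wasm/wasi.py`, assuming wasmtime and the WASI SDK are installed as in the steps above. A minimal sketch:
```sh
python3 Tools/wasm/wasi.py configure-build-python -- --config-cache --with-pydebug
python3 Tools/wasm/wasi.py make-build-python
python3 Tools/wasm/wasi.py configure-host -- --config-cache
python3 Tools/wasm/wasi.py make-host
make --directory cross-build/wasm32-wasip1 test
```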

View file

@@ -0,0 +1,31 @@
name: Reusable Windows MSI
on:
workflow_call:
inputs:
arch:
description: CPU architecture
required: true
type: string
permissions:
contents: read
env:
FORCE_COLOR: 1
jobs:
build:
name: installer for ${{ inputs.arch }}
runs-on: ${{ inputs.arch == 'arm64' && 'windows-11-arm' || 'windows-latest' }}
timeout-minutes: 60
env:
ARCH: ${{ inputs.arch }}
IncludeFreethreaded: true
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Build CPython installer
run: ./Tools/msi/build.bat --doc -"${ARCH}"
shell: bash

View file

@@ -1,53 +1,50 @@
name: Reusable Windows
on:
workflow_call:
inputs:
free-threaded:
arch:
description: CPU architecture
required: true
type: string
free-threading:
description: Whether to compile CPython in free-threading mode
required: false
type: boolean
default: false
env:
FORCE_COLOR: 1
IncludeUwp: >-
true
jobs:
build_win32:
name: 'build and test (x86)'
runs-on: windows-latest
build:
name: Build and test (${{ inputs.arch }})
runs-on: ${{ inputs.arch == 'arm64' && 'windows-11-arm' || 'windows-latest' }}
timeout-minutes: 60
env:
IncludeUwp: 'true'
steps:
- uses: actions/checkout@v4
- name: Build CPython
run: .\PCbuild\build.bat -e -d -v -p Win32 ${{ inputs.free-threaded && '--disable-gil' || '' }}
- name: Display build info
run: .\python.bat -m test.pythoninfo
- name: Tests
run: .\PCbuild\rt.bat -p Win32 -d -q --fast-ci
build_win_amd64:
name: 'build and test (x64)'
runs-on: windows-latest
timeout-minutes: 60
env:
IncludeUwp: 'true'
ARCH: ${{ inputs.arch }}
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Register MSVC problem matcher
if: inputs.arch != 'Win32'
run: echo "::add-matcher::.github/problem-matchers/msvc.json"
- name: Build CPython
run: .\PCbuild\build.bat -e -d -v -p x64 ${{ inputs.free-threaded && '--disable-gil' || '' }}
run: >-
.\\PCbuild\\build.bat
-e -d -v
-p "${ARCH}"
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
shell: bash
- name: Display build info
run: .\python.bat -m test.pythoninfo
run: .\\python.bat -m test.pythoninfo
- name: Tests
run: .\PCbuild\rt.bat -p x64 -d -q --fast-ci
build_win_arm64:
name: 'build (arm64)'
runs-on: windows-latest
timeout-minutes: 60
env:
IncludeUwp: 'true'
steps:
- uses: actions/checkout@v4
- name: Register MSVC problem matcher
run: echo "::add-matcher::.github/problem-matchers/msvc.json"
- name: Build CPython
run: .\PCbuild\build.bat -e -d -v -p arm64 ${{ inputs.free-threaded && '--disable-gil' || '' }}
run: >-
.\\PCbuild\\rt.bat
-p "${ARCH}"
-d -q --fast-ci
${{ fromJSON(inputs.free-threading) && '--disable-gil' || '' }}
shell: bash

View file

@@ -2,21 +2,19 @@ name: Mark stale pull requests
on:
schedule:
- cron: "0 0 * * *"
permissions:
pull-requests: write
- cron: "0 */6 * * *"
jobs:
stale:
if: github.repository_owner == 'python'
runs-on: ubuntu-latest
permissions:
pull-requests: write
timeout-minutes: 10
steps:
- name: "Check PRs"
uses: actions/stale@v8
uses: actions/stale@v9
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
stale-pr-message: 'This PR is stale because it has been open for 30 days with no activity.'

140
.github/workflows/tail-call.yml vendored Normal file
View file

@@ -0,0 +1,140 @@
name: Tail calling interpreter
on:
pull_request:
paths:
- '.github/workflows/tail-call.yml'
- 'Python/bytecodes.c'
- 'Python/ceval.c'
- 'Python/ceval_macros.h'
- 'Python/generated_cases.c.h'
push:
paths:
- '.github/workflows/tail-call.yml'
- 'Python/bytecodes.c'
- 'Python/ceval.c'
- 'Python/ceval_macros.h'
- 'Python/generated_cases.c.h'
workflow_dispatch:
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
env:
FORCE_COLOR: 1
jobs:
tail-call:
name: ${{ matrix.target }}
runs-on: ${{ matrix.runner }}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
target:
# Un-comment as we add support for more platforms for tail-calling interpreters.
# - i686-pc-windows-msvc/msvc
- x86_64-pc-windows-msvc/msvc
# - aarch64-pc-windows-msvc/msvc
- x86_64-apple-darwin/clang
- aarch64-apple-darwin/clang
- x86_64-unknown-linux-gnu/gcc
- aarch64-unknown-linux-gnu/gcc
- free-threading
llvm:
- 20
include:
# - target: i686-pc-windows-msvc/msvc
# architecture: Win32
# runner: windows-latest
- target: x86_64-pc-windows-msvc/msvc
architecture: x64
runner: windows-latest
# - target: aarch64-pc-windows-msvc/msvc
# architecture: ARM64
# runner: windows-latest
- target: x86_64-apple-darwin/clang
architecture: x86_64
runner: macos-13
- target: aarch64-apple-darwin/clang
architecture: aarch64
runner: macos-14
- target: x86_64-unknown-linux-gnu/gcc
architecture: x86_64
runner: ubuntu-24.04
- target: aarch64-unknown-linux-gnu/gcc
architecture: aarch64
runner: ubuntu-24.04-arm
- target: free-threading
architecture: x86_64
runner: ubuntu-24.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Native Windows (debug)
if: runner.os == 'Windows' && matrix.architecture != 'ARM64'
shell: cmd
run: |
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0
set PlatformToolset=clangcl
set LLVMToolsVersion=${{ matrix.llvm }}.1.0
set LLVMInstallDir=C:\Program Files\LLVM
call ./PCbuild/build.bat --tail-call-interp -d -p ${{ matrix.architecture }}
call ./PCbuild/rt.bat -d -p ${{ matrix.architecture }} -q --multiprocess 0 --timeout 4500 --verbose2 --verbose3
# No tests (yet):
- name: Emulated Windows (release)
if: runner.os == 'Windows' && matrix.architecture == 'ARM64'
shell: cmd
run: |
choco install llvm --allow-downgrade --no-progress --version ${{ matrix.llvm }}.1.0
set PlatformToolset=clangcl
set LLVMToolsVersion=${{ matrix.llvm }}.1.0
set LLVMInstallDir=C:\Program Files\LLVM
./PCbuild/build.bat --tail-call-interp -p ${{ matrix.architecture }}
# The `find` line is required as a result of https://github.com/actions/runner-images/issues/9966.
# This is a bug in the macOS runner image where the pre-installed Python is installed in the same
# directory as the Homebrew Python, which causes the build to fail for macos-13. This line removes
# the symlink to the pre-installed Python so that the Homebrew Python is used instead.
# Note: when a new LLVM is released, the homebrew installation directory changes, so the builds will fail.
# We either need to upgrade LLVM or change the directory being pointed to.
- name: Native macOS (release)
if: runner.os == 'macOS'
run: |
brew update
find /usr/local/bin -lname '*/Library/Frameworks/Python.framework/*' -delete
brew install llvm@${{ matrix.llvm }}
export SDKROOT="$(xcrun --show-sdk-path)"
export PATH="/usr/local/opt/llvm/bin:$PATH"
export PATH="/opt/homebrew/opt/llvm/bin:$PATH"
CC=clang-20 ./configure --with-tail-call-interp
make all --jobs 4
./python.exe -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
- name: Native Linux (debug)
if: runner.os == 'Linux' && matrix.target != 'free-threading'
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
CC=clang-20 ./configure --with-tail-call-interp --with-pydebug
make all --jobs 4
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
- name: Native Linux with free-threading (release)
if: matrix.target == 'free-threading'
run: |
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)" ./llvm.sh ${{ matrix.llvm }}
export PATH="$(llvm-config-${{ matrix.llvm }} --bindir):$PATH"
CC=clang-20 ./configure --with-tail-call-interp --disable-gil
make all --jobs 4
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3

View file

@@ -26,7 +26,9 @@ jobs:
timeout-minutes: 10
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: '3'
- name: Compare checksum of bundled wheels to the ones published on PyPI

6
.github/zizmor.yml vendored Normal file
View file

@@ -0,0 +1,6 @@
# Configuration for the zizmor static analysis tool, run via pre-commit in CI
# https://woodruffw.github.io/zizmor/configuration/
rules:
dangerous-triggers:
ignore:
- documentation-links.yml

16
.gitignore vendored
View file

@@ -38,6 +38,7 @@ tags
TAGS
.vs/
.vscode/
.cache/
gmon.out
.coverage
.mypy_cache/
@@ -69,6 +70,17 @@ Lib/test/data/*
/_bootstrap_python
/Makefile
/Makefile.pre
/iOSTestbed.*
iOS/Frameworks/
iOS/Resources/Info.plist
iOS/testbed/build
iOS/testbed/Python.xcframework/ios-*/bin
iOS/testbed/Python.xcframework/ios-*/include
iOS/testbed/Python.xcframework/ios-*/lib
iOS/testbed/Python.xcframework/ios-*/Python.framework
iOS/testbed/iOSTestbed.xcodeproj/project.xcworkspace
iOS/testbed/iOSTestbed.xcodeproj/xcuserdata
iOS/testbed/iOSTestbed.xcodeproj/xcshareddata
Mac/Makefile
Mac/PythonLauncher/Info.plist
Mac/PythonLauncher/Makefile
@@ -126,11 +138,11 @@ Tools/unicode/data/
# hendrikmuhs/ccache-action@v1
/.ccache
/cross-build/
/jit_stencils.h
/platform
/profile-clean-stamp
/profile-run-stamp
/profile-bolt-stamp
/Python/deepfreeze/*.c
/pybuilddir.txt
/pyconfig.h
/python-config
@@ -158,5 +170,5 @@ Python/frozen_modules/MANIFEST
/python
!/Python/
# main branch only: ABI files are not checked/maintained
# main branch only: ABI files are not checked/maintained.
Doc/data/python*.abi

View file

@@ -1,19 +1,44 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.7
rev: v0.9.1
hooks:
- id: ruff
name: Run Ruff on Lib/test/
name: Run Ruff (lint) on Doc/
args: [--exit-non-zero-on-fix]
files: ^Doc/
- id: ruff
name: Run Ruff (lint) on Lib/test/
args: [--exit-non-zero-on-fix]
files: ^Lib/test/
- id: ruff
name: Run Ruff on Argument Clinic
name: Run Ruff (lint) on Tools/build/check_warnings.py
args: [--exit-non-zero-on-fix, --config=Tools/build/.ruff.toml]
files: ^Tools/build/check_warnings.py
- id: ruff
name: Run Ruff (lint) on Argument Clinic
args: [--exit-non-zero-on-fix, --config=Tools/clinic/.ruff.toml]
files: ^Tools/clinic/|Lib/test/test_clinic.py
- id: ruff-format
name: Run Ruff (format) on Doc/
args: [--check]
files: ^Doc/
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 24.10.0
hooks:
- id: black
name: Run Black on Tools/build/check_warnings.py
files: ^Tools/build/check_warnings.py
args: [--line-length=79]
- id: black
name: Run Black on Tools/jit/
files: ^Tools/jit/
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v5.0.0
hooks:
- id: check-case-conflict
- id: check-merge-conflict
- id: check-toml
exclude: ^Lib/test/test_tomllib/
- id: check-yaml
@@ -23,12 +48,45 @@ repos:
- id: trailing-whitespace
types_or: [c, inc, python, rst]
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.31.0
hooks:
- id: check-dependabot
- id: check-github-workflows
- id: check-readthedocs
- repo: https://github.com/rhysd/actionlint
rev: v1.7.7
hooks:
- id: actionlint
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.1.1
hooks:
- id: zizmor
- repo: https://github.com/sphinx-contrib/sphinx-lint
rev: v0.9.1
rev: v1.0.0
hooks:
- id: sphinx-lint
args: [--enable=default-role]
files: ^Doc/|^Misc/NEWS.d/next/
files: ^Doc/|^Misc/NEWS.d/
- repo: local
hooks:
- id: blurb-no-space-c-api
name: Check C API news entries
language: fail
entry: Space found in path, move to Misc/NEWS.d/next/C_API/
files: Misc/NEWS.d/next/C API/20.*.rst
- repo: local
hooks:
- id: blurb-no-space-core-and-builtins
name: Check Core and Builtins news entries
language: fail
entry: Space found in path, move to Misc/NEWS.d/next/Core_and_Builtins/
files: Misc/NEWS.d/next/Core and Builtins/20.*.rst
- repo: meta
hooks:

View file

@@ -8,7 +8,7 @@ sphinx:
configuration: Doc/conf.py
build:
os: ubuntu-22.04
os: ubuntu-24.04
tools:
python: "3"
@@ -26,6 +26,9 @@ build:
exit 183;
fi
- asdf plugin add uv
- asdf install uv latest
- asdf global uv latest
- make -C Doc venv html
- mkdir _readthedocs
- mv Doc/build/html _readthedocs/html

160
Android/README.md Normal file
View file

@@ -0,0 +1,160 @@
# Python for Android
If you obtained this README as part of a release package, then the only
applicable sections are "Prerequisites", "Testing", and "Using in your own app".
If you obtained this README as part of the CPython source tree, then you can
also follow the other sections to compile Python for Android yourself.
However, most app developers should not need to do any of these things manually.
Instead, use one of the tools listed
[here](https://docs.python.org/3/using/android.html), which will provide a much
easier experience.
## Prerequisites
If you already have an Android SDK installed, export the `ANDROID_HOME`
environment variable to point at its location. Otherwise, here's how to install
it:
* Download the "Command line tools" from <https://developer.android.com/studio>.
* Create a directory `android-sdk/cmdline-tools`, and unzip the command line
tools package into it.
* Rename `android-sdk/cmdline-tools/cmdline-tools` to
`android-sdk/cmdline-tools/latest`.
* `export ANDROID_HOME=/path/to/android-sdk`
The `android.py` script also requires the following commands to be on the `PATH`:
* `curl`
* `java` (or set the `JAVA_HOME` environment variable)
* `tar`
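Condensed, the SDK setup described above looks roughly like this. It is only a sketch: the zip file name and install path are examples, and the "Command line tools" download itself comes from <https://developer.android.com/studio>.
```sh
mkdir -p ~/android-sdk/cmdline-tools
unzip ~/Downloads/commandlinetools-*_latest.zip -d ~/android-sdk/cmdline-tools
mv ~/android-sdk/cmdline-tools/cmdline-tools ~/android-sdk/cmdline-tools/latest
export ANDROID_HOME=~/android-sdk
# Sanity-check the other required commands:
for cmd in curl java tar; do command -v "$cmd" >/dev/null || echo "$cmd is missing"; done
```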
## Building
Python can be built for Android on any POSIX platform supported by the Android
development tools, which currently means Linux or macOS.
First we'll make a "build" Python (for your development machine), then use it to
help produce a "host" Python for Android. So make sure you have all the usual
tools and libraries needed to build Python for your development machine.
The easiest way to do a build is to use the `android.py` script. You can either
have it perform the entire build process from start to finish in one step, or
you can do it in discrete steps that mirror running `configure` and `make` for
each of the two builds of Python you end up producing.
The discrete steps for building via `android.py` are:
```sh
./android.py configure-build
./android.py make-build
./android.py configure-host HOST
./android.py make-host HOST
```
`HOST` identifies which architecture to build. To see the possible values, run
`./android.py configure-host --help`.
To do all steps in a single command, run:
```sh
./android.py build HOST
```
In the end you should have a build Python in `cross-build/build`, and a host
Python in `cross-build/HOST`.
You can use `--` as a separator for any of the `configure`-related commands,
including `build` itself, to pass arguments to the underlying `configure`
call. For example, if you want a pydebug build that also caches the results from
`configure`, you can do:
```sh
./android.py build HOST -- -C --with-pydebug
```
## Packaging
After building an architecture as described in the section above, you can
package it for release with this command:
```sh
./android.py package HOST
```
`HOST` is defined in the section above.
This will generate a tarball in `cross-build/HOST/dist`, whose structure is
similar to the `Android` directory of the CPython source tree.
## Testing
The Python test suite can be run on Linux, macOS, or Windows:
* On Linux, the emulator needs access to the KVM virtualization interface, and
a DISPLAY environment variable pointing at an X server.
The test suite can usually be run on a device with 2 GB of RAM, but this is
borderline, so you may need to increase it to 4 GB. As of Android
Studio Koala, 2 GB is the default for all emulators, although the user interface
may indicate otherwise. Locate the emulator's directory under `~/.android/avd`,
and find `hw.ramSize` in both config.ini and hardware-qemu.ini. Either set these
manually to the same value, or use the Android Studio Device Manager, which will
update both files.
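For example, you can check what both files currently contain before changing them (a minimal sketch; the AVD name varies, and `hardware-qemu.ini` only exists once the emulator has been started at least once):
```sh
grep -s hw.ramSize ~/.android/avd/*.avd/config.ini ~/.android/avd/*.avd/hardware-qemu.ini
```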
You can run the test suite either:
* Within the CPython repository, after doing a build as described above. On
Windows, you won't be able to do the build on the same machine, so you'll have
to copy the `cross-build/HOST/prefix` directory from somewhere else.
* Or by taking a release package built using the `package` command, extracting
it wherever you want, and using its own copy of `android.py`.
The test script supports the following modes:
* In `--connected` mode, it runs on a device or emulator you have already
connected to the build machine. List the available devices with
`$ANDROID_HOME/platform-tools/adb devices -l`, then pass a device ID to the
script like this:
```sh
./android.py test --connected emulator-5554
```
* In `--managed` mode, it uses a temporary headless emulator defined in the
`managedDevices` section of testbed/app/build.gradle.kts. This mode is slower,
but more reproducible.
We currently define two devices: `minVersion` and `maxVersion`, corresponding
to our minimum and maximum supported Android versions. For example:
```sh
./android.py test --managed maxVersion
```
By default, the only messages the script will show are Python's own stdout and
stderr. Add the `-v` option to also show Gradle output, and non-Python logcat
messages.
Any other arguments on the `android.py test` command line will be passed through
to `python -m test`; use `--` to separate them from android.py's own options.
See the [Python Developer's
Guide](https://devguide.python.org/testing/run-write-tests/) for common options;
most of them will work on Android, except for those that involve subprocesses,
such as `-j`.
Every time you run `android.py test`, changes in pure-Python files in the
repository's `Lib` directory will be picked up immediately. Changes in C files,
and architecture-specific files such as sysconfigdata, will not take effect
until you re-run `android.py make-host` or `build`.
## Using in your own app
See https://docs.python.org/3/using/android.html.

99
Android/android-env.sh Normal file
View file

@@ -0,0 +1,99 @@
# This script must be sourced with the following variables already set:
: "${ANDROID_HOME:?}" # Path to Android SDK
: "${HOST:?}" # GNU target triplet
# You may also override the following:
: "${api_level:=24}" # Minimum Android API level the build will run on
: "${PREFIX:-}" # Path in which to find required libraries
# Print all messages on stderr so they're visible when running within build-wheel.
log() {
echo "$1" >&2
}
fail() {
log "$1"
exit 1
}
# When moving to a new version of the NDK, carefully review the following:
#
# * https://developer.android.com/ndk/downloads/revision_history
#
# * https://android.googlesource.com/platform/ndk/+/ndk-rXX-release/docs/BuildSystemMaintainers.md
# where XX is the NDK version. Do a diff against the version you're upgrading from, e.g.:
# https://android.googlesource.com/platform/ndk/+/ndk-r25-release..ndk-r26-release/docs/BuildSystemMaintainers.md
ndk_version=27.1.12297006
ndk=$ANDROID_HOME/ndk/$ndk_version
if ! [ -e "$ndk" ]; then
log "Installing NDK - this may take several minutes"
yes | "$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager" "ndk;$ndk_version"
fi
if [ "$HOST" = "arm-linux-androideabi" ]; then
clang_triplet=armv7a-linux-androideabi
else
clang_triplet="$HOST"
fi
# These variables are based on BuildSystemMaintainers.md above, and
# $ndk/build/cmake/android.toolchain.cmake.
toolchain=$(echo "$ndk"/toolchains/llvm/prebuilt/*)
export AR="$toolchain/bin/llvm-ar"
export AS="$toolchain/bin/llvm-as"
export CC="$toolchain/bin/${clang_triplet}${api_level}-clang"
export CXX="${CC}++"
export LD="$toolchain/bin/ld"
export NM="$toolchain/bin/llvm-nm"
export RANLIB="$toolchain/bin/llvm-ranlib"
export READELF="$toolchain/bin/llvm-readelf"
export STRIP="$toolchain/bin/llvm-strip"
# The quotes make sure the wildcard in the `toolchain` assignment has been expanded.
for path in "$AR" "$AS" "$CC" "$CXX" "$LD" "$NM" "$RANLIB" "$READELF" "$STRIP"; do
if ! [ -e "$path" ]; then
fail "$path does not exist"
fi
done
export CFLAGS="-D__BIONIC_NO_PAGE_SIZE_MACRO"
export LDFLAGS="-Wl,--build-id=sha1 -Wl,--no-rosegment -Wl,-z,max-page-size=16384"
# Unlike Linux, Android does not implicitly use a dlopened library to resolve
# relocations in subsequently-loaded libraries, even if RTLD_GLOBAL is used
# (https://github.com/android/ndk/issues/1244). So any library that fails to
# build with this flag would also fail to load at runtime.
LDFLAGS="$LDFLAGS -Wl,--no-undefined"
# Many packages get away with omitting -lm on Linux, but Android is stricter.
LDFLAGS="$LDFLAGS -lm"
# -mstackrealign is included where necessary in the clang launcher scripts which are
# pointed to by $CC, so we don't need to include it here.
if [ "$HOST" = "arm-linux-androideabi" ]; then
CFLAGS="$CFLAGS -march=armv7-a -mthumb"
fi
if [ -n "${PREFIX:-}" ]; then
abs_prefix="$(realpath "$PREFIX")"
CFLAGS="$CFLAGS -I$abs_prefix/include"
LDFLAGS="$LDFLAGS -L$abs_prefix/lib"
export PKG_CONFIG="pkg-config --define-prefix"
export PKG_CONFIG_LIBDIR="$abs_prefix/lib/pkgconfig"
fi
# When compiling C++, some build systems will combine CFLAGS and CXXFLAGS, and some will
# use CXXFLAGS alone.
export CXXFLAGS="$CFLAGS"
# Use the same variable name as conda-build
if [ "$(uname)" = "Darwin" ]; then
CPU_COUNT="$(sysctl -n hw.ncpu)"
export CPU_COUNT
else
CPU_COUNT="$(nproc)"
export CPU_COUNT
fi
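A usage sketch, with example values: the script is meant to be sourced with `ANDROID_HOME` and `HOST` already set, after which the exported toolchain variables can be used directly.
```sh
export ANDROID_HOME=$HOME/android-sdk     # example SDK location
export HOST=aarch64-linux-android         # one of the supported GNU triplets
. Android/android-env.sh
"$CC" --version                           # the cross-compiler selected by the script
```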

725
Android/android.py Executable file
View file

@@ -0,0 +1,725 @@
#!/usr/bin/env python3
import asyncio
import argparse
import os
import re
import shlex
import shutil
import signal
import subprocess
import sys
import sysconfig
from asyncio import wait_for
from contextlib import asynccontextmanager
from datetime import datetime, timezone
from glob import glob
from os.path import basename, relpath
from pathlib import Path
from subprocess import CalledProcessError
from tempfile import TemporaryDirectory
SCRIPT_NAME = Path(__file__).name
ANDROID_DIR = Path(__file__).resolve().parent
CHECKOUT = ANDROID_DIR.parent
TESTBED_DIR = ANDROID_DIR / "testbed"
CROSS_BUILD_DIR = CHECKOUT / "cross-build"
HOSTS = ["aarch64-linux-android", "x86_64-linux-android"]
APP_ID = "org.python.testbed"
DECODE_ARGS = ("UTF-8", "backslashreplace")
try:
android_home = Path(os.environ['ANDROID_HOME'])
except KeyError:
sys.exit("The ANDROID_HOME environment variable is required.")
adb = Path(
f"{android_home}/platform-tools/adb"
+ (".exe" if os.name == "nt" else "")
)
gradlew = Path(
f"{TESTBED_DIR}/gradlew"
+ (".bat" if os.name == "nt" else "")
)
logcat_started = False
def delete_glob(pattern):
# Path.glob doesn't accept non-relative patterns.
for path in glob(str(pattern)):
path = Path(path)
print(f"Deleting {path} ...")
if path.is_dir() and not path.is_symlink():
shutil.rmtree(path)
else:
path.unlink()
def subdir(*parts, create=False):
path = CROSS_BUILD_DIR.joinpath(*parts)
if not path.exists():
if not create:
sys.exit(
f"{path} does not exist. Create it by running the appropriate "
f"`configure` subcommand of {SCRIPT_NAME}.")
else:
path.mkdir(parents=True)
return path
def run(command, *, host=None, env=None, log=True, **kwargs):
kwargs.setdefault("check", True)
if env is None:
env = os.environ.copy()
original_env = env.copy()
if host:
env_script = ANDROID_DIR / "android-env.sh"
env_output = subprocess.run(
f"set -eu; "
f"HOST={host}; "
f"PREFIX={subdir(host)}/prefix; "
f". {env_script}; "
f"export",
check=True, shell=True, text=True, stdout=subprocess.PIPE
).stdout
for line in env_output.splitlines():
# We don't require every line to match, as there may be some other
# output from installing the NDK.
if match := re.search(
"^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line
):
key, value = match[2], match[3]
if env.get(key) != value:
print(line)
env[key] = value
if env == original_env:
raise ValueError(f"Found no variables in {env_script.name} output:\n"
+ env_output)
if log:
print(">", " ".join(map(str, command)))
return subprocess.run(command, env=env, **kwargs)
def build_python_path():
"""The path to the build Python binary."""
build_dir = subdir("build")
binary = build_dir / "python"
if not binary.is_file():
binary = binary.with_suffix(".exe")
if not binary.is_file():
raise FileNotFoundError("Unable to find `python(.exe)` in "
f"{build_dir}")
return binary
def configure_build_python(context):
if context.clean:
clean("build")
os.chdir(subdir("build", create=True))
command = [relpath(CHECKOUT / "configure")]
if context.args:
command.extend(context.args)
run(command)
def make_build_python(context):
os.chdir(subdir("build"))
run(["make", "-j", str(os.cpu_count())])
def unpack_deps(host):
deps_url = "https://github.com/beeware/cpython-android-source-deps/releases/download"
for name_ver in ["bzip2-1.0.8-2", "libffi-3.4.4-3", "openssl-3.0.15-4",
"sqlite-3.49.1-0", "xz-5.4.6-1"]:
filename = f"{name_ver}-{host}.tar.gz"
download(f"{deps_url}/{name_ver}/{filename}")
run(["tar", "-xf", filename])
os.remove(filename)
def download(url, target_dir="."):
out_path = f"{target_dir}/{basename(url)}"
run(["curl", "-Lf", "-o", out_path, url])
return out_path
def configure_host_python(context):
if context.clean:
clean(context.host)
host_dir = subdir(context.host, create=True)
prefix_dir = host_dir / "prefix"
if not prefix_dir.exists():
prefix_dir.mkdir()
os.chdir(prefix_dir)
unpack_deps(context.host)
os.chdir(host_dir)
command = [
# Basic cross-compiling configuration
relpath(CHECKOUT / "configure"),
f"--host={context.host}",
f"--build={sysconfig.get_config_var('BUILD_GNU_TYPE')}",
f"--with-build-python={build_python_path()}",
"--without-ensurepip",
# Android always uses a shared libpython.
"--enable-shared",
"--without-static-libpython",
# Dependent libraries. The others are found using pkg-config: see
# android-env.sh.
f"--with-openssl={prefix_dir}",
]
if context.args:
command.extend(context.args)
run(command, host=context.host)
def make_host_python(context):
# The CFLAGS and LDFLAGS set in android-env include the prefix dir, so
# delete any previous Python installation to prevent it being used during
# the build.
host_dir = subdir(context.host)
prefix_dir = host_dir / "prefix"
for pattern in ("include/python*", "lib/libpython*", "lib/python*"):
delete_glob(f"{prefix_dir}/{pattern}")
os.chdir(host_dir)
run(["make", "-j", str(os.cpu_count())], host=context.host)
run(["make", "install", f"prefix={prefix_dir}"], host=context.host)
def build_all(context):
steps = [configure_build_python, make_build_python, configure_host_python,
make_host_python]
for step in steps:
step(context)
def clean(host):
delete_glob(CROSS_BUILD_DIR / host)
def clean_all(context):
for host in HOSTS + ["build"]:
clean(host)
def setup_sdk():
sdkmanager = android_home / (
"cmdline-tools/latest/bin/sdkmanager"
+ (".bat" if os.name == "nt" else "")
)
# Gradle will fail if it needs to install an SDK package whose license
# hasn't been accepted, so pre-accept all licenses.
if not all((android_home / "licenses" / path).exists() for path in [
"android-sdk-arm-dbt-license", "android-sdk-license"
]):
run([sdkmanager, "--licenses"], text=True, input="y\n" * 100)
# Gradle may install this automatically, but we can't rely on that because
# we need to run adb within the logcat task.
if not adb.exists():
run([sdkmanager, "platform-tools"])
# To avoid distributing compiled artifacts without corresponding source code,
# the Gradle wrapper is not included in the CPython repository. Instead, we
# extract it from the Gradle GitHub repository.
def setup_testbed():
# The Gradle version used for the build is specified in
# testbed/gradle/wrapper/gradle-wrapper.properties. This wrapper version
# doesn't need to match, as any version of the wrapper can download any
# version of Gradle.
version = "8.9.0"
paths = ["gradlew", "gradlew.bat", "gradle/wrapper/gradle-wrapper.jar"]
if all((TESTBED_DIR / path).exists() for path in paths):
return
for path in paths:
out_path = TESTBED_DIR / path
out_path.parent.mkdir(exist_ok=True)
download(
f"https://raw.githubusercontent.com/gradle/gradle/v{version}/{path}",
out_path.parent,
)
os.chmod(out_path, 0o755)
# run_testbed will build the app automatically, but it's useful to have this as
# a separate command to allow running the app outside of this script.
def build_testbed(context):
setup_sdk()
setup_testbed()
run(
[gradlew, "--console", "plain", "packageDebug", "packageDebugAndroidTest"],
cwd=TESTBED_DIR,
)
# Work around a bug involving sys.exit and TaskGroups
# (https://github.com/python/cpython/issues/101515).
def exit(*args):
raise MySystemExit(*args)
class MySystemExit(Exception):
pass
# The `test` subcommand runs all subprocesses through this context manager so
# that no matter what happens, they can always be cancelled from another task,
# and they will always be cleaned up on exit.
@asynccontextmanager
async def async_process(*args, **kwargs):
process = await asyncio.create_subprocess_exec(*args, **kwargs)
try:
yield process
finally:
if process.returncode is None:
# Allow a reasonably long time for Gradle to clean itself up,
# because we don't want stale emulators left behind.
timeout = 10
process.terminate()
try:
await wait_for(process.wait(), timeout)
except TimeoutError:
print(
f"Command {args} did not terminate after {timeout} seconds "
f" - sending SIGKILL"
)
process.kill()
# Even after killing the process we must still wait for it,
# otherwise we'll get the warning "Exception ignored in __del__".
await wait_for(process.wait(), timeout=1)
async def async_check_output(*args, **kwargs):
async with async_process(
*args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs
) as process:
stdout, stderr = await process.communicate()
if process.returncode == 0:
return stdout.decode(*DECODE_ARGS)
else:
raise CalledProcessError(
process.returncode, args,
stdout.decode(*DECODE_ARGS), stderr.decode(*DECODE_ARGS)
)
# Return a list of the serial numbers of connected devices. Emulators will have
# serials of the form "emulator-5678".
async def list_devices():
serials = []
header_found = False
lines = (await async_check_output(adb, "devices")).splitlines()
for line in lines:
# Ignore blank lines, and all lines before the header.
line = line.strip()
if line == "List of devices attached":
header_found = True
elif header_found and line:
try:
serial, status = line.split()
except ValueError:
raise ValueError(f"failed to parse {line!r}")
if status == "device":
serials.append(serial)
if not header_found:
raise ValueError(f"failed to parse {lines}")
return serials
async def find_device(context, initial_devices):
if context.managed:
print("Waiting for managed device - this may take several minutes")
while True:
new_devices = set(await list_devices()).difference(initial_devices)
if len(new_devices) == 0:
await asyncio.sleep(1)
elif len(new_devices) == 1:
serial = new_devices.pop()
print(f"Serial: {serial}")
return serial
else:
exit(f"Found more than one new device: {new_devices}")
else:
return context.connected
# An older version of this script in #121595 filtered the logs by UID instead.
# But logcat can't filter by UID until API level 31. If we ever switch back to
# filtering by UID, we'll also have to filter by time so we only show messages
# produced after the initial call to `stop_app`.
#
# We're more likely to miss the PID because it's shorter-lived, so there's a
# workaround in PythonSuite.kt to stop it being *too* short-lived.
async def find_pid(serial):
print("Waiting for app to start - this may take several minutes")
shown_error = False
while True:
try:
# `pidof` requires API level 24 or higher. The level 23 emulator
# includes it, but it doesn't work (it returns all processes).
pid = (await async_check_output(
adb, "-s", serial, "shell", "pidof", "-s", APP_ID
)).strip()
except CalledProcessError as e:
# If the app isn't running yet, pidof gives no output. So if there
# is output, there must have been some other error. However, this
# sometimes happens transiently, especially when running a managed
# emulator for the first time, so don't make it fatal.
if (e.stdout or e.stderr) and not shown_error:
print_called_process_error(e)
print("This may be transient, so continuing to wait")
shown_error = True
else:
# Some older devices (e.g. Nexus 4) return zero even when no process
# was found, so check whether we actually got any output.
if pid:
print(f"PID: {pid}")
return pid
# Loop fairly rapidly to avoid missing a short-lived process.
await asyncio.sleep(0.2)
async def logcat_task(context, initial_devices):
# Gradle may need to do some large downloads of libraries and emulator
# images. This will happen during find_device in --managed mode, or find_pid
# in --connected mode.
startup_timeout = 600
serial = await wait_for(find_device(context, initial_devices), startup_timeout)
pid = await wait_for(find_pid(serial), startup_timeout)
# `--pid` requires API level 24 or higher.
args = [adb, "-s", serial, "logcat", "--pid", pid, "--format", "tag"]
hidden_output = []
async with async_process(
*args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
) as process:
while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
if match := re.fullmatch(r"([A-Z])/(.*)", line, re.DOTALL):
level, message = match.groups()
else:
# If the regex doesn't match, this is probably the second or
# subsequent line of a multi-line message. Python won't produce
# such messages, but other components might.
level, message = None, line
# Exclude high-volume messages which are rarely useful.
if context.verbose < 2 and "from python test_syslog" in message:
continue
# Put high-level messages on stderr so they're highlighted in the
# buildbot logs. This will include Python's own stderr.
stream = (
sys.stderr
if level in ["W", "E", "F"] # WARNING, ERROR, FATAL (aka ASSERT)
else sys.stdout
)
# To simplify automated processing of the output, e.g. a buildbot
# posting a failure notice on a GitHub PR, we strip the level and
# tag indicators from Python's stdout and stderr.
for prefix in ["python.stdout: ", "python.stderr: "]:
if message.startswith(prefix):
global logcat_started
logcat_started = True
stream.write(message.removeprefix(prefix))
break
else:
if context.verbose:
# Non-Python messages add a lot of noise, but they may
# sometimes help explain a failure.
stream.write(line)
else:
hidden_output.append(line)
# If the device disconnects while logcat is running, which always
# happens in --managed mode, some versions of adb return non-zero.
# Distinguish this from a logcat startup error by checking whether we've
# received a message from Python yet.
status = await wait_for(process.wait(), timeout=1)
if status != 0 and not logcat_started:
raise CalledProcessError(status, args, "".join(hidden_output))
def stop_app(serial):
run([adb, "-s", serial, "shell", "am", "force-stop", APP_ID], log=False)
async def gradle_task(context):
env = os.environ.copy()
if context.managed:
task_prefix = context.managed
else:
task_prefix = "connected"
env["ANDROID_SERIAL"] = context.connected
args = [
gradlew, "--console", "plain", f"{task_prefix}DebugAndroidTest",
"-Pandroid.testInstrumentationRunnerArguments.pythonArgs="
+ shlex.join(context.args),
]
hidden_output = []
try:
async with async_process(
*args, cwd=TESTBED_DIR, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
) as process:
while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
# Gradle may take several minutes to install SDK packages, so
# it's worth showing those messages even in non-verbose mode.
if context.verbose or line.startswith('Preparing "Install'):
sys.stdout.write(line)
else:
hidden_output.append(line)
status = await wait_for(process.wait(), timeout=1)
if status == 0:
exit(0)
else:
raise CalledProcessError(status, args)
finally:
# If logcat never started, then something has gone badly wrong, so the
# user probably wants to see the Gradle output even in non-verbose mode.
if hidden_output and not logcat_started:
sys.stdout.write("".join(hidden_output))
# Gradle does not stop the tests when interrupted.
if context.connected:
stop_app(context.connected)
async def run_testbed(context):
setup_sdk()
setup_testbed()
if context.managed:
# In this mode, Gradle will create a device with an unpredictable name.
# So we save a list of the running devices before starting Gradle, and
# find_device then waits for a new device to appear.
initial_devices = await list_devices()
else:
# In case the previous shutdown was unclean, make sure the app isn't
# running, otherwise we might show logs from a previous run. This is
# unnecessary in --managed mode, because Gradle creates a new emulator
# every time.
stop_app(context.connected)
initial_devices = None
try:
async with asyncio.TaskGroup() as tg:
tg.create_task(logcat_task(context, initial_devices))
tg.create_task(gradle_task(context))
except* MySystemExit as e:
raise SystemExit(*e.exceptions[0].args) from None
except* CalledProcessError as e:
# Extract it from the ExceptionGroup so it can be handled by `main`.
raise e.exceptions[0]
def package_version(prefix_dir):
patchlevel_glob = f"{prefix_dir}/include/python*/patchlevel.h"
patchlevel_paths = glob(patchlevel_glob)
if len(patchlevel_paths) != 1:
sys.exit(f"{patchlevel_glob} matched {len(patchlevel_paths)} paths.")
for line in open(patchlevel_paths[0]):
if match := re.fullmatch(r'\s*#define\s+PY_VERSION\s+"(.+)"\s*', line):
version = match[1]
break
else:
sys.exit(f"Failed to find Python version in {patchlevel_paths[0]}.")
# If not building against a tagged commit, add a timestamp to the version.
# Follow the PyPA version number rules, as this will make it easier to
# process with other tools.
if version.endswith("+"):
version += datetime.now(timezone.utc).strftime("%Y%m%d.%H%M%S")
return version
def package(context):
prefix_dir = subdir(context.host, "prefix")
version = package_version(prefix_dir)
with TemporaryDirectory(prefix=SCRIPT_NAME) as temp_dir:
temp_dir = Path(temp_dir)
# Include all tracked files from the Android directory.
for line in run(
["git", "ls-files"],
cwd=ANDROID_DIR, capture_output=True, text=True, log=False,
).stdout.splitlines():
src = ANDROID_DIR / line
dst = temp_dir / line
dst.parent.mkdir(parents=True, exist_ok=True)
shutil.copy2(src, dst, follow_symlinks=False)
# Include anything from the prefix directory which could be useful
# either for embedding Python in an app, or building third-party
# packages against it.
for rel_dir, patterns in [
("include", ["openssl*", "python*", "sqlite*"]),
("lib", ["engines-3", "libcrypto*.so", "libpython*", "libsqlite*",
"libssl*.so", "ossl-modules", "python*"]),
("lib/pkgconfig", ["*crypto*", "*ssl*", "*python*", "*sqlite*"]),
]:
for pattern in patterns:
for src in glob(f"{prefix_dir}/{rel_dir}/{pattern}"):
dst = temp_dir / relpath(src, prefix_dir.parent)
dst.parent.mkdir(parents=True, exist_ok=True)
if Path(src).is_dir():
shutil.copytree(
src, dst, symlinks=True,
ignore=lambda *args: ["__pycache__"]
)
else:
shutil.copy2(src, dst, follow_symlinks=False)
dist_dir = subdir(context.host, "dist", create=True)
package_path = shutil.make_archive(
f"{dist_dir}/python-{version}-{context.host}", "gztar", temp_dir
)
print(f"Wrote {package_path}")
# Handle SIGTERM the same way as SIGINT. This ensures that if we're terminated
# by the buildbot worker, we'll make an attempt to clean up our subprocesses.
def install_signal_handler():
def signal_handler(*args):
os.kill(os.getpid(), signal.SIGINT)
signal.signal(signal.SIGTERM, signal_handler)
def parse_args():
parser = argparse.ArgumentParser()
subcommands = parser.add_subparsers(dest="subcommand")
# Subcommands
build = subcommands.add_parser("build", help="Build everything")
configure_build = subcommands.add_parser("configure-build",
help="Run `configure` for the "
"build Python")
make_build = subcommands.add_parser("make-build",
help="Run `make` for the build Python")
configure_host = subcommands.add_parser("configure-host",
help="Run `configure` for Android")
make_host = subcommands.add_parser("make-host",
help="Run `make` for Android")
subcommands.add_parser(
"clean", help="Delete all build and prefix directories")
subcommands.add_parser(
"build-testbed", help="Build the testbed app")
test = subcommands.add_parser(
"test", help="Run the test suite")
package = subcommands.add_parser("package", help="Make a release package")
# Common arguments
for subcommand in build, configure_build, configure_host:
subcommand.add_argument(
"--clean", action="store_true", default=False, dest="clean",
help="Delete the relevant build and prefix directories first")
for subcommand in [build, configure_host, make_host, package]:
subcommand.add_argument(
"host", metavar="HOST", choices=HOSTS,
help="Host triplet: choices=[%(choices)s]")
for subcommand in build, configure_build, configure_host:
subcommand.add_argument("args", nargs="*",
help="Extra arguments to pass to `configure`")
# Test arguments
test.add_argument(
"-v", "--verbose", action="count", default=0,
help="Show Gradle output, and non-Python logcat messages. "
"Use twice to include high-volume messages which are rarely useful.")
device_group = test.add_mutually_exclusive_group(required=True)
device_group.add_argument(
"--connected", metavar="SERIAL", help="Run on a connected device. "
"Connect it yourself, then get its serial from `adb devices`.")
device_group.add_argument(
"--managed", metavar="NAME", help="Run on a Gradle-managed device. "
"These are defined in `managedDevices` in testbed/app/build.gradle.kts.")
test.add_argument(
"args", nargs="*", help=f"Arguments for `python -m test`. "
f"Separate them from {SCRIPT_NAME}'s own arguments with `--`.")
return parser.parse_args()
def main():
install_signal_handler()
# Under the buildbot, stdout is not a TTY, but we must still flush after
# every line to make sure our output appears in the correct order relative
# to the output of our subprocesses.
for stream in [sys.stdout, sys.stderr]:
stream.reconfigure(line_buffering=True)
context = parse_args()
dispatch = {
"configure-build": configure_build_python,
"make-build": make_build_python,
"configure-host": configure_host_python,
"make-host": make_host_python,
"build": build_all,
"clean": clean_all,
"build-testbed": build_testbed,
"test": run_testbed,
"package": package,
}
try:
result = dispatch[context.subcommand](context)
if asyncio.iscoroutine(result):
asyncio.run(result)
except CalledProcessError as e:
print_called_process_error(e)
sys.exit(1)
def print_called_process_error(e):
for stream_name in ["stdout", "stderr"]:
content = getattr(e, stream_name)
stream = getattr(sys, stream_name)
if content:
stream.write(content)
if not content.endswith("\n"):
stream.write("\n")
# Format the command so it can be copied into a shell. shlex uses single
# quotes, so we surround the whole command with double quotes.
args_joined = (
e.cmd if isinstance(e.cmd, str)
else " ".join(shlex.quote(str(arg)) for arg in e.cmd)
)
print(
f'Command "{args_joined}" returned exit status {e.returncode}'
)
if __name__ == "__main__":
main()

Android/testbed/.gitignore vendored Normal file
View file

@ -0,0 +1,22 @@
# The Gradle wrapper can be downloaded by running the `test` or `build-testbed`
# commands of android.py.
/gradlew
/gradlew.bat
/gradle/wrapper/gradle-wrapper.jar
# The repository's top-level .gitignore file ignores all .idea directories, but
# we want to keep any files which can't be regenerated from the Gradle
# configuration.
!.idea/
/.idea/*
!/.idea/inspectionProfiles
*.iml
.gradle
/local.properties
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties

View file

@ -0,0 +1,8 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="AndroidLintGradleDependency" enabled="true" level="WEAK WARNING" enabled_by_default="true" editorAttributes="INFO_ATTRIBUTES" />
<inspection_tool class="AndroidLintOldTargetApi" enabled="true" level="WEAK WARNING" enabled_by_default="true" editorAttributes="INFO_ATTRIBUTES" />
<inspection_tool class="UnstableApiUsage" enabled="true" level="WEAK WARNING" enabled_by_default="true" editorAttributes="INFO_ATTRIBUTES" />
</profile>
</component>

Android/testbed/app/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
/build

View file

@ -0,0 +1,250 @@
import com.android.build.api.variant.*
import kotlin.math.max
plugins {
id("com.android.application")
id("org.jetbrains.kotlin.android")
}
val ANDROID_DIR = file("../..")
val PYTHON_DIR = ANDROID_DIR.parentFile!!
val PYTHON_CROSS_DIR = file("$PYTHON_DIR/cross-build")
val inSourceTree = (
ANDROID_DIR.name == "Android" && file("$PYTHON_DIR/pyconfig.h.in").exists()
)
val KNOWN_ABIS = mapOf(
"aarch64-linux-android" to "arm64-v8a",
"x86_64-linux-android" to "x86_64",
)
// Discover prefixes.
val prefixes = ArrayList<File>()
if (inSourceTree) {
for ((triplet, _) in KNOWN_ABIS.entries) {
val prefix = file("$PYTHON_CROSS_DIR/$triplet/prefix")
if (prefix.exists()) {
prefixes.add(prefix)
}
}
} else {
// Testbed is inside a release package.
val prefix = file("$ANDROID_DIR/prefix")
if (prefix.exists()) {
prefixes.add(prefix)
}
}
if (prefixes.isEmpty()) {
throw GradleException(
"No Android prefixes found: see README.md for testing instructions"
)
}
// Detect Python versions and ABIs.
lateinit var pythonVersion: String
var abis = HashMap<File, String>()
for ((i, prefix) in prefixes.withIndex()) {
val libDir = file("$prefix/lib")
val version = run {
for (filename in libDir.list()!!) {
"""python(\d+\.\d+)""".toRegex().matchEntire(filename)?.let {
return@run it.groupValues[1]
}
}
throw GradleException("Failed to find Python version in $libDir")
}
if (i == 0) {
pythonVersion = version
} else if (pythonVersion != version) {
throw GradleException(
"${prefixes[0]} is Python $pythonVersion, but $prefix is Python $version"
)
}
val libPythonDir = file("$libDir/python$pythonVersion")
val triplet = run {
for (filename in libPythonDir.list()!!) {
"""_sysconfigdata__android_(.+).py""".toRegex().matchEntire(filename)?.let {
return@run it.groupValues[1]
}
}
throw GradleException("Failed to find Python triplet in $libPythonDir")
}
abis[prefix] = KNOWN_ABIS[triplet]!!
}
android {
val androidEnvFile = file("../../android-env.sh").absoluteFile
namespace = "org.python.testbed"
compileSdk = 34
defaultConfig {
applicationId = "org.python.testbed"
minSdk = androidEnvFile.useLines {
for (line in it) {
"""api_level:=(\d+)""".toRegex().find(line)?.let {
return@useLines it.groupValues[1].toInt()
}
}
throw GradleException("Failed to find API level in $androidEnvFile")
}
targetSdk = 34
versionCode = 1
versionName = "1.0"
ndk.abiFilters.addAll(abis.values)
externalNativeBuild.cmake.arguments(
"-DPYTHON_PREFIX_DIR=" + if (inSourceTree) {
// AGP uses the ${} syntax for its own purposes, so use a Jinja style
// placeholder.
"$PYTHON_CROSS_DIR/{{triplet}}/prefix"
} else {
prefixes[0]
},
"-DPYTHON_VERSION=$pythonVersion",
"-DANDROID_SUPPORT_FLEXIBLE_PAGE_SIZES=ON",
)
testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
}
ndkVersion = androidEnvFile.useLines {
for (line in it) {
"""ndk_version=(\S+)""".toRegex().find(line)?.let {
return@useLines it.groupValues[1]
}
}
throw GradleException("Failed to find NDK version in $androidEnvFile")
}
externalNativeBuild.cmake {
path("src/main/c/CMakeLists.txt")
}
// Set this property to something non-empty, otherwise it'll use the default
// list, which ignores asset directories beginning with an underscore.
aaptOptions.ignoreAssetsPattern = ".git"
compileOptions {
sourceCompatibility = JavaVersion.VERSION_1_8
targetCompatibility = JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = "1.8"
}
testOptions {
managedDevices {
localDevices {
create("minVersion") {
device = "Small Phone"
// Managed devices have a minimum API level of 27.
apiLevel = max(27, defaultConfig.minSdk!!)
// ATD devices are smaller and faster, but have a minimum
// API level of 30.
systemImageSource = if (apiLevel >= 30) "aosp-atd" else "aosp"
}
create("maxVersion") {
device = "Small Phone"
apiLevel = defaultConfig.targetSdk!!
systemImageSource = "aosp-atd"
}
}
// If the previous test run succeeded and nothing has changed,
// Gradle thinks there's no need to run it again. Override that.
afterEvaluate {
(localDevices.names + listOf("connected")).forEach {
tasks.named("${it}DebugAndroidTest") {
outputs.upToDateWhen { false }
}
}
}
}
}
}
dependencies {
implementation("androidx.appcompat:appcompat:1.6.1")
implementation("com.google.android.material:material:1.11.0")
implementation("androidx.constraintlayout:constraintlayout:2.1.4")
androidTestImplementation("androidx.test.ext:junit:1.1.5")
androidTestImplementation("androidx.test:rules:1.5.0")
}
// Create some custom tasks to copy Python and its standard library from
// elsewhere in the repository.
androidComponents.onVariants { variant ->
val pyPlusVer = "python$pythonVersion"
generateTask(variant, variant.sources.assets!!) {
into("python") {
// Include files such as pyconfig.h are used by some of the tests.
into("include/$pyPlusVer") {
for (prefix in prefixes) {
from("$prefix/include/$pyPlusVer")
}
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
}
into("lib/$pyPlusVer") {
// To aid debugging, the source directory takes priority when
// running inside a CPython source tree.
if (inSourceTree) {
from("$PYTHON_DIR/Lib")
}
for (prefix in prefixes) {
from("$prefix/lib/$pyPlusVer")
}
into("site-packages") {
from("$projectDir/src/main/python")
}
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
exclude("**/__pycache__")
}
}
}
generateTask(variant, variant.sources.jniLibs!!) {
for ((prefix, abi) in abis.entries) {
into(abi) {
from("$prefix/lib")
include("libpython*.*.so")
include("lib*_python.so")
}
}
}
}
fun generateTask(
variant: ApplicationVariant, directories: SourceDirectories,
configure: GenerateTask.() -> Unit
) {
val taskName = "generate" +
listOf(variant.name, "Python", directories.name)
.map { it.replaceFirstChar(Char::uppercase) }
.joinToString("")
directories.addGeneratedSourceDirectory(
tasks.register<GenerateTask>(taskName) {
into(outputDir)
configure()
},
GenerateTask::outputDir)
}
// addGeneratedSourceDirectory requires the task to have a DirectoryProperty.
abstract class GenerateTask: Sync() {
@get:OutputDirectory
abstract val outputDir: DirectoryProperty
}

View file

@ -0,0 +1,35 @@
package org.python.testbed
import androidx.test.annotation.UiThreadTest
import androidx.test.platform.app.InstrumentationRegistry
import androidx.test.ext.junit.runners.AndroidJUnit4
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.Assert.*
@RunWith(AndroidJUnit4::class)
class PythonSuite {
@Test
@UiThreadTest
fun testPython() {
val start = System.currentTimeMillis()
try {
val context =
InstrumentationRegistry.getInstrumentation().targetContext
val args =
InstrumentationRegistry.getArguments().getString("pythonArgs", "")
val status = PythonTestRunner(context).run(args)
assertEquals(0, status)
} finally {
// Make sure the process lives long enough for the test script to
// detect it (see `find_pid` in android.py).
val delay = 2000 - (System.currentTimeMillis() - start)
if (delay > 0) {
Thread.sleep(delay)
}
}
}
}

View file

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.INTERNET"/>
<application
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/Theme.Material3.Light.NoActionBar">
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

View file

@ -0,0 +1,14 @@
cmake_minimum_required(VERSION 3.4.1)
project(testbed)
# Resolve variables from the command line.
string(
REPLACE {{triplet}} ${CMAKE_LIBRARY_ARCHITECTURE}
PYTHON_PREFIX_DIR ${PYTHON_PREFIX_DIR}
)
include_directories(${PYTHON_PREFIX_DIR}/include/python${PYTHON_VERSION})
link_directories(${PYTHON_PREFIX_DIR}/lib)
link_libraries(log python${PYTHON_VERSION})
add_library(main_activity SHARED main_activity.c)

View file

@ -0,0 +1,152 @@
#include <android/log.h>
#include <errno.h>
#include <jni.h>
#include <pthread.h>
#include <Python.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
static void throw_runtime_exception(JNIEnv *env, const char *message) {
(*env)->ThrowNew(
env,
(*env)->FindClass(env, "java/lang/RuntimeException"),
message);
}
// --- Stdio redirection ------------------------------------------------------
// Most apps won't need this, because the Python-level sys.stdout and sys.stderr
// are redirected to the Android logcat by Python itself. However, in the
// testbed it's useful to redirect the native streams as well, to debug problems
// in the Python startup or redirection process.
//
// Based on
// https://github.com/beeware/briefcase-android-gradle-template/blob/v0.3.11/%7B%7B%20cookiecutter.safe_formal_name%20%7D%7D/app/src/main/cpp/native-lib.cpp
typedef struct {
FILE *file;
int fd;
android_LogPriority priority;
char *tag;
int pipe[2];
} StreamInfo;
// The FILE member can't be initialized here because stdout and stderr are not
// compile-time constants. Instead, it's initialized immediately before the
// redirection.
static StreamInfo STREAMS[] = {
{NULL, STDOUT_FILENO, ANDROID_LOG_INFO, "native.stdout", {-1, -1}},
{NULL, STDERR_FILENO, ANDROID_LOG_WARN, "native.stderr", {-1, -1}},
{NULL, -1, ANDROID_LOG_UNKNOWN, NULL, {-1, -1}},
};
// The maximum length of a log message in bytes, including the level marker and
// tag, is defined as LOGGER_ENTRY_MAX_PAYLOAD in
// platform/system/logging/liblog/include/log/log.h. As of API level 30, messages
// longer than this will be truncated by logcat. This limit has already been
// reduced at least once in the history of Android (from 4076 to 4068 between API
// level 23 and 26), so leave some headroom.
static const int MAX_BYTES_PER_WRITE = 4000;
static void *redirection_thread(void *arg) {
StreamInfo *si = (StreamInfo*)arg;
ssize_t read_size;
char buf[MAX_BYTES_PER_WRITE];
while ((read_size = read(si->pipe[0], buf, sizeof buf - 1)) > 0) {
buf[read_size] = '\0'; /* add null-terminator */
__android_log_write(si->priority, si->tag, buf);
}
return 0;
}
static char *redirect_stream(StreamInfo *si) {
/* make the FILE unbuffered, to ensure messages are never lost */
if (setvbuf(si->file, 0, _IONBF, 0)) {
return "setvbuf";
}
/* create the pipe and redirect the file descriptor */
if (pipe(si->pipe)) {
return "pipe";
}
if (dup2(si->pipe[1], si->fd) == -1) {
return "dup2";
}
/* start the logging thread */
pthread_t thr;
if ((errno = pthread_create(&thr, 0, redirection_thread, si))) {
return "pthread_create";
}
if ((errno = pthread_detach(thr))) {
return "pthread_detach";
}
return 0;
}
JNIEXPORT void JNICALL Java_org_python_testbed_PythonTestRunner_redirectStdioToLogcat(
JNIEnv *env, jobject obj
) {
STREAMS[0].file = stdout;
STREAMS[1].file = stderr;
for (StreamInfo *si = STREAMS; si->file; si++) {
char *error_prefix;
if ((error_prefix = redirect_stream(si))) {
char error_message[1024];
snprintf(error_message, sizeof(error_message),
"%s: %s", error_prefix, strerror(errno));
throw_runtime_exception(env, error_message);
return;
}
}
}
// --- Python initialization ---------------------------------------------------
static PyStatus set_config_string(
JNIEnv *env, PyConfig *config, wchar_t **config_str, jstring value
) {
const char *value_utf8 = (*env)->GetStringUTFChars(env, value, NULL);
PyStatus status = PyConfig_SetBytesString(config, config_str, value_utf8);
(*env)->ReleaseStringUTFChars(env, value, value_utf8);
return status;
}
static void throw_status(JNIEnv *env, PyStatus status) {
throw_runtime_exception(env, status.err_msg ? status.err_msg : "");
}
JNIEXPORT int JNICALL Java_org_python_testbed_PythonTestRunner_runPython(
JNIEnv *env, jobject obj, jstring home, jstring runModule
) {
PyConfig config;
PyStatus status;
PyConfig_InitIsolatedConfig(&config);
status = set_config_string(env, &config, &config.home, home);
if (PyStatus_Exception(status)) {
throw_status(env, status);
return 1;
}
status = set_config_string(env, &config, &config.run_module, runModule);
if (PyStatus_Exception(status)) {
throw_status(env, status);
return 1;
}
// Some tests generate SIGPIPE and SIGXFSZ, which should be ignored.
config.install_signal_handlers = 1;
status = Py_InitializeFromConfig(&config);
if (PyStatus_Exception(status)) {
throw_status(env, status);
return 1;
}
return Py_RunMain();
}

View file

@ -0,0 +1,79 @@
package org.python.testbed
import android.content.Context
import android.os.*
import android.system.Os
import android.widget.TextView
import androidx.appcompat.app.*
import java.io.*
// Launching the tests from an activity is OK for a quick check, but for
// anything more complicated it'll be more convenient to use `android.py test`
// to launch the tests via PythonSuite.
class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
val status = PythonTestRunner(this).run("-W -uall")
findViewById<TextView>(R.id.tvHello).text = "Exit status $status"
}
}
class PythonTestRunner(val context: Context) {
/** @param args Extra arguments for `python -m test`.
* @return The Python exit status: zero if the tests passed, nonzero if
* they failed. */
fun run(args: String = "") : Int {
Os.setenv("PYTHON_ARGS", args, true)
// Python needs this variable to help it find the temporary directory,
// but Android only sets it on API level 33 and later.
Os.setenv("TMPDIR", context.cacheDir.toString(), false)
val pythonHome = extractAssets()
System.loadLibrary("main_activity")
redirectStdioToLogcat()
// The main module is in src/main/python/main.py.
return runPython(pythonHome.toString(), "main")
}
private fun extractAssets() : File {
val pythonHome = File(context.filesDir, "python")
if (pythonHome.exists() && !pythonHome.deleteRecursively()) {
throw RuntimeException("Failed to delete $pythonHome")
}
extractAssetDir("python", context.filesDir)
return pythonHome
}
private fun extractAssetDir(path: String, targetDir: File) {
val names = context.assets.list(path)
?: throw RuntimeException("Failed to list $path")
val targetSubdir = File(targetDir, path)
if (!targetSubdir.mkdirs()) {
throw RuntimeException("Failed to create $targetSubdir")
}
for (name in names) {
val subPath = "$path/$name"
val input: InputStream
try {
input = context.assets.open(subPath)
} catch (e: FileNotFoundException) {
extractAssetDir(subPath, targetDir)
continue
}
input.use {
File(targetSubdir, name).outputStream().use { output ->
input.copyTo(output)
}
}
}
}
private external fun redirectStdioToLogcat()
private external fun runPython(home: String, runModule: String) : Int
}

View file

@ -0,0 +1,32 @@
import os
import runpy
import shlex
import signal
import sys
# Some tests use SIGUSR1, but that's blocked by default in an Android app in
# order to make it available to `sigwait` in the Signal Catcher thread.
# (https://cs.android.com/android/platform/superproject/+/android14-qpr3-release:art/runtime/signal_catcher.cc).
# That thread's functionality is only useful for debugging the JVM, so disabling
# it should not weaken the tests.
#
# There's no safe way of stopping the thread completely (#123982), but simply
# unblocking SIGUSR1 is enough to fix most tests.
#
# However, in tests that generate multiple different signals in quick
# succession, it's possible for SIGUSR1 to arrive while the main thread is busy
# running the C-level handler for a different signal. In that case, the SIGUSR1
# may be sent to the Signal Catcher thread instead, which will generate a log
# message containing the text "reacting to signal".
#
# Such tests may need to be changed in one of the following ways:
# * Use a signal other than SIGUSR1 (e.g. test_stress_delivery_simultaneous in
# test_signal.py).
# * Send the signal to a specific thread rather than the whole process (e.g.
#   test_signals in test_threadsignals.py).
signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGUSR1])
sys.argv[1:] = shlex.split(os.environ["PYTHON_ARGS"])
# The test module will call sys.exit to indicate whether the tests passed.
runpy.run_module("test")

Binary file not shown.


View file

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<TextView
android:id="@+id/tvHello"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Hello World!"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

View file

@ -0,0 +1,3 @@
<resources>
<string name="app_name">Python testbed</string>
</resources>

View file

@ -0,0 +1,5 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
plugins {
id("com.android.application") version "8.6.1" apply false
id("org.jetbrains.kotlin.android") version "1.9.22" apply false
}

View file

@ -0,0 +1,28 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
# By default, the app will be uninstalled after the tests finish (apparently
# after 10 seconds in case of an unclean shutdown). We disable this, because
# when using android.py it can conflict with the installation of the next run.
android.injected.androidTest.leaveApksInstalledAfterRun=true

View file

@ -0,0 +1,6 @@
#Mon Feb 19 20:29:06 GMT 2024
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

View file

@ -0,0 +1,18 @@
pluginManagement {
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
dependencyResolutionManagement {
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
google()
mavenCentral()
}
}
rootProject.name = "Python testbed"
include(":app")

Doc/.ruff.toml Normal file
View file

@ -0,0 +1,42 @@
target-version = "py312" # Align with the version in oldest_supported_sphinx
fix = true
output-format = "full"
line-length = 79
extend-exclude = [
"includes/*",
# Temporary exclusions:
"tools/extensions/pyspecific.py",
]
[lint]
preview = true
select = [
"C4", # flake8-comprehensions
"B", # flake8-bugbear
"E", # pycodestyle
"F", # pyflakes
"FA", # flake8-future-annotations
"FLY", # flynt
"FURB", # refurb
"G", # flake8-logging-format
"I", # isort
"LOG", # flake8-logging
"N", # pep8-naming
"PERF", # perflint
"PGH", # pygrep-hooks
"PT", # flake8-pytest-style
"TCH", # flake8-type-checking
"UP", # pyupgrade
"W", # pycodestyle
]
ignore = [
"E501", # Ignore line length errors (we use auto-formatting)
]
[format]
preview = true
quote-style = "preserve"
docstring-code-format = true
exclude = [
"tools/extensions/lexers/*",
]

View file

@ -6,6 +6,7 @@
# You can set these variables from the command line.
PYTHON = python3
VENVDIR = ./venv
UV = uv
SPHINXBUILD = PATH=$(VENVDIR)/bin:$$PATH sphinx-build
BLURB = PATH=$(VENVDIR)/bin:$$PATH blurb
JOBS = auto
@ -13,15 +14,15 @@ PAPER =
SOURCES =
DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py)
REQUIREMENTS = requirements.txt
SPHINXERRORHANDLING = -W
SPHINXERRORHANDLING = --fail-on-warning
# Internal variables.
PAPEROPT_a4 = -D latex_elements.papersize=a4paper
PAPEROPT_letter = -D latex_elements.papersize=letterpaper
PAPEROPT_a4 = --define latex_elements.papersize=a4paper
PAPEROPT_letter = --define latex_elements.papersize=letterpaper
ALLSPHINXOPTS = -b $(BUILDER) \
-d build/doctrees \
-j $(JOBS) \
ALLSPHINXOPTS = --builder $(BUILDER) \
--doctree-dir build/doctrees \
--jobs $(JOBS) \
$(PAPEROPT_$(PAPER)) \
$(SPHINXOPTS) $(SPHINXERRORHANDLING) \
. build/$(BUILDER) $(SOURCES)
@ -32,6 +33,7 @@ help:
@echo " clean to remove build files"
@echo " venv to create a venv with necessary tools"
@echo " html to make standalone HTML files"
@echo " gettext to generate POT files"
@echo " htmlview to open the index page built by the html target in your browser"
@echo " htmllive to rebuild and reload HTML files in your browser"
@echo " htmlhelp to make HTML files and a HTML help project"
@ -140,14 +142,19 @@ pydoc-topics: build
@echo "Building finished; now run this:" \
"cp build/pydoc-topics/topics.py ../Lib/pydoc_data/topics.py"
.PHONY: gettext
gettext: BUILDER = gettext
gettext: override SPHINXOPTS := --doctree-dir build/doctrees-gettext $(SPHINXOPTS)
gettext: build
.PHONY: htmlview
htmlview: html
$(PYTHON) -c "import os, webbrowser; webbrowser.open('file://' + os.path.realpath('build/html/index.html'))"
.PHONY: htmllive
htmllive: SPHINXBUILD = $(VENVDIR)/bin/sphinx-autobuild
htmllive: SPHINXBUILD = PATH=$(VENVDIR)/bin:$$PATH sphinx-autobuild
htmllive: SPHINXOPTS = --re-ignore="/venv/" --open-browser --delay 0
htmllive: html
htmllive: _ensure-sphinx-autobuild html
.PHONY: clean
clean: clean-venv
@ -163,92 +170,151 @@ venv:
echo "venv already exists."; \
echo "To recreate it, remove it first with \`make clean-venv'."; \
else \
$(PYTHON) -m venv $(VENVDIR); \
$(VENVDIR)/bin/python3 -m pip install --upgrade pip; \
$(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \
echo "Creating venv in $(VENVDIR)"; \
if $(UV) --version >/dev/null 2>&1; then \
$(UV) venv --python=$(PYTHON) $(VENVDIR); \
VIRTUAL_ENV=$(VENVDIR) $(UV) pip install -r $(REQUIREMENTS); \
else \
$(PYTHON) -m venv $(VENVDIR); \
$(VENVDIR)/bin/python3 -m pip install --upgrade pip; \
$(VENVDIR)/bin/python3 -m pip install -r $(REQUIREMENTS); \
fi; \
echo "The venv has been created in the $(VENVDIR) directory"; \
fi
.PHONY: dist-no-html
dist-no-html: dist-text dist-pdf dist-epub dist-texinfo
.PHONY: dist
dist:
rm -rf dist
mkdir -p dist
$(MAKE) dist-html
$(MAKE) dist-text
$(MAKE) dist-pdf
$(MAKE) dist-epub
$(MAKE) dist-texinfo
.PHONY: dist-html
dist-html:
# archive the HTML
make html
@echo "Building HTML..."
mkdir -p dist
rm -rf build/html
find dist -name 'python-$(DISTVERSION)-docs-html*' -exec rm -rf {} \;
$(MAKE) html
cp -pPR build/html dist/python-$(DISTVERSION)-docs-html
rm -rf dist/python-$(DISTVERSION)-docs-html/_images/social_previews/
tar -C dist -cf dist/python-$(DISTVERSION)-docs-html.tar python-$(DISTVERSION)-docs-html
bzip2 -9 -k dist/python-$(DISTVERSION)-docs-html.tar
(cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-html.zip python-$(DISTVERSION)-docs-html)
rm -r dist/python-$(DISTVERSION)-docs-html
rm dist/python-$(DISTVERSION)-docs-html.tar
@echo "Build finished and archived!"
.PHONY: dist-text
dist-text:
# archive the text build
make text
@echo "Building text..."
mkdir -p dist
rm -rf build/text
find dist -name 'python-$(DISTVERSION)-docs-text*' -exec rm -rf {} \;
$(MAKE) text
cp -pPR build/text dist/python-$(DISTVERSION)-docs-text
tar -C dist -cf dist/python-$(DISTVERSION)-docs-text.tar python-$(DISTVERSION)-docs-text
bzip2 -9 -k dist/python-$(DISTVERSION)-docs-text.tar
(cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-text.zip python-$(DISTVERSION)-docs-text)
rm -r dist/python-$(DISTVERSION)-docs-text
rm dist/python-$(DISTVERSION)-docs-text.tar
@echo "Build finished and archived!"
.PHONY: dist-pdf
dist-pdf:
# archive the A4 latex
@echo "Building LaTeX (A4 paper)..."
mkdir -p dist
rm -rf build/latex
make latex PAPER=a4
-sed -i 's/makeindex/makeindex -q/' build/latex/Makefile
(cd build/latex; make clean && make all-pdf && make FMT=pdf zip bz2)
find dist -name 'python-$(DISTVERSION)-docs-pdf*' -exec rm -rf {} \;
$(MAKE) latex PAPER=a4
# remove zip & bz2 dependency on all-pdf,
# as otherwise the full latexmk process is run twice.
# ($$ is needed to escape the $; https://www.gnu.org/software/make/manual/make.html#Basics-of-Variable-References)
-sed -i 's/: all-$$(FMT)/:/' build/latex/Makefile
(cd build/latex; $(MAKE) clean && $(MAKE) --jobs=$$((`nproc`+1)) --output-sync LATEXMKOPTS='-quiet' all-pdf && $(MAKE) FMT=pdf zip bz2)
cp build/latex/docs-pdf.zip dist/python-$(DISTVERSION)-docs-pdf-a4.zip
cp build/latex/docs-pdf.tar.bz2 dist/python-$(DISTVERSION)-docs-pdf-a4.tar.bz2
@echo "Build finished and archived!"
# archive the letter latex
rm -rf build/latex
make latex PAPER=letter
-sed -i 's/makeindex/makeindex -q/' build/latex/Makefile
(cd build/latex; make clean && make all-pdf && make FMT=pdf zip bz2)
cp build/latex/docs-pdf.zip dist/python-$(DISTVERSION)-docs-pdf-letter.zip
cp build/latex/docs-pdf.tar.bz2 dist/python-$(DISTVERSION)-docs-pdf-letter.tar.bz2
.PHONY: dist-epub
dist-epub:
# copy the epub build
@echo "Building EPUB..."
mkdir -p dist
rm -rf build/epub
make epub
rm -f dist/python-$(DISTVERSION)-docs.epub
$(MAKE) epub
cp -pPR build/epub/Python.epub dist/python-$(DISTVERSION)-docs.epub
@echo "Build finished and archived!"
.PHONY: dist-texinfo
dist-texinfo:
# archive the texinfo build
@echo "Building Texinfo..."
mkdir -p dist
rm -rf build/texinfo
make texinfo
make info --directory=build/texinfo
find dist -name 'python-$(DISTVERSION)-docs-texinfo*' -exec rm -rf {} \;
$(MAKE) texinfo
$(MAKE) info --directory=build/texinfo
cp -pPR build/texinfo dist/python-$(DISTVERSION)-docs-texinfo
tar -C dist -cf dist/python-$(DISTVERSION)-docs-texinfo.tar python-$(DISTVERSION)-docs-texinfo
bzip2 -9 -k dist/python-$(DISTVERSION)-docs-texinfo.tar
(cd dist; zip -q -r -9 python-$(DISTVERSION)-docs-texinfo.zip python-$(DISTVERSION)-docs-texinfo)
rm -r dist/python-$(DISTVERSION)-docs-texinfo
rm dist/python-$(DISTVERSION)-docs-texinfo.tar
@echo "Build finished and archived!"
.PHONY: _ensure-package
_ensure-package: venv
if $(UV) --version >/dev/null 2>&1; then \
VIRTUAL_ENV=$(VENVDIR) $(UV) pip install $(PACKAGE); \
else \
$(VENVDIR)/bin/python3 -m pip install $(PACKAGE); \
fi
.PHONY: _ensure-pre-commit
_ensure-pre-commit:
$(MAKE) _ensure-package PACKAGE=pre-commit
.PHONY: _ensure-sphinx-autobuild
_ensure-sphinx-autobuild:
$(MAKE) _ensure-package PACKAGE=sphinx-autobuild
.PHONY: check
check: venv
$(VENVDIR)/bin/python3 -m pre_commit --version > /dev/null || $(VENVDIR)/bin/python3 -m pip install pre-commit
check: _ensure-pre-commit
$(VENVDIR)/bin/python3 -m pre_commit run --all-files
.PHONY: serve
serve:
@echo "The serve target was removed, use htmlview instead (see bpo-36329)"
@echo "The serve target was removed, use htmllive instead (see gh-80510)"
# Targets for daily automated doc build
# By default, Sphinx only rebuilds pages where the page content has changed.
# This means it doesn't always pick up changes to preferred link targets, etc
# To ensure such changes are picked up, we build the published docs with
# `-E` (to ignore the cached environment) and `-a` (to ignore already existing
# output files)
# ``--fresh-env`` (to ignore the cached environment) and ``--write-all``
# (to ignore already existing output files)
# for development releases: always build
.PHONY: autobuild-dev
autobuild-dev: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short)
autobuild-dev:
make dist SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1'
$(MAKE) dist-no-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION)
# for quick rebuilds (HTML only)
# for HTML-only rebuilds
.PHONY: autobuild-dev-html
autobuild-dev-html: DISTVERSION = $(shell $(PYTHON) tools/extensions/patchlevel.py --short)
autobuild-dev-html:
make html SPHINXOPTS='$(SPHINXOPTS) -Ea -A daily=1'
$(MAKE) dist-html SPHINXOPTS='$(SPHINXOPTS) --fresh-env --write-all --html-define daily=1' DISTVERSION=$(DISTVERSION)
# for stable releases: only build if not in pre-release stage (alpha, beta)
# release candidate downloads are okay, since the stable tree can be in that stage
@ -258,7 +324,7 @@ autobuild-stable:
echo "Not building; $(DISTVERSION) is not a release version."; \
exit 1;; \
esac
@make autobuild-dev
@$(MAKE) autobuild-dev
.PHONY: autobuild-stable-html
autobuild-stable-html:
@ -266,4 +332,4 @@ autobuild-stable-html:
echo "Not building; $(DISTVERSION) is not a release version."; \
exit 1;; \
esac
@make autobuild-dev-html
@$(MAKE) autobuild-dev-html

View file

@ -28,7 +28,7 @@ install the tools into there.
Using make
----------
To get started on UNIX, you can create a virtual environment and build
To get started on Unix, you can create a virtual environment and build
documentation with the commands::
make venv
@ -40,13 +40,13 @@ If you'd like to create the virtual environment in a different location,
you can specify it using the ``VENVDIR`` variable.
You can also skip creating the virtual environment altogether, in which case
the Makefile will look for instances of ``sphinx-build`` and ``blurb``
the ``Makefile`` will look for instances of ``sphinx-build`` and ``blurb``
installed on your process ``PATH`` (configurable with the ``SPHINXBUILD`` and
``BLURB`` variables).
On Windows, we try to emulate the Makefile as closely as possible with a
On Windows, we try to emulate the ``Makefile`` as closely as possible with a
``make.bat`` file. If you need to specify the Python interpreter to use,
set the PYTHON environment variable.
set the ``PYTHON`` environment variable.
Available make targets are:
@ -62,15 +62,19 @@ Available make targets are:
* "htmlview", which re-uses the "html" builder, but then opens the main page
in your default web browser.
* "htmllive", which re-uses the "html" builder, rebuilds the docs,
starts a local server, and automatically reloads the page in your browser
when you make changes to reST files (Unix only).
* "htmlhelp", which builds HTML files and a HTML Help project file usable to
convert them into a single Compiled HTML (.chm) file -- these are popular
under Microsoft Windows, but very handy on every platform.
To create the CHM file, you need to run the Microsoft HTML Help Workshop
over the generated project (.hhp) file. The make.bat script does this for
over the generated project (.hhp) file. The ``make.bat`` script does this for
you on Windows.
* "latex", which builds LaTeX source files as input to "pdflatex" to produce
* "latex", which builds LaTeX source files as input to ``pdflatex`` to produce
PDF documents.
* "text", which builds a plain text file for each source file.
@ -95,8 +99,6 @@ Available make targets are:
* "check", which checks for frequent markup errors.
* "serve", which serves the build/html directory on port 8000.
* "dist", (Unix only) which creates distributable archives of HTML, text,
PDF, and EPUB builds.
@ -131,8 +133,5 @@ Bugs in the content should be reported to the
Bugs in the toolset should be reported to the tools themselves.
You can also send a mail to the Python Documentation Team at docs@python.org,
and we will process your request as soon as possible.
If you want to help the Documentation Team, you are always welcome. Just send
a mail to docs@python.org.
To help with the documentation, or report any problems, please leave a message
on `discuss.python.org <https://discuss.python.org/c/documentation>`_.

View file

@ -1,10 +1,11 @@
=====================
About these documents
=====================
========================
About this documentation
========================
These documents are generated from `reStructuredText`_ sources by `Sphinx`_, a
document processor specifically written for the Python documentation.
Python's documentation is generated from `reStructuredText`_ sources
using `Sphinx`_, a documentation generator originally created for Python
and now maintained as an independent project.
.. _reStructuredText: https://docutils.sourceforge.io/rst.html
.. _Sphinx: https://www.sphinx-doc.org/
@ -20,14 +21,14 @@ volunteers are always welcome!
Many thanks go to:
* Fred L. Drake, Jr., the creator of the original Python documentation toolset
and writer of much of the content;
and author of much of the content;
* the `Docutils <https://docutils.sourceforge.io/>`_ project for creating
reStructuredText and the Docutils suite;
* Fredrik Lundh for his Alternative Python Reference project from which Sphinx
got many good ideas.
Contributors to the Python Documentation
Contributors to the Python documentation
----------------------------------------
Many people have contributed to the Python language, the Python standard

View file

@ -16,16 +16,15 @@ Documentation bugs
==================
If you find a bug in this documentation or would like to propose an improvement,
please submit a bug report on the :ref:`tracker <using-the-tracker>`. If you
please submit a bug report on the :ref:`issue tracker <using-the-tracker>`. If you
have a suggestion on how to fix it, include that as well.
You can also open a discussion item on our
`Documentation Discourse forum <https://discuss.python.org/c/documentation/26>`_.
If you're short on time, you can also email documentation bug reports to
docs@python.org (behavioral bugs can be sent to python-list@python.org).
'docs@' is a mailing list run by volunteers; your request will be noticed,
though it may take a while to be processed.
If you find a bug in the theme (HTML / CSS / JavaScript) of the
documentation, please submit a bug report on the `python-doc-theme issue
tracker <https://github.com/python/python-docs-theme>`_.
.. seealso::

View file

@ -15,10 +15,8 @@ Allocating Objects on the Heap
.. c:function:: PyObject* PyObject_Init(PyObject *op, PyTypeObject *type)
Initialize a newly allocated object *op* with its type and initial
reference. Returns the initialized object. If *type* indicates that the
object participates in the cyclic garbage detector, it is added to the
detector's set of observed objects. Other fields of the object are not
affected.
reference. Returns the initialized object. Other fields of the object are
not affected.
.. c:function:: PyVarObject* PyObject_InitVar(PyVarObject *op, PyTypeObject *type, Py_ssize_t size)
@ -37,6 +35,10 @@ Allocating Objects on the Heap
The size of the memory allocation is determined from the
:c:member:`~PyTypeObject.tp_basicsize` field of the type object.
Note that this function is unsuitable if *typeobj* has
:c:macro:`Py_TPFLAGS_HAVE_GC` set. For such objects,
use :c:func:`PyObject_GC_New` instead.
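As a rough illustration (``MyObject`` and ``MyObject_Type`` are hypothetical
names, not part of this API), a non-GC type would typically allocate an
instance like this:

.. code-block:: c

   MyObject *op = PyObject_New(MyObject, &MyObject_Type);
   if (op == NULL) {
       return NULL;
   }
   /* Initialize any fields that follow the object header. */
   return (PyObject *)op;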
.. c:macro:: PyObject_NewVar(TYPE, typeobj, size)
@ -51,15 +53,14 @@ Allocating Objects on the Heap
fields into the same allocation decreases the number of allocations,
improving the memory management efficiency.
Note that this function is unsuitable if *typeobj* has
:c:macro:`Py_TPFLAGS_HAVE_GC` set. For such objects,
use :c:func:`PyObject_GC_NewVar` instead.
.. c:function:: void PyObject_Del(void *op)
Releases memory allocated to an object using :c:macro:`PyObject_New` or
:c:macro:`PyObject_NewVar`. This is normally called from the
:c:member:`~PyTypeObject.tp_dealloc` handler specified in the object's type. The fields of
the object should not be accessed after this call as the memory is no
longer a valid Python object.
Same as :c:func:`PyObject_Free`.
.. c:var:: PyObject _Py_NoneStruct

View file

@ -6,9 +6,13 @@
API and ABI Versioning
***********************
Build-time version constants
----------------------------
CPython exposes its version number in the following macros.
Note that these correspond to the version code is **built** with,
not necessarily the version used at **run time**.
Note that these correspond to the version the code is **built** with.
See :c:var:`Py_Version` for the version used at **run time**.
See :ref:`stable` for a discussion of API and ABI stability across versions.
@ -37,37 +41,83 @@ See :ref:`stable` for a discussion of API and ABI stability across versions.
.. c:macro:: PY_VERSION_HEX
The Python version number encoded in a single integer.
See :c:func:`Py_PACK_FULL_VERSION` for the encoding details.
The underlying version information can be found by treating it as a 32 bit
number in the following manner:
Use this for numeric comparisons, for example,
``#if PY_VERSION_HEX >= ...``.
+-------+-------------------------+-------------------------+--------------------------+
| Bytes | Bits (big endian order) | Meaning | Value for ``3.4.1a2`` |
+=======+=========================+=========================+==========================+
| 1 | 1-8 | ``PY_MAJOR_VERSION`` | ``0x03`` |
+-------+-------------------------+-------------------------+--------------------------+
| 2 | 9-16 | ``PY_MINOR_VERSION`` | ``0x04`` |
+-------+-------------------------+-------------------------+--------------------------+
| 3 | 17-24 | ``PY_MICRO_VERSION`` | ``0x01`` |
+-------+-------------------------+-------------------------+--------------------------+
| 4 | 25-28 | ``PY_RELEASE_LEVEL`` | ``0xA`` |
+ +-------------------------+-------------------------+--------------------------+
| | 29-32 | ``PY_RELEASE_SERIAL`` | ``0x2`` |
+-------+-------------------------+-------------------------+--------------------------+
Thus ``3.4.1a2`` is hexversion ``0x030401a2`` and ``3.10.0`` is
hexversion ``0x030a00f0``.
Use this for numeric comparisons, e.g. ``#if PY_VERSION_HEX >= ...``.
This version is also available via the symbol :c:var:`Py_Version`.
Run-time version
----------------
.. c:var:: const unsigned long Py_Version
The Python runtime version number encoded in a single constant integer, with
the same format as the :c:macro:`PY_VERSION_HEX` macro.
The Python runtime version number encoded in a single constant integer.
See :c:func:`Py_PACK_FULL_VERSION` for the encoding details.
This contains the Python version used at run time.
Use this for numeric comparisons, for example, ``if (Py_Version >= ...)``.
.. versionadded:: 3.11
All the given macros are defined in :source:`Include/patchlevel.h`.
Bit-packing macros
------------------
.. c:function:: uint32_t Py_PACK_FULL_VERSION(int major, int minor, int micro, int release_level, int release_serial)
Return the given version, encoded as a single 32-bit integer with
the following structure:
+------------------+-------+----------------+-----------+--------------------------+
| | No. | | | Example values |
| | of | | +-------------+------------+
| Argument | bits | Bit mask | Bit shift | ``3.4.1a2`` | ``3.10.0`` |
+==================+=======+================+===========+=============+============+
| *major* | 8 | ``0xFF000000`` | 24 | ``0x03`` | ``0x03`` |
+------------------+-------+----------------+-----------+-------------+------------+
| *minor* | 8 | ``0x00FF0000`` | 16 | ``0x04`` | ``0x0A`` |
+------------------+-------+----------------+-----------+-------------+------------+
| *micro* | 8 | ``0x0000FF00`` | 8 | ``0x01`` | ``0x00`` |
+------------------+-------+----------------+-----------+-------------+------------+
| *release_level* | 4 | ``0x000000F0`` | 4 | ``0xA`` | ``0xF`` |
+------------------+-------+----------------+-----------+-------------+------------+
| *release_serial* | 4 | ``0x0000000F`` | 0 | ``0x2`` | ``0x0`` |
+------------------+-------+----------------+-----------+-------------+------------+
For example:
+-------------+------------------------------------+-----------------+
| Version | ``Py_PACK_FULL_VERSION`` arguments | Encoded version |
+=============+====================================+=================+
| ``3.4.1a2`` | ``(3, 4, 1, 0xA, 2)`` | ``0x030401a2`` |
+-------------+------------------------------------+-----------------+
| ``3.10.0`` | ``(3, 10, 0, 0xF, 0)`` | ``0x030a00f0`` |
+-------------+------------------------------------+-----------------+
Out-of-range bits in the arguments are ignored.
That is, the macro can be defined as:
.. code-block:: c
#ifndef Py_PACK_FULL_VERSION
#define Py_PACK_FULL_VERSION(X, Y, Z, LEVEL, SERIAL) ( \
(((X) & 0xff) << 24) | \
(((Y) & 0xff) << 16) | \
(((Z) & 0xff) << 8) | \
(((LEVEL) & 0xf) << 4) | \
(((SERIAL) & 0xf) << 0))
#endif
``Py_PACK_FULL_VERSION`` is primarily a macro, intended for use in
``#if`` directives, but it is also available as an exported function.
.. versionadded:: 3.14
.. c:function:: uint32_t Py_PACK_VERSION(int major, int minor)
Equivalent to ``Py_PACK_FULL_VERSION(major, minor, 0, 0, 0)``.
The result does not correspond to any Python release, but is useful
in numeric comparisons.
.. versionadded:: 3.14
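For example, a compile-time check against a minimum version could be written
as follows (a sketch; it assumes the packing macros are available, either from
a 3.14+ ``patchlevel.h`` or from a local fallback definition like the one
shown above):

.. code-block:: c

   #if PY_VERSION_HEX >= Py_PACK_VERSION(3, 14)
       /* Use an API that exists only in Python 3.14 or newer. */
   #else
       /* Fall back to an older API. */
   #endif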

View file

@ -5,7 +5,7 @@
Parsing arguments and building values
=====================================
These functions are useful when creating your own extensions functions and
These functions are useful when creating your own extension functions and
methods. Additional information and examples are available in
:ref:`extending-index`.
@ -113,14 +113,18 @@ There are three ways strings and buffers can be converted to C:
``z`` (:class:`str` or ``None``) [const char \*]
Like ``s``, but the Python object may also be ``None``, in which case the C
pointer is set to ``NULL``.
It is the same as ``s?`` with the C pointer initialized to ``NULL``.
``z*`` (:class:`str`, :term:`bytes-like object` or ``None``) [Py_buffer]
Like ``s*``, but the Python object may also be ``None``, in which case the
``buf`` member of the :c:type:`Py_buffer` structure is set to ``NULL``.
It is the same as ``s*?`` with the ``buf`` member of the :c:type:`Py_buffer`
structure initialized to ``NULL``.
``z#`` (:class:`str`, read-only :term:`bytes-like object` or ``None``) [const char \*, :c:type:`Py_ssize_t`]
Like ``s#``, but the Python object may also be ``None``, in which case the C
pointer is set to ``NULL``.
It is the same as ``s#?`` with the C pointer initialized to ``NULL``.
``y`` (read-only :term:`bytes-like object`) [const char \*]
This format converts a bytes-like object to a C pointer to a
@ -229,12 +233,24 @@ There are three ways strings and buffers can be converted to C:
Numbers
-------
These formats allow representing Python numbers or single characters as C numbers.
Formats that require :class:`int`, :class:`float` or :class:`complex` can
also use the corresponding special methods :meth:`~object.__index__`,
:meth:`~object.__float__` or :meth:`~object.__complex__` to convert
the Python object to the required type.
For signed integer formats, :exc:`OverflowError` is raised if the value
is out of range for the C type.
For unsigned integer formats, no range checking is done --- the
most significant bits are silently truncated when the receiving field is too
small to receive the value.
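As a minimal sketch (the function name ``my_scale`` and its arguments are
hypothetical), parsing numeric arguments typically looks like this:

.. code-block:: c

   /* Sketch: accept one Python int and one float as C numbers. */
   static PyObject *
   my_scale(PyObject *self, PyObject *args)
   {
       int count;
       double ratio;
       if (!PyArg_ParseTuple(args, "id", &count, &ratio)) {
           return NULL;   /* e.g. OverflowError or TypeError already set */
       }
       return PyFloat_FromDouble(count * ratio);
   }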
``b`` (:class:`int`) [unsigned char]
Convert a nonnegative Python integer to an unsigned tiny int, stored in a C
Convert a nonnegative Python integer to an unsigned tiny integer, stored in a C
:c:expr:`unsigned char`.
``B`` (:class:`int`) [unsigned char]
Convert a Python integer to a tiny int without overflow checking, stored in a C
Convert a Python integer to a tiny integer without overflow checking, stored in a C
:c:expr:`unsigned char`.
``h`` (:class:`int`) [short int]
@ -280,10 +296,10 @@ Numbers
length 1, to a C :c:expr:`int`.
``f`` (:class:`float`) [float]
Convert a Python floating point number to a C :c:expr:`float`.
Convert a Python floating-point number to a C :c:expr:`float`.
``d`` (:class:`float`) [double]
Convert a Python floating point number to a C :c:expr:`double`.
Convert a Python floating-point number to a C :c:expr:`double`.
``D`` (:class:`complex`) [Py_complex]
Convert a Python complex number to a C :c:type:`Py_complex` structure.
@ -307,7 +323,7 @@ Other objects
.. _o_ampersand:
``O&`` (object) [*converter*, *anything*]
``O&`` (object) [*converter*, *address*]
Convert a Python object to a C variable through a *converter* function. This
takes two arguments: the first is a function, the second is the address of a C
variable (of arbitrary type), converted to :c:expr:`void *`. The *converter*
@ -321,14 +337,20 @@ Other objects
the conversion has failed. When the conversion fails, the *converter* function
should raise an exception and leave the content of *address* unmodified.
If the *converter* returns ``Py_CLEANUP_SUPPORTED``, it may get called a
.. c:macro:: Py_CLEANUP_SUPPORTED
:no-typesetting:
If the *converter* returns :c:macro:`!Py_CLEANUP_SUPPORTED`, it may get called a
second time if the argument parsing eventually fails, giving the converter a
chance to release any memory that it had already allocated. In this second
call, the *object* parameter will be ``NULL``; *address* will have the same value
as in the original call.
Examples of converters: :c:func:`PyUnicode_FSConverter` and
:c:func:`PyUnicode_FSDecoder`.
.. versionchanged:: 3.1
``Py_CLEANUP_SUPPORTED`` was added.
:c:macro:`!Py_CLEANUP_SUPPORTED` was added.
``p`` (:class:`bool`) [int]
Tests the value passed in for truth (a boolean **p**\ redicate) and converts
@ -339,16 +361,36 @@ Other objects
.. versionadded:: 3.3
``(items)`` (:class:`tuple`) [*matching-items*]
The object must be a Python sequence whose length is the number of format units
``(items)`` (sequence) [*matching-items*]
The object must be a Python sequence (except :class:`str`, :class:`bytes`
or :class:`bytearray`) whose length is the number of format units
in *items*. The C arguments must correspond to the individual format units in
*items*. Format units for sequences may be nested.
It is possible to pass "long" integers (integers whose value exceeds the
platform's :c:macro:`LONG_MAX`) however no proper range checking is done --- the
most significant bits are silently truncated when the receiving field is too
small to receive the value (actually, the semantics are inherited from downcasts
in C --- your mileage may vary).
If *items* contains format units which store a :ref:`borrowed buffer
<c-arg-borrowed-buffer>` (``s``, ``s#``, ``z``, ``z#``, ``y``, or ``y#``)
or a :term:`borrowed reference` (``S``, ``Y``, ``U``, ``O``, or ``O!``),
the object must be a Python tuple.
The *converter* for the ``O&`` format unit in *items* must not store
a borrowed buffer or a borrowed reference.
.. versionchanged:: next
:class:`str` and :class:`bytearray` objects are no longer accepted as a sequence.
.. deprecated:: next
Non-tuple sequences are deprecated if *items* contains format units
which store a borrowed buffer or a borrowed reference.
``unit?`` (anything or ``None``) [*matching-variable(s)*]
``?`` modifies the behavior of the preceding format unit.
The C variable(s) corresponding to that parameter should be initialized
to their default value --- when the argument is ``None``,
:c:func:`PyArg_ParseTuple` does not touch the contents of the corresponding
C variable(s).
If the argument is not ``None``, it is parsed according to the specified
format unit.
.. versionadded:: next
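As a rough sketch combining the ``O&`` converter described above with the new
``?`` modifier (the surrounding function and variable names are hypothetical):

.. code-block:: c

   PyObject *path_bytes = NULL;
   int timeout = -1;   /* left untouched when the Python argument is None */
   if (!PyArg_ParseTuple(args, "O&i?",
                         PyUnicode_FSConverter, &path_bytes, &timeout)) {
       return NULL;
   }
   /* ... use PyBytes_AS_STRING(path_bytes) and timeout, then
      Py_DECREF(path_bytes) ... */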
A few other characters have a meaning in a format string. These may not occur
inside nested parentheses. They are:
@ -633,6 +675,10 @@ Building values
``n`` (:class:`int`) [:c:type:`Py_ssize_t`]
Convert a C :c:type:`Py_ssize_t` to a Python integer.
``p`` (:class:`bool`) [int]
Convert a C :c:expr:`int` to a Python :class:`bool` object.
.. versionadded:: 3.14
``c`` (:class:`bytes` of length 1) [char]
Convert a C :c:expr:`int` representing a byte to a Python :class:`bytes` object of
length 1.
@ -642,10 +688,10 @@ Building values
object of length 1.
``d`` (:class:`float`) [double]
Convert a C :c:expr:`double` to a Python floating point number.
Convert a C :c:expr:`double` to a Python floating-point number.
``f`` (:class:`float`) [float]
Convert a C :c:expr:`float` to a Python floating point number.
Convert a C :c:expr:`float` to a Python floating-point number.
``D`` (:class:`complex`) [Py_complex \*]
Convert a C :c:type:`Py_complex` structure to a Python complex number.

View file

@ -26,17 +26,19 @@ characteristic of being backed by a possibly large memory buffer. It is
then desirable, in some situations, to access that buffer directly and
without intermediate copying.
Python provides such a facility at the C level in the form of the :ref:`buffer
protocol <bufferobjects>`. This protocol has two sides:
Python provides such a facility at the C and Python level in the form of the
:ref:`buffer protocol <bufferobjects>`. This protocol has two sides:
.. index:: single: PyBufferProcs
.. index:: single: PyBufferProcs (C type)
- on the producer side, a type can export a "buffer interface" which allows
objects of that type to expose information about their underlying buffer.
This interface is described in the section :ref:`buffer-structs`;
This interface is described in the section :ref:`buffer-structs`; for
Python see :ref:`python-buffer-protocol`.
- on the consumer side, several means are available to obtain a pointer to
the raw underlying data of an object (for example a method parameter).
the raw underlying data of an object (for example a method parameter). For
Python see :class:`memoryview`.
Simple objects such as :class:`bytes` and :class:`bytearray` expose their
underlying buffer in byte-oriented form. Other forms are possible; for example,
@ -62,6 +64,10 @@ In both cases, :c:func:`PyBuffer_Release` must be called when the buffer
isn't needed anymore. Failure to do so could lead to various issues such as
resource leaks.
.. versionadded:: 3.12
The buffer protocol is now accessible in Python, see
:ref:`python-buffer-protocol` and :class:`memoryview`.
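On the consumer side, the usual pattern is to request a view, use it, and
release it. A minimal sketch (``process_bytes`` is a hypothetical helper):

.. code-block:: c

   Py_buffer view;
   if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) < 0) {
       return NULL;   /* the exporter refused; an exception is set */
   }
   /* view.buf points to view.len contiguous bytes. */
   process_bytes(view.buf, view.len);
   PyBuffer_Release(&view);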
.. _buffer-structure:
@ -147,9 +153,9 @@ a buffer, see :c:func:`PyObject_GetBuffer`.
or a :c:macro:`PyBUF_WRITABLE` request, the consumer must disregard
:c:member:`~Py_buffer.itemsize` and assume ``itemsize == 1``.
.. c:member:: const char *format
.. c:member:: char *format
A *NUL* terminated string in :mod:`struct` module style syntax describing
A *NULL* terminated string in :mod:`struct` module style syntax describing
the contents of a single item. If this is ``NULL``, ``"B"`` (unsigned bytes)
is assumed.
@ -244,7 +250,6 @@ The following fields are not influenced by *flags* and must always be filled in
with the correct values: :c:member:`~Py_buffer.obj`, :c:member:`~Py_buffer.buf`,
:c:member:`~Py_buffer.len`, :c:member:`~Py_buffer.itemsize`, :c:member:`~Py_buffer.ndim`.
readonly, format
~~~~~~~~~~~~~~~~
@ -253,7 +258,8 @@ readonly, format
Controls the :c:member:`~Py_buffer.readonly` field. If set, the exporter
MUST provide a writable buffer or else report failure. Otherwise, the
exporter MAY provide either a read-only or writable buffer, but the choice
MUST be consistent for all consumers.
MUST be consistent for all consumers. For example, :c:expr:`PyBUF_SIMPLE | PyBUF_WRITABLE`
can be used to request a simple writable buffer.
.. c:macro:: PyBUF_FORMAT
@ -265,8 +271,9 @@ readonly, format
Since :c:macro:`PyBUF_SIMPLE` is defined as 0, :c:macro:`PyBUF_WRITABLE`
can be used as a stand-alone flag to request a simple writable buffer.
:c:macro:`PyBUF_FORMAT` can be \|'d to any of the flags except :c:macro:`PyBUF_SIMPLE`.
The latter already implies format ``B`` (unsigned bytes).
:c:macro:`PyBUF_FORMAT` must be \|'d to any of the flags except :c:macro:`PyBUF_SIMPLE`, because
the latter already implies format ``B`` (unsigned bytes). :c:macro:`!PyBUF_FORMAT` cannot be
used on its own.
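As a sketch of the flag combination described above (again assuming *obj*
exports the buffer protocol and ``<string.h>`` is included):

.. code-block:: c

   Py_buffer view;
   /* request a simple, writable, byte-oriented buffer */
   if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE | PyBUF_WRITABLE) < 0) {
       return NULL;                 /* read-only exporters will fail here */
   }
   memset(view.buf, 0, view.len);   /* writing through view.buf is allowed */
   PyBuffer_Release(&view);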
shape, strides, suboffsets
View file
@ -42,17 +42,22 @@ Direct API functions
Return a new bytearray object from any object, *o*, that implements the
:ref:`buffer protocol <bufferobjects>`.
On failure, return ``NULL`` with an exception set.
.. c:function:: PyObject* PyByteArray_FromStringAndSize(const char *string, Py_ssize_t len)
Create a new bytearray object from *string* and its length, *len*. On
failure, ``NULL`` is returned.
Create a new bytearray object from *string* and its length, *len*.
On failure, return ``NULL`` with an exception set.
.. c:function:: PyObject* PyByteArray_Concat(PyObject *a, PyObject *b)
Concat bytearrays *a* and *b* and return a new bytearray with the result.
On failure, return ``NULL`` with an exception set.
.. c:function:: Py_ssize_t PyByteArray_Size(PyObject *bytearray)
@ -69,6 +74,11 @@ Direct API functions
.. c:function:: int PyByteArray_Resize(PyObject *bytearray, Py_ssize_t len)
Resize the internal buffer of *bytearray* to *len*.
Failure is a ``-1`` return with an exception set.
.. versionchanged:: 3.14
A negative *len* will now result in an exception being set and -1 returned.
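For illustration, a sketch that builds a bytearray from C data and then
shrinks it (*data* and *n* are placeholder names):

.. code-block:: c

   PyObject *ba = PyByteArray_FromStringAndSize(data, n);
   if (ba == NULL) {
       return NULL;                          /* exception already set */
   }
   if (PyByteArray_Resize(ba, n / 2) < 0) {  /* keep only the first half */
       Py_DECREF(ba);
       return NULL;
   }
   assert(PyByteArray_Size(ba) == n / 2);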
Macros
^^^^^^
View file
@ -189,12 +189,30 @@ called with a non-bytes parameter.
to *newpart* (i.e. decrements its reference count).
.. c:function:: PyObject* PyBytes_Join(PyObject *sep, PyObject *iterable)
Similar to ``sep.join(iterable)`` in Python.
*sep* must be a Python :class:`bytes` object.
(Note that :c:func:`PyUnicode_Join` accepts ``NULL`` separator and treats
it as a space, whereas :c:func:`PyBytes_Join` doesn't accept ``NULL``
separator.)
*iterable* must be an iterable object yielding objects that implement the
:ref:`buffer protocol <bufferobjects>`.
On success, return a new :class:`bytes` object.
On error, set an exception and return ``NULL``.
.. versionadded:: 3.14
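A small sketch of the new function, roughly equivalent to
``b", ".join([b"ab", b"cd"])`` in Python:

.. code-block:: c

   PyObject *sep = PyBytes_FromString(", ");
   PyObject *items = Py_BuildValue("[yy]", "ab", "cd");
   PyObject *joined = NULL;
   if (sep != NULL && items != NULL) {
       joined = PyBytes_Join(sep, items);   /* new reference, or NULL on error */
   }
   Py_XDECREF(items);
   Py_XDECREF(sep);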
.. c:function:: int _PyBytes_Resize(PyObject **bytes, Py_ssize_t newsize)
A way to resize a bytes object even though it is "immutable". Only use this
to build up a brand new bytes object; don't use this if the bytes may already
be known in other parts of the code. It is an error to call this function if
the refcount on the input bytes object is not one. Pass the address of an
Resize a bytes object. *newsize* will be the new length of the bytes object.
You can think of it as creating a new bytes object and destroying the old
one, only more efficiently.
Pass the address of an
existing bytes object as an lvalue (it may be written into), and the new size
desired. On success, *\*bytes* holds the resized bytes object and ``0`` is
returned; the address in *\*bytes* may differ from its input value. If the
View file
@ -39,7 +39,8 @@ Cell objects are not likely to be useful elsewhere.
.. c:function:: PyObject* PyCell_Get(PyObject *cell)
Return the contents of the cell *cell*.
Return the contents of the cell *cell*, which can be ``NULL``.
If *cell* is not a cell object, returns ``NULL`` with an exception set.
.. c:function:: PyObject* PyCell_GET(PyObject *cell)
@ -52,8 +53,10 @@ Cell objects are not likely to be useful elsewhere.
Set the contents of the cell object *cell* to *value*. This releases the
reference to any current content of the cell. *value* may be ``NULL``. *cell*
must be non-``NULL``; if it is not a cell object, ``-1`` will be returned. On
success, ``0`` will be returned.
must be non-``NULL``.
On success, return ``0``.
If *cell* is not a cell object, set an exception and return ``-1``.
.. c:function:: void PyCell_SET(PyObject *cell, PyObject *value)
View file
@ -22,16 +22,29 @@ bound into a function.
.. c:var:: PyTypeObject PyCode_Type
This is an instance of :c:type:`PyTypeObject` representing the Python
:class:`code` type.
:ref:`code object <code-objects>`.
.. c:function:: int PyCode_Check(PyObject *co)
Return true if *co* is a :class:`code` object. This function always succeeds.
Return true if *co* is a :ref:`code object <code-objects>`.
This function always succeeds.
.. c:function:: int PyCode_GetNumFree(PyCodeObject *co)
.. c:function:: Py_ssize_t PyCode_GetNumFree(PyCodeObject *co)
Return the number of free variables in *co*.
Return the number of :term:`free (closure) variables <closure variable>`
in a code object.
.. c:function:: int PyUnstable_Code_GetFirstFree(PyCodeObject *co)
Return the position of the first :term:`free (closure) variable <closure variable>`
in a code object.
.. versionchanged:: 3.13
Renamed from ``PyCode_GetFirstFree`` as part of :ref:`unstable-c-api`.
The old name is deprecated, but will remain available until the
signature changes again.
.. c:function:: PyCodeObject* PyUnstable_Code_New(int argcount, int kwonlyargcount, int nlocals, int stacksize, int flags, PyObject *code, PyObject *consts, PyObject *names, PyObject *varnames, PyObject *freevars, PyObject *cellvars, PyObject *filename, PyObject *name, PyObject *qualname, int firstlineno, PyObject *linetable, PyObject *exceptiontable)
@ -48,7 +61,7 @@ bound into a function.
.. versionchanged:: 3.11
Added ``qualname`` and ``exceptiontable`` parameters.
.. index:: single: PyCode_New
.. index:: single: PyCode_New (C function)
.. versionchanged:: 3.12
@ -61,7 +74,7 @@ bound into a function.
Similar to :c:func:`PyUnstable_Code_New`, but with an extra "posonlyargcount" for positional-only arguments.
The same caveats that apply to ``PyUnstable_Code_New`` also apply to this function.
.. index:: single: PyCode_NewWithPosOnlyArgs
.. index:: single: PyCode_NewWithPosOnlyArgs (C function)
.. versionadded:: 3.8 as ``PyCode_NewWithPosOnlyArgs``
@ -85,8 +98,8 @@ bound into a function.
Return the line number of the instruction that occurs on or before ``byte_offset`` and ends after it.
If you just need the line number of a frame, use :c:func:`PyFrame_GetLineNumber` instead.
For efficiently iterating over the line numbers in a code object, use `the API described in PEP 626
<https://peps.python.org/pep-0626/#out-of-process-debuggers-and-profilers>`_.
For efficiently iterating over the line numbers in a code object, use :pep:`the API described in PEP 626
<0626#out-of-process-debuggers-and-profilers>`.
.. c:function:: int PyCode_Addr2Location(PyObject *co, int byte_offset, int *start_line, int *start_column, int *end_line, int *end_column)
@ -133,7 +146,8 @@ bound into a function.
Equivalent to the Python code ``getattr(co, 'co_freevars')``.
Returns a new reference to a :c:type:`PyTupleObject` containing the names of
the free variables. On error, ``NULL`` is returned and an exception is raised.
the :term:`free (closure) variables <closure variable>`. On error, ``NULL`` is returned
and an exception is raised.
.. versionadded:: 3.11
@ -220,7 +234,7 @@ may change without deprecation warnings.
*free* will be called on non-``NULL`` data stored under the new index.
Use :c:func:`Py_DecRef` when storing :c:type:`PyObject`.
.. index:: single: _PyEval_RequestCodeExtraIndex
.. index:: single: _PyEval_RequestCodeExtraIndex (C function)
.. versionadded:: 3.6 as ``_PyEval_RequestCodeExtraIndex``
@ -238,7 +252,7 @@ may change without deprecation warnings.
If no data was set under the index, set *extra* to ``NULL`` and return
0 without setting an exception.
.. index:: single: _PyCode_GetExtra
.. index:: single: _PyCode_GetExtra (C function)
.. versionadded:: 3.6 as ``_PyCode_GetExtra``
@ -253,7 +267,7 @@ may change without deprecation warnings.
Set the extra data stored under the given index to *extra*.
Return 0 on success. Set an exception and return -1 on failure.
.. index:: single: _PyCode_SetExtra
.. index:: single: _PyCode_SetExtra (C function)
.. versionadded:: 3.6 as ``_PyCode_SetExtra``
View file
@ -25,12 +25,16 @@ pointers. This is consistent throughout the API.
The C structure which corresponds to the value portion of a Python complex
number object. Most of the functions for dealing with complex number objects
use structures of this type as input or output values, as appropriate. It is
defined as::
use structures of this type as input or output values, as appropriate.
.. c:member:: double real
double imag
The structure is defined as::
typedef struct {
double real;
double imag;
double real;
double imag;
} Py_complex;
@ -40,12 +44,36 @@ pointers. This is consistent throughout the API.
representation.
.. c:function:: Py_complex _Py_cr_sum(Py_complex left, double right)
Return the sum of a complex number and a real number, using the C :c:type:`Py_complex`
representation.
.. versionadded:: 3.14
.. c:function:: Py_complex _Py_c_diff(Py_complex left, Py_complex right)
Return the difference between two complex numbers, using the C
:c:type:`Py_complex` representation.
.. c:function:: Py_complex _Py_cr_diff(Py_complex left, double right)
Return the difference between a complex number and a real number, using the C
:c:type:`Py_complex` representation.
.. versionadded:: 3.14
.. c:function:: Py_complex _Py_rc_diff(double left, Py_complex right)
Return the difference between a real number and a complex number, using the C
:c:type:`Py_complex` representation.
.. versionadded:: 3.14
.. c:function:: Py_complex _Py_c_neg(Py_complex num)
Return the negation of the complex number *num*, using the C
@ -58,6 +86,14 @@ pointers. This is consistent throughout the API.
representation.
.. c:function:: Py_complex _Py_cr_prod(Py_complex left, double right)
Return the product of a complex number and a real number, using the C
:c:type:`Py_complex` representation.
.. versionadded:: 3.14
.. c:function:: Py_complex _Py_c_quot(Py_complex dividend, Py_complex divisor)
Return the quotient of two complex numbers, using the C :c:type:`Py_complex`
@ -67,6 +103,28 @@ pointers. This is consistent throughout the API.
:c:data:`errno` to :c:macro:`!EDOM`.
.. c:function:: Py_complex _Py_cr_quot(Py_complex dividend, double divisor)
Return the quotient of a complex number and a real number, using the C
:c:type:`Py_complex` representation.
If *divisor* is zero, this method returns zero and sets
:c:data:`errno` to :c:macro:`!EDOM`.
.. versionadded:: 3.14
.. c:function:: Py_complex _Py_rc_quot(double dividend, Py_complex divisor)
Return the quotient of a real number and a complex number, using the C
:c:type:`Py_complex` representation.
If *divisor* is zero, this method returns zero and sets
:c:data:`errno` to :c:macro:`!EDOM`.
.. versionadded:: 3.14
.. c:function:: Py_complex _Py_c_pow(Py_complex num, Py_complex exp)
Return the exponentiation of *num* by *exp*, using the C :c:type:`Py_complex`
@ -75,6 +133,8 @@ pointers. This is consistent throughout the API.
If *num* is null and *exp* is not a positive real number,
this method returns zero and sets :c:data:`errno` to :c:macro:`!EDOM`.
Set :c:data:`errno` to :c:macro:`!ERANGE` on overflows.
Complex Numbers as Python Objects
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@ -106,22 +166,46 @@ Complex Numbers as Python Objects
.. c:function:: PyObject* PyComplex_FromCComplex(Py_complex v)
Create a new Python complex number object from a C :c:type:`Py_complex` value.
Return ``NULL`` with an exception set on error.
.. c:function:: PyObject* PyComplex_FromDoubles(double real, double imag)
Return a new :c:type:`PyComplexObject` object from *real* and *imag*.
Return ``NULL`` with an exception set on error.
.. c:function:: double PyComplex_RealAsDouble(PyObject *op)
Return the real part of *op* as a C :c:expr:`double`.
If *op* is not a Python complex number object but has a
:meth:`~object.__complex__` method, this method will first be called to
convert *op* to a Python complex number object. If :meth:`!__complex__` is
not defined then it falls back to call :c:func:`PyFloat_AsDouble` and
returns its result.
Upon failure, this method returns ``-1.0`` with an exception set, so one
should call :c:func:`PyErr_Occurred` to check for errors.
.. versionchanged:: 3.13
Use :meth:`~object.__complex__` if available.
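Since ``-1.0`` is also a valid real part, callers typically disambiguate
with :c:func:`PyErr_Occurred`; a sketch (assuming *op* is some object):

.. code-block:: c

   double real = PyComplex_RealAsDouble(op);
   if (real == -1.0 && PyErr_Occurred()) {
       return NULL;   /* conversion failed */
   }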
.. c:function:: double PyComplex_ImagAsDouble(PyObject *op)
Return the imaginary part of *op* as a C :c:expr:`double`.
If *op* is not a Python complex number object but has a
:meth:`~object.__complex__` method, this method will first be called to
convert *op* to a Python complex number object. If :meth:`!__complex__` is
not defined then it falls back to call :c:func:`PyFloat_AsDouble` and
returns ``0.0`` on success.
Upon failure, this method returns ``-1.0`` with an exception set, so one
should call :c:func:`PyErr_Occurred` to check for errors.
.. versionchanged:: 3.13
Use :meth:`~object.__complex__` if available.
.. c:function:: Py_complex PyComplex_AsCComplex(PyObject *op)
@ -131,8 +215,11 @@ Complex Numbers as Python Objects
method, this method will first be called to convert *op* to a Python complex
number object. If :meth:`!__complex__` is not defined then it falls back to
:meth:`~object.__float__`. If :meth:`!__float__` is not defined then it falls back
to :meth:`~object.__index__`. Upon failure, this method returns ``-1.0`` as a real
value.
to :meth:`~object.__index__`.
Upon failure, this method returns :c:type:`Py_complex`
with :c:member:`~Py_complex.real` set to ``-1.0`` and with an exception set, so one
should call :c:func:`PyErr_Occurred` to check for errors.
.. versionchanged:: 3.8
Use :meth:`~object.__index__` if available.
View file
@ -6,6 +6,8 @@ Context Variables Objects
-------------------------
.. _contextvarsobjects_pointertype_change:
.. versionadded:: 3.7
.. versionchanged:: 3.7.1
.. note::
@ -24,8 +26,6 @@ Context Variables Objects
See :issue:`34762` for more details.
.. versionadded:: 3.7
This section details the public C API for the :mod:`contextvars` module.
.. c:type:: PyContext
@ -101,6 +101,52 @@ Context object management functions:
current context for the current thread. Returns ``0`` on success,
and ``-1`` on error.
.. c:function:: int PyContext_AddWatcher(PyContext_WatchCallback callback)
Register *callback* as a context object watcher for the current interpreter.
Return an ID which may be passed to :c:func:`PyContext_ClearWatcher`.
In case of error (e.g. no more watcher IDs available),
return ``-1`` and set an exception.
.. versionadded:: 3.14
.. c:function:: int PyContext_ClearWatcher(int watcher_id)
Clear watcher identified by *watcher_id* previously returned from
:c:func:`PyContext_AddWatcher` for the current interpreter.
Return ``0`` on success, or ``-1`` and set an exception on error
(e.g. if the given *watcher_id* was never registered.)
.. versionadded:: 3.14
.. c:type:: PyContextEvent
Enumeration of possible context object watcher events:
- ``Py_CONTEXT_SWITCHED``: The :term:`current context` has switched to a
different context. The object passed to the watch callback is the
now-current :class:`contextvars.Context` object, or None if no context is
current.
.. versionadded:: 3.14
.. c:type:: int (*PyContext_WatchCallback)(PyContextEvent event, PyObject *obj)
Context object watcher callback function. The object passed to the callback
is event-specific; see :c:type:`PyContextEvent` for details.
If the callback returns with an exception set, it must return ``-1``; this
exception will be printed as an unraisable exception using
:c:func:`PyErr_FormatUnraisable`. Otherwise it should return ``0``.
There may already be a pending exception set on entry to the callback. In
this case, the callback should return ``0`` with the same exception still
set. This means the callback may not call any other API that can set an
exception unless it saves and clears the exception state first, and restores
it before returning.
.. versionadded:: 3.14
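A minimal sketch of a context watcher; the callback name and the helper that
registers it are illustrative only:

.. code-block:: c

   static int
   log_context_switch(PyContextEvent event, PyObject *obj)
   {
       if (event == Py_CONTEXT_SWITCHED) {
           /* obj is the now-current context, or None */
       }
       return 0;   /* no error */
   }

   static int
   register_watcher(void)
   {
       int watcher_id = PyContext_AddWatcher(log_context_switch);
       if (watcher_id < 0) {
           return -1;   /* exception already set */
       }
       return 0;
   }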
Context variable functions:
View file
@ -48,6 +48,42 @@ The return value (*rv*) for these functions should be interpreted as follows:
The following functions provide locale-independent string to number conversions.
.. c:function:: unsigned long PyOS_strtoul(const char *str, char **ptr, int base)
Convert the initial part of the string in ``str`` to an :c:expr:`unsigned
long` value according to the given ``base``, which must be between ``2`` and
``36`` inclusive, or be the special value ``0``.
Leading white space and case of characters are ignored. If ``base`` is zero
it looks for a leading ``0b``, ``0o`` or ``0x`` to tell which base. If
these are absent it defaults to ``10``. Base must be 0 or between 2 and 36
(inclusive). If ``ptr`` is non-``NULL`` it will contain a pointer to the
end of the scan.
If the converted value falls outside the range of the corresponding return
type, a range error occurs (:c:data:`errno` is set to :c:macro:`!ERANGE`) and
:c:macro:`!ULONG_MAX` is returned. If no conversion can be performed, ``0``
is returned.
See also the Unix man page :manpage:`strtoul(3)`.
.. versionadded:: 3.2
.. c:function:: long PyOS_strtol(const char *str, char **ptr, int base)
Convert the initial part of the string in ``str`` to a :c:expr:`long` value
according to the given ``base``, which must be between ``2`` and ``36``
inclusive, or be the special value ``0``.
Same as :c:func:`PyOS_strtoul`, but return a :c:expr:`long` value instead
and :c:macro:`LONG_MAX` on overflows.
See also the Unix man page :manpage:`strtol(3)`.
.. versionadded:: 3.2
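For illustration, a sketch parsing a hexadecimal literal with automatic base
detection (``<errno.h>`` is assumed to be included):

.. code-block:: c

   char *end = NULL;
   errno = 0;
   unsigned long value = PyOS_strtoul("0x1f", &end, 0);
   if (errno == ERANGE) {
       /* overflow: value is ULONG_MAX */
   }
   /* otherwise value == 31 and end points at the terminating NUL */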
.. c:function:: double PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception)
Convert a string ``s`` to a :c:expr:`double`, raising a Python
@ -69,7 +105,7 @@ The following functions provide locale-independent string to number conversions.
If ``s`` represents a value that is too large to store in a float
(for example, ``"1e500"`` is such a string on many platforms) then
if ``overflow_exception`` is ``NULL`` return ``Py_HUGE_VAL`` (with
if ``overflow_exception`` is ``NULL`` return ``Py_INFINITY`` (with
an appropriate sign) and don't set any exception. Otherwise,
``overflow_exception`` must point to a Python exception object;
raise that exception and return ``-1.0``. In both cases, set
View file
@ -318,10 +318,10 @@ Macros for the convenience of modules implementing the DB API:
.. c:function:: PyObject* PyDateTime_FromTimestamp(PyObject *args)
Create and return a new :class:`datetime.datetime` object given an argument
tuple suitable for passing to :meth:`datetime.datetime.fromtimestamp()`.
tuple suitable for passing to :meth:`datetime.datetime.fromtimestamp`.
.. c:function:: PyObject* PyDate_FromTimestamp(PyObject *args)
Create and return a new :class:`datetime.date` object given an argument
tuple suitable for passing to :meth:`datetime.date.fromtimestamp()`.
tuple suitable for passing to :meth:`datetime.date.fromtimestamp`.
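Both functions take the argument tuple, not a bare number; a sketch, assuming
the datetime C API was initialized with ``PyDateTime_IMPORT``:

.. code-block:: c

   PyObject *args = Py_BuildValue("(i)", 1672531200);   /* seconds since the epoch */
   PyObject *date = NULL;
   if (args != NULL) {
       date = PyDate_FromTimestamp(args);   /* datetime.date, or NULL on error */
       Py_DECREF(args);
   }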
View file
@ -127,7 +127,7 @@ Dictionary Objects
Prefer the :c:func:`PyDict_GetItemWithError` function instead.
.. versionchanged:: 3.10
Calling this API without :term:`GIL` held had been allowed for historical
Calling this API without an :term:`attached thread state` had been allowed for historical
reason. It is no longer allowed.
@ -156,7 +156,7 @@ Dictionary Objects
.. c:function:: int PyDict_GetItemStringRef(PyObject *p, const char *key, PyObject **result)
Similar than :c:func:`PyDict_GetItemRef`, but *key* is specified as a
Similar to :c:func:`PyDict_GetItemRef`, but *key* is specified as a
:c:expr:`const char*` UTF-8 encoded bytes string, rather than a
:c:expr:`PyObject*`.
@ -174,6 +174,27 @@ Dictionary Objects
.. versionadded:: 3.4
.. c:function:: int PyDict_SetDefaultRef(PyObject *p, PyObject *key, PyObject *default_value, PyObject **result)
Inserts *default_value* into the dictionary *p* with a key of *key* if the
key is not already present in the dictionary. If *result* is not ``NULL``,
then *\*result* is set to a :term:`strong reference` to either
*default_value*, if the key was not present, or the existing value, if *key*
was already present in the dictionary.
Returns ``1`` if the key was present and *default_value* was not inserted,
or ``0`` if the key was not present and *default_value* was inserted.
On failure, returns ``-1``, sets an exception, and sets ``*result``
to ``NULL``.
For clarity: if you have a strong reference to *default_value* before
calling this function, then after it returns, you hold a strong reference
to both *default_value* and *\*result* (if it's not ``NULL``).
These may refer to the same object: in that case you hold two separate
references to it.
.. versionadded:: 3.13
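A usage sketch (where *dict*, *key* and *default_value* are assumed to exist):

.. code-block:: c

   PyObject *value = NULL;
   int rc = PyDict_SetDefaultRef(dict, key, default_value, &value);
   if (rc < 0) {
       return NULL;   /* error; an exception is set and value is NULL */
   }
   /* rc == 0: default_value was inserted; rc == 1: the key already existed.
      Either way, value is a strong reference owned by this code. */
   Py_DECREF(value);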
.. c:function:: int PyDict_Pop(PyObject *p, PyObject *key, PyObject **result)
Remove *key* from dictionary *p* and optionally return the removed value.
@ -185,7 +206,7 @@ Dictionary Objects
``NULL``, and return ``0``.
- On error, raise an exception and return ``-1``.
This is similar to :meth:`dict.pop`, but without the default value and
Similar to :meth:`dict.pop`, but without the default value and
not raising :exc:`KeyError` if the key is missing.
.. versionadded:: 3.13
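A usage sketch (again with placeholder *dict* and *key*):

.. code-block:: c

   PyObject *removed = NULL;
   int rc = PyDict_Pop(dict, key, &removed);
   if (rc < 0) {
       return NULL;        /* error, exception set */
   }
   if (rc == 1) {
       /* the key was present; removed holds a strong reference */
       Py_DECREF(removed);
   }
   /* rc == 0: the key was missing; removed is NULL and no KeyError is raised */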
@ -269,6 +290,17 @@ Dictionary Objects
Py_DECREF(o);
}
The function is not thread-safe in the :term:`free-threaded <free threading>`
build without external synchronization. You can use
:c:macro:`Py_BEGIN_CRITICAL_SECTION` to lock the dictionary while iterating
over it::
Py_BEGIN_CRITICAL_SECTION(self->dict);
while (PyDict_Next(self->dict, &pos, &key, &value)) {
...
}
Py_END_CRITICAL_SECTION();
.. c:function:: int PyDict_Merge(PyObject *a, PyObject *b, int override)
View file
@ -34,7 +34,7 @@ propagated, additional calls into the Python/C API may not behave as intended
and may fail in mysterious ways.
.. note::
The error indicator is **not** the result of :func:`sys.exc_info()`.
The error indicator is **not** the result of :func:`sys.exc_info`.
The former corresponds to an exception that is not yet caught (and is
therefore still propagating), while the latter returns an exception after
it is caught (and has therefore stopped propagating).
@ -104,8 +104,8 @@ Printing and clearing
Similar to :c:func:`PyErr_WriteUnraisable`, but the *format* and subsequent
parameters help format the warning message; they have the same meaning and
values as in :c:func:`PyUnicode_FromFormat`.
``PyErr_WriteUnraisable(obj)`` is roughtly equivalent to
``PyErr_FormatUnraisable("Exception ignored in: %R, obj)``.
``PyErr_WriteUnraisable(obj)`` is roughly equivalent to
``PyErr_FormatUnraisable("Exception ignored in: %R", obj)``.
If *format* is ``NULL``, only the traceback is printed.
.. versionadded:: 3.13
@ -180,7 +180,7 @@ For convenience, some of these functions will always return a
.. c:function:: PyObject* PyErr_SetFromErrno(PyObject *type)
.. index:: single: strerror()
.. index:: single: strerror (C function)
This is a convenience function to raise an exception when a C library function
has returned an error and set the C variable :c:data:`errno`. It constructs a
@ -221,13 +221,14 @@ For convenience, some of these functions will always return a
.. c:function:: PyObject* PyErr_SetFromWindowsErr(int ierr)
This is a convenience function to raise :exc:`WindowsError`. If called with
This is a convenience function to raise :exc:`OSError`. If called with
*ierr* of ``0``, the error code returned by a call to :c:func:`!GetLastError`
is used instead. It calls the Win32 function :c:func:`!FormatMessage` to retrieve
the Windows description of error code given by *ierr* or :c:func:`!GetLastError`,
then it constructs a tuple object whose first item is the *ierr* value and whose
second item is the corresponding error message (gotten from
:c:func:`!FormatMessage`), and then calls ``PyErr_SetObject(PyExc_WindowsError,
then it constructs an :exc:`OSError` object with the :attr:`~OSError.winerror`
attribute set to the error code, the :attr:`~OSError.strerror` attribute
set to the corresponding error message (gotten from
:c:func:`!FormatMessage`), and then calls ``PyErr_SetObject(PyExc_OSError,
object)``. This function always returns ``NULL``.
.. availability:: Windows.
@ -396,7 +397,7 @@ an error value).
.. c:function:: int PyErr_ResourceWarning(PyObject *source, Py_ssize_t stack_level, const char *format, ...)
Function similar to :c:func:`PyErr_WarnFormat`, but *category* is
:exc:`ResourceWarning` and it passes *source* to :func:`warnings.WarningMessage`.
:exc:`ResourceWarning` and it passes *source* to :class:`!warnings.WarningMessage`.
.. versionadded:: 3.6
@ -412,7 +413,7 @@ Querying the error indicator
own a reference to the return value, so you do not need to :c:func:`Py_DECREF`
it.
The caller must hold the GIL.
The caller must have an :term:`attached thread state`.
.. note::
@ -440,7 +441,7 @@ Querying the error indicator
.. c:function:: PyObject *PyErr_GetRaisedException(void)
Return the exception currently being raised, clearing the error indicator at
the same time.
the same time. Return ``NULL`` if the error indicator is not set.
This function is used by code that needs to catch exceptions,
or code that needs to save and restore the error indicator temporarily.
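A common save/restore sketch:

.. code-block:: c

   PyObject *exc = PyErr_GetRaisedException();   /* NULL if nothing is set */
   /* ... run code that must start with a clear error indicator ... */
   if (exc != NULL) {
       PyErr_SetRaisedException(exc);   /* restore; steals the reference */
   }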
@ -635,7 +636,7 @@ Signal Handling
.. index::
pair: module; signal
single: SIGINT
single: SIGINT (C macro)
single: KeyboardInterrupt (built-in exception)
This function interacts with Python's signal handling.
@ -666,7 +667,7 @@ Signal Handling
.. index::
pair: module; signal
single: SIGINT
single: SIGINT (C macro)
single: KeyboardInterrupt (built-in exception)
Simulate the effect of a :c:macro:`!SIGINT` signal arriving.
@ -674,7 +675,7 @@ Signal Handling
.. note::
This function is async-signal-safe. It can be called without
the :term:`GIL` and from a C signal handler.
an :term:`attached thread state` and from a C signal handler.
.. c:function:: int PyErr_SetInterruptEx(int signum)
@ -701,7 +702,7 @@ Signal Handling
.. note::
This function is async-signal-safe. It can be called without
the :term:`GIL` and from a C signal handler.
an :term:`attached thread state` and from a C signal handler.
.. versionadded:: 3.10
@ -732,7 +733,7 @@ Exception Classes
This creates a class object derived from :exc:`Exception` (accessible in C as
:c:data:`PyExc_Exception`).
The :attr:`__module__` attribute of the new class is set to the first part (up
The :attr:`~type.__module__` attribute of the new class is set to the first part (up
to the last dot) of the *name* argument, and the class name is set to the last
part (after the last dot). The *base* argument can be used to specify alternate
base classes; it can either be only one class or a tuple of classes. The *dict*
@ -852,12 +853,23 @@ The following functions are used to create and modify Unicode exceptions from C.
*\*start*. *start* must not be ``NULL``. Return ``0`` on success, ``-1`` on
failure.
If the :attr:`UnicodeError.object` is an empty sequence, the resulting
*start* is ``0``. Otherwise, it is clipped to ``[0, len(object) - 1]``.
.. seealso:: :attr:`UnicodeError.start`
.. c:function:: int PyUnicodeDecodeError_SetStart(PyObject *exc, Py_ssize_t start)
int PyUnicodeEncodeError_SetStart(PyObject *exc, Py_ssize_t start)
int PyUnicodeTranslateError_SetStart(PyObject *exc, Py_ssize_t start)
Set the *start* attribute of the given exception object to *start*. Return
``0`` on success, ``-1`` on failure.
Set the *start* attribute of the given exception object to *start*.
Return ``0`` on success, ``-1`` on failure.
.. note::
While passing a negative *start* does not raise an exception,
the corresponding getters will not consider it as a relative
offset.
.. c:function:: int PyUnicodeDecodeError_GetEnd(PyObject *exc, Py_ssize_t *end)
int PyUnicodeEncodeError_GetEnd(PyObject *exc, Py_ssize_t *end)
@ -867,6 +879,9 @@ The following functions are used to create and modify Unicode exceptions from C.
*\*end*. *end* must not be ``NULL``. Return ``0`` on success, ``-1`` on
failure.
If the :attr:`UnicodeError.object` is an empty sequence, the resulting
*end* is ``0``. Otherwise, it is clipped to ``[1, len(object)]``.
.. c:function:: int PyUnicodeDecodeError_SetEnd(PyObject *exc, Py_ssize_t end)
int PyUnicodeEncodeError_SetEnd(PyObject *exc, Py_ssize_t end)
int PyUnicodeTranslateError_SetEnd(PyObject *exc, Py_ssize_t end)
@ -874,6 +889,8 @@ The following functions are used to create and modify Unicode exceptions from C.
Set the *end* attribute of the given exception object to *end*. Return ``0``
on success, ``-1`` on failure.
.. seealso:: :attr:`UnicodeError.end`
.. c:function:: PyObject* PyUnicodeDecodeError_GetReason(PyObject *exc)
PyObject* PyUnicodeEncodeError_GetReason(PyObject *exc)
PyObject* PyUnicodeTranslateError_GetReason(PyObject *exc)
@ -904,11 +921,7 @@ because the :ref:`call protocol <call>` takes care of recursion handling.
Marks a point where a recursive C-level call is about to be performed.
If :c:macro:`USE_STACKCHECK` is defined, this function checks if the OS
stack overflowed using :c:func:`PyOS_CheckStack`. In this is the case, it
sets a :exc:`MemoryError` and returns a nonzero value.
The function then checks if the recursion limit is reached. If this is the
The function then checks if the stack limit is reached. If this is the
case, a :exc:`RecursionError` is set and a nonzero value is returned.
Otherwise, zero is returned.
@ -968,59 +981,60 @@ All standard Python exceptions are available as global variables whose names are
the variables:
.. index::
single: PyExc_BaseException
single: PyExc_Exception
single: PyExc_ArithmeticError
single: PyExc_AssertionError
single: PyExc_AttributeError
single: PyExc_BlockingIOError
single: PyExc_BrokenPipeError
single: PyExc_BufferError
single: PyExc_ChildProcessError
single: PyExc_ConnectionAbortedError
single: PyExc_ConnectionError
single: PyExc_ConnectionRefusedError
single: PyExc_ConnectionResetError
single: PyExc_EOFError
single: PyExc_FileExistsError
single: PyExc_FileNotFoundError
single: PyExc_FloatingPointError
single: PyExc_GeneratorExit
single: PyExc_ImportError
single: PyExc_IndentationError
single: PyExc_IndexError
single: PyExc_InterruptedError
single: PyExc_IsADirectoryError
single: PyExc_KeyError
single: PyExc_KeyboardInterrupt
single: PyExc_LookupError
single: PyExc_MemoryError
single: PyExc_ModuleNotFoundError
single: PyExc_NameError
single: PyExc_NotADirectoryError
single: PyExc_NotImplementedError
single: PyExc_OSError
single: PyExc_OverflowError
single: PyExc_PermissionError
single: PyExc_ProcessLookupError
single: PyExc_RecursionError
single: PyExc_ReferenceError
single: PyExc_RuntimeError
single: PyExc_StopAsyncIteration
single: PyExc_StopIteration
single: PyExc_SyntaxError
single: PyExc_SystemError
single: PyExc_SystemExit
single: PyExc_TabError
single: PyExc_TimeoutError
single: PyExc_TypeError
single: PyExc_UnboundLocalError
single: PyExc_UnicodeDecodeError
single: PyExc_UnicodeEncodeError
single: PyExc_UnicodeError
single: PyExc_UnicodeTranslateError
single: PyExc_ValueError
single: PyExc_ZeroDivisionError
single: PyExc_BaseException (C var)
single: PyExc_Exception (C var)
single: PyExc_ArithmeticError (C var)
single: PyExc_AssertionError (C var)
single: PyExc_AttributeError (C var)
single: PyExc_BlockingIOError (C var)
single: PyExc_BrokenPipeError (C var)
single: PyExc_BufferError (C var)
single: PyExc_ChildProcessError (C var)
single: PyExc_ConnectionAbortedError (C var)
single: PyExc_ConnectionError (C var)
single: PyExc_ConnectionRefusedError (C var)
single: PyExc_ConnectionResetError (C var)
single: PyExc_EOFError (C var)
single: PyExc_FileExistsError (C var)
single: PyExc_FileNotFoundError (C var)
single: PyExc_FloatingPointError (C var)
single: PyExc_GeneratorExit (C var)
single: PyExc_ImportError (C var)
single: PyExc_IndentationError (C var)
single: PyExc_IndexError (C var)
single: PyExc_InterruptedError (C var)
single: PyExc_IsADirectoryError (C var)
single: PyExc_KeyError (C var)
single: PyExc_KeyboardInterrupt (C var)
single: PyExc_LookupError (C var)
single: PyExc_MemoryError (C var)
single: PyExc_ModuleNotFoundError (C var)
single: PyExc_NameError (C var)
single: PyExc_NotADirectoryError (C var)
single: PyExc_NotImplementedError (C var)
single: PyExc_OSError (C var)
single: PyExc_OverflowError (C var)
single: PyExc_PermissionError (C var)
single: PyExc_ProcessLookupError (C var)
single: PyExc_PythonFinalizationError (C var)
single: PyExc_RecursionError (C var)
single: PyExc_ReferenceError (C var)
single: PyExc_RuntimeError (C var)
single: PyExc_StopAsyncIteration (C var)
single: PyExc_StopIteration (C var)
single: PyExc_SyntaxError (C var)
single: PyExc_SystemError (C var)
single: PyExc_SystemExit (C var)
single: PyExc_TabError (C var)
single: PyExc_TimeoutError (C var)
single: PyExc_TypeError (C var)
single: PyExc_UnboundLocalError (C var)
single: PyExc_UnicodeDecodeError (C var)
single: PyExc_UnicodeEncodeError (C var)
single: PyExc_UnicodeError (C var)
single: PyExc_UnicodeTranslateError (C var)
single: PyExc_ValueError (C var)
single: PyExc_ZeroDivisionError (C var)
+-----------------------------------------+---------------------------------+----------+
| C Name | Python Name | Notes |
@ -1095,6 +1109,8 @@ the variables:
+-----------------------------------------+---------------------------------+----------+
| :c:data:`PyExc_ProcessLookupError` | :exc:`ProcessLookupError` | |
+-----------------------------------------+---------------------------------+----------+
| :c:data:`PyExc_PythonFinalizationError` | :exc:`PythonFinalizationError` | |
+-----------------------------------------+---------------------------------+----------+
| :c:data:`PyExc_RecursionError` | :exc:`RecursionError` | |
+-----------------------------------------+---------------------------------+----------+
| :c:data:`PyExc_ReferenceError` | :exc:`ReferenceError` | |
@ -1151,18 +1167,18 @@ the variables:
These are compatibility aliases to :c:data:`PyExc_OSError`:
.. index::
single: PyExc_EnvironmentError
single: PyExc_IOError
single: PyExc_WindowsError
single: PyExc_EnvironmentError (C var)
single: PyExc_IOError (C var)
single: PyExc_WindowsError (C var)
+-------------------------------------+----------+
| C Name | Notes |
+=====================================+==========+
| :c:data:`PyExc_EnvironmentError` | |
| :c:data:`!PyExc_EnvironmentError` | |
+-------------------------------------+----------+
| :c:data:`PyExc_IOError` | |
| :c:data:`!PyExc_IOError` | |
+-------------------------------------+----------+
| :c:data:`PyExc_WindowsError` | [2]_ |
| :c:data:`!PyExc_WindowsError` | [2]_ |
+-------------------------------------+----------+
.. versionchanged:: 3.3
@ -1188,17 +1204,17 @@ names are ``PyExc_`` followed by the Python exception name. These have the type
the variables:
.. index::
single: PyExc_Warning
single: PyExc_BytesWarning
single: PyExc_DeprecationWarning
single: PyExc_FutureWarning
single: PyExc_ImportWarning
single: PyExc_PendingDeprecationWarning
single: PyExc_ResourceWarning
single: PyExc_RuntimeWarning
single: PyExc_SyntaxWarning
single: PyExc_UnicodeWarning
single: PyExc_UserWarning
single: PyExc_Warning (C var)
single: PyExc_BytesWarning (C var)
single: PyExc_DeprecationWarning (C var)
single: PyExc_FutureWarning (C var)
single: PyExc_ImportWarning (C var)
single: PyExc_PendingDeprecationWarning (C var)
single: PyExc_ResourceWarning (C var)
single: PyExc_RuntimeWarning (C var)
single: PyExc_SyntaxWarning (C var)
single: PyExc_UnicodeWarning (C var)
single: PyExc_UserWarning (C var)
+------------------------------------------+---------------------------------+----------+
| C Name | Python Name | Notes |
View file
@ -65,8 +65,14 @@ the :mod:`io` APIs instead.
Overrides the normal behavior of :func:`io.open_code` to pass its parameter
through the provided handler.
The handler is a function of type :c:expr:`PyObject *(\*)(PyObject *path,
void *userData)`, where *path* is guaranteed to be :c:type:`PyUnicodeObject`.
The *handler* is a function of type:
.. c:namespace:: NULL
.. c:type:: PyObject * (*Py_OpenCodeHookFunction)(PyObject *, void *)
Equivalent of :c:expr:`PyObject *(\*)(PyObject *path,
void *userData)`, where *path* is guaranteed to be
:c:type:`PyUnicodeObject`.
The *userData* pointer is passed into the hook function. Since hook
functions may be called from different runtimes, this pointer should not
@ -90,7 +96,7 @@ the :mod:`io` APIs instead.
.. c:function:: int PyFile_WriteObject(PyObject *obj, PyObject *p, int flags)
.. index:: single: Py_PRINT_RAW
.. index:: single: Py_PRINT_RAW (C macro)
Write object *obj* to file object *p*. The only supported flag for *flags* is
:c:macro:`Py_PRINT_RAW`; if given, the :func:`str` of the object is written
View file
@ -2,20 +2,20 @@
.. _floatobjects:
Floating Point Objects
Floating-Point Objects
======================
.. index:: pair: object; floating point
.. index:: pair: object; floating-point
.. c:type:: PyFloatObject
This subtype of :c:type:`PyObject` represents a Python floating point object.
This subtype of :c:type:`PyObject` represents a Python floating-point object.
.. c:var:: PyTypeObject PyFloat_Type
This instance of :c:type:`PyTypeObject` represents the Python floating point
This instance of :c:type:`PyTypeObject` represents the Python floating-point
type. This is the same object as :class:`float` in the Python layer.
@ -45,7 +45,7 @@ Floating Point Objects
.. c:function:: double PyFloat_AsDouble(PyObject *pyfloat)
Return a C :c:expr:`double` representation of the contents of *pyfloat*. If
*pyfloat* is not a Python floating point object but has a :meth:`~object.__float__`
*pyfloat* is not a Python floating-point object but has a :meth:`~object.__float__`
method, this method will first be called to convert *pyfloat* into a float.
If :meth:`!__float__` is not defined then it falls back to :meth:`~object.__index__`.
This method returns ``-1.0`` upon failure, so one should call
View file
@ -120,18 +120,46 @@ See also :ref:`Reflection <reflection>`.
.. c:function:: PyObject* PyFrame_GetLocals(PyFrameObject *frame)
Get the *frame*'s :attr:`~frame.f_locals` attribute (:class:`dict`).
Get the *frame*'s :attr:`~frame.f_locals` attribute.
If the frame refers to an :term:`optimized scope`, this returns a
write-through proxy object that allows modifying the locals.
In all other cases (classes, modules, :func:`exec`, :func:`eval`) it returns
the mapping representing the frame locals directly (as described for
:func:`locals`).
Return a :term:`strong reference`.
.. versionadded:: 3.11
.. versionchanged:: 3.13
As part of :pep:`667`, return an instance of :c:var:`PyFrameLocalsProxy_Type`.
.. c:function:: int PyFrame_GetLineNumber(PyFrameObject *frame)
Return the line number that *frame* is currently executing.
Frame Locals Proxies
^^^^^^^^^^^^^^^^^^^^
.. versionadded:: 3.13
The :attr:`~frame.f_locals` attribute on a :ref:`frame object <frame-objects>`
is an instance of a "frame-locals proxy". The proxy object exposes a
write-through view of the underlying locals dictionary for the frame. This
ensures that the variables exposed by ``f_locals`` are always up to date with
the live local variables in the frame itself.
See :pep:`667` for more information.
.. c:var:: PyTypeObject PyFrameLocalsProxy_Type
The type of frame :func:`locals` proxy objects.
.. c:function:: int PyFrameLocalsProxy_Check(PyObject *obj)
Return non-zero if *obj* is a frame :func:`locals` proxy.
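For illustration, a sketch that inspects the locals of the currently
executing frame via :c:func:`PyEval_GetFrame`:

.. code-block:: c

   PyFrameObject *frame = PyEval_GetFrame();          /* borrowed, may be NULL */
   if (frame != NULL) {
       PyObject *locals = PyFrame_GetLocals(frame);   /* strong reference */
       if (locals == NULL) {
           return NULL;
       }
       if (PyFrameLocalsProxy_Check(locals)) {
           /* optimized scope: this is a write-through proxy */
       }
       Py_DECREF(locals);
   }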
Internal Frames
^^^^^^^^^^^^^^^
View file
@ -145,12 +145,13 @@ There are a few functions specific to Python functions.
.. c:type:: PyFunction_WatchEvent
Enumeration of possible function watcher events:
- ``PyFunction_EVENT_CREATE``
- ``PyFunction_EVENT_DESTROY``
- ``PyFunction_EVENT_MODIFY_CODE``
- ``PyFunction_EVENT_MODIFY_DEFAULTS``
- ``PyFunction_EVENT_MODIFY_KWDEFAULTS``
Enumeration of possible function watcher events:
- ``PyFunction_EVENT_CREATE``
- ``PyFunction_EVENT_DESTROY``
- ``PyFunction_EVENT_MODIFY_CODE``
- ``PyFunction_EVENT_MODIFY_DEFAULTS``
- ``PyFunction_EVENT_MODIFY_KWDEFAULTS``
.. versionadded:: 3.12
View file
@ -83,10 +83,15 @@ rules:
.. versionadded:: 3.12
.. c:function:: TYPE* PyObject_GC_Resize(TYPE, PyVarObject *op, Py_ssize_t newsize)
.. c:macro:: PyObject_GC_Resize(TYPE, op, newsize)
Resize an object allocated by :c:macro:`PyObject_NewVar`. Returns the
resized object or ``NULL`` on failure. *op* must not be tracked by the collector yet.
Resize an object allocated by :c:macro:`PyObject_NewVar`.
Returns the resized object of type ``TYPE*`` (refers to any C type)
or ``NULL`` on failure.
*op* must be of type :c:expr:`PyVarObject *`
and must not be tracked by the collector yet.
*newsize* must be of type :c:type:`Py_ssize_t`.
.. c:function:: void PyObject_GC_Track(PyObject *op)
@ -272,7 +277,7 @@ the garbage collector.
Type of the visitor function to be passed to :c:func:`PyUnstable_GC_VisitObjects`.
*arg* is the same as the *arg* passed to ``PyUnstable_GC_VisitObjects``.
Return ``0`` to continue iteration, return ``1`` to stop iteration. Other return
Return ``1`` to continue iteration, return ``0`` to stop iteration. Other return
values are reserved for now so behavior on returning anything else is undefined.
.. versionadded:: 3.12
View file
@ -3,7 +3,7 @@
PyHash API
----------
See also the :c:member:`PyTypeObject.tp_hash` member.
See also the :c:member:`PyTypeObject.tp_hash` member and :ref:`numeric-hash`.
.. c:type:: Py_hash_t
@ -17,6 +17,35 @@ See also the :c:member:`PyTypeObject.tp_hash` member.
.. versionadded:: 3.2
.. c:macro:: PyHASH_MODULUS
The `Mersenne prime <https://en.wikipedia.org/wiki/Mersenne_prime>`_ ``P = 2**n -1``, used for numeric hash scheme.
.. versionadded:: 3.13
.. c:macro:: PyHASH_BITS
The exponent ``n`` of ``P`` in :c:macro:`PyHASH_MODULUS`.
.. versionadded:: 3.13
.. c:macro:: PyHASH_MULTIPLIER
Prime multiplier used in string and various other hashes.
.. versionadded:: 3.13
.. c:macro:: PyHASH_INF
The hash value returned for a positive infinity.
.. versionadded:: 3.13
.. c:macro:: PyHASH_IMAG
The multiplier used for the imaginary part of a complex number.
.. versionadded:: 3.13
.. c:type:: PyHash_FuncDef
@ -59,3 +88,33 @@ See also the :c:member:`PyTypeObject.tp_hash` member.
The function cannot fail: it cannot return ``-1``.
.. versionadded:: 3.13
.. c:function:: Py_hash_t Py_HashBuffer(const void *ptr, Py_ssize_t len)
Compute and return the hash value of a buffer of *len* bytes
starting at address *ptr*. The hash is guaranteed to match that of
:class:`bytes`, :class:`memoryview`, and other built-in objects
that implement the :ref:`buffer protocol <bufferobjects>`.
Use this function to implement hashing for immutable objects whose
:c:member:`~PyTypeObject.tp_richcompare` function compares to another
object's buffer.
*len* must be greater than or equal to ``0``.
This function always succeeds.
.. versionadded:: 3.14
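For illustration, a sketch of a ``tp_hash`` implementation for a hypothetical
immutable object ``MyBuf`` that wraps a byte buffer (the type and its fields
are made up):

.. code-block:: c

   static Py_hash_t
   mybuf_hash(PyObject *self)
   {
       MyBuf *o = (MyBuf *)self;
       /* matches the hash of an equal bytes object */
       return Py_HashBuffer(o->data, o->size);
   }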
.. c:function:: Py_hash_t PyObject_GenericHash(PyObject *obj)
Generic hashing function that is meant to be put into a type
object's ``tp_hash`` slot.
Its result only depends on the object's identity.
.. impl-detail::
In CPython, it is equivalent to :c:func:`Py_HashPointer`.
.. versionadded:: 3.13
View file
@ -13,20 +13,8 @@ Importing Modules
single: __all__ (package variable)
single: modules (in module sys)
This is a simplified interface to :c:func:`PyImport_ImportModuleEx` below,
leaving the *globals* and *locals* arguments set to ``NULL`` and *level* set
to 0. When the *name*
argument contains a dot (when it specifies a submodule of a package), the
*fromlist* argument is set to the list ``['*']`` so that the return value is the
named module rather than the top-level package containing it as would otherwise
be the case. (Unfortunately, this has an additional side effect when *name* in
fact specifies a subpackage instead of a submodule: the submodules specified in
the package's ``__all__`` variable are loaded.) Return a new reference to the
imported module, or ``NULL`` with an exception set on failure. A failing
import of a module doesn't leave the module in :data:`sys.modules`.
This function always uses absolute imports.
This is a wrapper around :c:func:`PyImport_Import()` which takes a
:c:expr:`const char *` as an argument instead of a :c:expr:`PyObject *`.
.. c:function:: PyObject* PyImport_ImportModuleNoBlock(const char *name)
@ -148,14 +136,14 @@ Importing Modules
such modules have no way to know that the module object is in an unknown (and
probably damaged with respect to the module author's intents) state.
The module's :attr:`__spec__` and :attr:`__loader__` will be set, if
not set already, with the appropriate values. The spec's loader will
be set to the module's ``__loader__`` (if set) and to an instance of
:class:`~importlib.machinery.SourceFileLoader` otherwise.
The module's :attr:`~module.__spec__` and :attr:`~module.__loader__` will be
set, if not set already, with the appropriate values. The spec's loader
will be set to the module's :attr:`!__loader__` (if set) and to an instance
of :class:`~importlib.machinery.SourceFileLoader` otherwise.
The module's :attr:`__file__` attribute will be set to the code object's
:attr:`~codeobject.co_filename`. If applicable, :attr:`__cached__` will also
be set.
The module's :attr:`~module.__file__` attribute will be set to the code
object's :attr:`~codeobject.co_filename`. If applicable,
:attr:`~module.__cached__` will also be set.
This function will reload the module if it was already imported. See
:c:func:`PyImport_ReloadModule` for the intended way to reload a module.
@ -167,29 +155,29 @@ Importing Modules
:c:func:`PyImport_ExecCodeModuleWithPathnames`.
.. versionchanged:: 3.12
The setting of :attr:`__cached__` and :attr:`__loader__` is
deprecated. See :class:`~importlib.machinery.ModuleSpec` for
The setting of :attr:`~module.__cached__` and :attr:`~module.__loader__`
is deprecated. See :class:`~importlib.machinery.ModuleSpec` for
alternatives.
.. c:function:: PyObject* PyImport_ExecCodeModuleEx(const char *name, PyObject *co, const char *pathname)
Like :c:func:`PyImport_ExecCodeModule`, but the :attr:`__file__` attribute of
the module object is set to *pathname* if it is non-``NULL``.
Like :c:func:`PyImport_ExecCodeModule`, but the :attr:`~module.__file__`
attribute of the module object is set to *pathname* if it is non-``NULL``.
See also :c:func:`PyImport_ExecCodeModuleWithPathnames`.
.. c:function:: PyObject* PyImport_ExecCodeModuleObject(PyObject *name, PyObject *co, PyObject *pathname, PyObject *cpathname)
Like :c:func:`PyImport_ExecCodeModuleEx`, but the :attr:`__cached__`
Like :c:func:`PyImport_ExecCodeModuleEx`, but the :attr:`~module.__cached__`
attribute of the module object is set to *cpathname* if it is
non-``NULL``. Of the three functions, this is the preferred one to use.
.. versionadded:: 3.3
.. versionchanged:: 3.12
Setting :attr:`__cached__` is deprecated. See
Setting :attr:`~module.__cached__` is deprecated. See
:class:`~importlib.machinery.ModuleSpec` for alternatives.
@ -202,7 +190,7 @@ Importing Modules
.. versionadded:: 3.2
.. versionchanged:: 3.3
Uses :func:`!imp.source_from_cache()` in calculating the source path if
Uses :func:`!imp.source_from_cache` in calculating the source path if
only the bytecode path is provided.
.. versionchanged:: 3.12
No longer uses the removed :mod:`!imp` module.
@ -320,7 +308,7 @@ Importing Modules
The module name, as an ASCII encoded string.
.. c: member:: PyObject* (*initfunc)(void)
.. c:member:: PyObject* (*initfunc)(void)
Initialization function for a module built into the interpreter.
@ -337,3 +325,24 @@ Importing Modules
If Python is initialized multiple times, :c:func:`PyImport_AppendInittab` or
:c:func:`PyImport_ExtendInittab` must be called before each Python
initialization.
.. c:function:: PyObject* PyImport_ImportModuleAttr(PyObject *mod_name, PyObject *attr_name)
Import the module *mod_name* and get its attribute *attr_name*.
Names must be Python :class:`str` objects.
Helper function combining :c:func:`PyImport_Import` and
:c:func:`PyObject_GetAttr`. For example, it can raise :exc:`ImportError` if
the module is not found, and :exc:`AttributeError` if the attribute doesn't
exist.
.. versionadded:: 3.14
.. c:function:: PyObject* PyImport_ImportModuleAttrString(const char *mod_name, const char *attr_name)
Similar to :c:func:`PyImport_ImportModuleAttr`, but names are UTF-8 encoded
strings instead of Python :class:`str` objects.
.. versionadded:: 3.14
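A usage sketch:

.. code-block:: c

   PyObject *join = PyImport_ImportModuleAttrString("os.path", "join");
   if (join == NULL) {
       return NULL;   /* ImportError or AttributeError is set */
   }
   /* ... call or store join ... */
   Py_DECREF(join);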
View file
@ -25,3 +25,4 @@ document the API functions in detail.
memory.rst
objimpl.rst
apiabiversion.rst
monitoring.rst
File diff suppressed because it is too large

View file
@ -6,6 +6,627 @@
Python Initialization Configuration
***********************************
.. _pyinitconfig_api:
PyInitConfig C API
==================
.. versionadded:: 3.14
Python can be initialized with :c:func:`Py_InitializeFromInitConfig`.
The :c:func:`Py_RunMain` function can be used to write a customized Python
program.
See also :ref:`Initialization, Finalization, and Threads <initialization>`.
.. seealso::
:pep:`741` "Python Configuration C API".
Example
-------
Example of customized Python always running with the :ref:`Python Development
Mode <devmode>` enabled; return ``-1`` on error:
.. code-block:: c
int init_python(void)
{
PyInitConfig *config = PyInitConfig_Create();
if (config == NULL) {
printf("PYTHON INIT ERROR: memory allocation failed\n");
return -1;
}
// Enable the Python Development Mode
if (PyInitConfig_SetInt(config, "dev_mode", 1) < 0) {
goto error;
}
// Initialize Python with the configuration
if (Py_InitializeFromInitConfig(config) < 0) {
goto error;
}
PyInitConfig_Free(config);
return 0;
error:
{
// Display the error message.
//
// This uncommon braces style is used, because you cannot make
// goto targets point to variable declarations.
const char *err_msg;
(void)PyInitConfig_GetError(config, &err_msg);
printf("PYTHON INIT ERROR: %s\n", err_msg);
PyInitConfig_Free(config);
return -1;
}
}
Create Config
-------------
.. c:struct:: PyInitConfig
Opaque structure to configure the Python initialization.
.. c:function:: PyInitConfig* PyInitConfig_Create(void)
Create a new initialization configuration using :ref:`Isolated Configuration
<init-isolated-conf>` default values.
It must be freed by :c:func:`PyInitConfig_Free`.
Return ``NULL`` on memory allocation failure.
.. c:function:: void PyInitConfig_Free(PyInitConfig *config)
Free memory of the initialization configuration *config*.
If *config* is ``NULL``, no operation is performed.
Error Handling
--------------
.. c:function:: int PyInitConfig_GetError(PyInitConfig* config, const char **err_msg)
Get the *config* error message.
* Set *\*err_msg* and return ``1`` if an error is set.
* Set *\*err_msg* to ``NULL`` and return ``0`` otherwise.
An error message is a UTF-8 encoded string.
If *config* has an exit code, format the exit code as an error
message.
The error message remains valid until another ``PyInitConfig``
function is called with *config*. The caller doesn't have to free the
error message.
.. c:function:: int PyInitConfig_GetExitCode(PyInitConfig* config, int *exitcode)
Get the *config* exit code.
* Set *\*exitcode* and return ``1`` if *config* has an exit code set.
* Return ``0`` if *config* has no exit code set.
Only the ``Py_InitializeFromInitConfig()`` function can set an exit
code if the ``parse_argv`` option is non-zero.
An exit code can be set when parsing the command line failed (exit
code ``2``) or when a command line option asks to display the command
line help (exit code ``0``).
Get Options
-----------
The configuration option *name* parameter must be a non-NULL null-terminated
UTF-8 encoded string. See :ref:`Configuration Options <pyinitconfig-opts>`.
.. c:function:: int PyInitConfig_HasOption(PyInitConfig *config, const char *name)
Test if the configuration has an option called *name*.
Return ``1`` if the option exists, or return ``0`` otherwise.
.. c:function:: int PyInitConfig_GetInt(PyInitConfig *config, const char *name, int64_t *value)
Get an integer configuration option.
* Set *\*value*, and return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
.. c:function:: int PyInitConfig_GetStr(PyInitConfig *config, const char *name, char **value)
Get a string configuration option as a null-terminated UTF-8
encoded string.
* Set *\*value*, and return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
*\*value* can be set to ``NULL`` if the option is an optional string and the
option is unset.
On success, the string must be released with ``free(value)`` if it's not
``NULL``.
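A usage sketch, reading an option back from an existing *config*:

.. code-block:: c

   char *program_name = NULL;
   if (PyInitConfig_GetStr(config, "program_name", &program_name) < 0) {
       return -1;   /* details available via PyInitConfig_GetError() */
   }
   if (program_name != NULL) {
       printf("program_name = %s\n", program_name);
       free(program_name);
   }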
.. c:function:: int PyInitConfig_GetStrList(PyInitConfig *config, const char *name, size_t *length, char ***items)
Get a string list configuration option as an array of
null-terminated UTF-8 encoded strings.
* Set *\*length* and *\*items*, and return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
On success, the string list must be released with
``PyInitConfig_FreeStrList(length, items)``.
.. c:function:: void PyInitConfig_FreeStrList(size_t length, char **items)
Free memory of a string list created by
``PyInitConfig_GetStrList()``.
Set Options
-----------
The configuration option *name* parameter must be a non-NULL null-terminated
UTF-8 encoded string. See :ref:`Configuration Options <pyinitconfig-opts>`.
Some configuration options have side effects on other options. This logic is
only implemented when ``Py_InitializeFromInitConfig()`` is called, not by the
"Set" functions below. For example, setting ``dev_mode`` to ``1`` does not set
``faulthandler`` to ``1``.
.. c:function:: int PyInitConfig_SetInt(PyInitConfig *config, const char *name, int64_t value)
Set an integer configuration option.
* Return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
.. c:function:: int PyInitConfig_SetStr(PyInitConfig *config, const char *name, const char *value)
Set a string configuration option from a null-terminated UTF-8
encoded string. The string is copied.
* Return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
.. c:function:: int PyInitConfig_SetStrList(PyInitConfig *config, const char *name, size_t length, char * const *items)
Set a string list configuration option from an array of
null-terminated UTF-8 encoded strings. The string list is copied.
* Return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
Module
------
.. c:function:: int PyInitConfig_AddModule(PyInitConfig *config, const char *name, PyObject* (*initfunc)(void))
Add a built-in extension module to the table of built-in modules.
The new module can be imported by the name *name*, and uses the function
*initfunc* as the initialization function called on the first attempted
import.
* Return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
If Python is initialized multiple times, ``PyInitConfig_AddModule()`` must
be called at each Python initialization.
Similar to the :c:func:`PyImport_AppendInittab` function.
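A sketch, which could sit inside a function like ``init_python()`` from the
example above (``PyInit_myextension`` is a hypothetical built-in module
initialization function):

.. code-block:: c

   extern PyObject *PyInit_myextension(void);

   if (PyInitConfig_AddModule(config, "myextension", PyInit_myextension) < 0) {
       goto error;   /* message available via PyInitConfig_GetError() */
   }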
Initialize Python
-----------------
.. c:function:: int Py_InitializeFromInitConfig(PyInitConfig *config)
Initialize Python from the initialization configuration.
* Return ``0`` on success.
* Set an error in *config* and return ``-1`` on error.
* Set an exit code in *config* and return ``-1`` if Python wants to
exit.
See ``PyInitConfig_GetExitCode()`` for the exit code case.
.. _pyinitconfig-opts:
Configuration Options
=====================
.. list-table::
:header-rows: 1
* - Option
- PyConfig/PyPreConfig member
- Type
- Visibility
* - ``"allocator"``
- :c:member:`allocator <PyPreConfig.allocator>`
- ``int``
- Read-only
* - ``"argv"``
- :c:member:`argv <PyConfig.argv>`
- ``list[str]``
- Public
* - ``"base_exec_prefix"``
- :c:member:`base_exec_prefix <PyConfig.base_exec_prefix>`
- ``str``
- Public
* - ``"base_executable"``
- :c:member:`base_executable <PyConfig.base_executable>`
- ``str``
- Public
* - ``"base_prefix"``
- :c:member:`base_prefix <PyConfig.base_prefix>`
- ``str``
- Public
* - ``"buffered_stdio"``
- :c:member:`buffered_stdio <PyConfig.buffered_stdio>`
- ``bool``
- Read-only
* - ``"bytes_warning"``
- :c:member:`bytes_warning <PyConfig.bytes_warning>`
- ``int``
- Public
* - ``"check_hash_pycs_mode"``
- :c:member:`check_hash_pycs_mode <PyConfig.check_hash_pycs_mode>`
- ``str``
- Read-only
* - ``"code_debug_ranges"``
- :c:member:`code_debug_ranges <PyConfig.code_debug_ranges>`
- ``bool``
- Read-only
* - ``"coerce_c_locale"``
- :c:member:`coerce_c_locale <PyPreConfig.coerce_c_locale>`
- ``bool``
- Read-only
* - ``"coerce_c_locale_warn"``
- :c:member:`coerce_c_locale_warn <PyPreConfig.coerce_c_locale_warn>`
- ``bool``
- Read-only
* - ``"configure_c_stdio"``
- :c:member:`configure_c_stdio <PyConfig.configure_c_stdio>`
- ``bool``
- Read-only
* - ``"configure_locale"``
- :c:member:`configure_locale <PyPreConfig.configure_locale>`
- ``bool``
- Read-only
* - ``"cpu_count"``
- :c:member:`cpu_count <PyConfig.cpu_count>`
- ``int``
- Read-only
* - ``"dev_mode"``
- :c:member:`dev_mode <PyConfig.dev_mode>`
- ``bool``
- Read-only
* - ``"dump_refs"``
- :c:member:`dump_refs <PyConfig.dump_refs>`
- ``bool``
- Read-only
* - ``"dump_refs_file"``
- :c:member:`dump_refs_file <PyConfig.dump_refs_file>`
- ``str``
- Read-only
* - ``"exec_prefix"``
- :c:member:`exec_prefix <PyConfig.exec_prefix>`
- ``str``
- Public
* - ``"executable"``
- :c:member:`executable <PyConfig.executable>`
- ``str``
- Public
* - ``"faulthandler"``
- :c:member:`faulthandler <PyConfig.faulthandler>`
- ``bool``
- Read-only
* - ``"filesystem_encoding"``
- :c:member:`filesystem_encoding <PyConfig.filesystem_encoding>`
- ``str``
- Read-only
* - ``"filesystem_errors"``
- :c:member:`filesystem_errors <PyConfig.filesystem_errors>`
- ``str``
- Read-only
* - ``"hash_seed"``
- :c:member:`hash_seed <PyConfig.hash_seed>`
- ``int``
- Read-only
* - ``"home"``
- :c:member:`home <PyConfig.home>`
- ``str``
- Read-only
* - ``"import_time"``
- :c:member:`import_time <PyConfig.import_time>`
- ``bool``
- Read-only
* - ``"inspect"``
- :c:member:`inspect <PyConfig.inspect>`
- ``bool``
- Public
* - ``"install_signal_handlers"``
- :c:member:`install_signal_handlers <PyConfig.install_signal_handlers>`
- ``bool``
- Read-only
* - ``"int_max_str_digits"``
- :c:member:`int_max_str_digits <PyConfig.int_max_str_digits>`
- ``int``
- Public
* - ``"interactive"``
- :c:member:`interactive <PyConfig.interactive>`
- ``bool``
- Public
* - ``"isolated"``
- :c:member:`isolated <PyConfig.isolated>`
- ``bool``
- Read-only
* - ``"legacy_windows_fs_encoding"``
- :c:member:`legacy_windows_fs_encoding <PyPreConfig.legacy_windows_fs_encoding>`
- ``bool``
- Read-only
* - ``"legacy_windows_stdio"``
- :c:member:`legacy_windows_stdio <PyConfig.legacy_windows_stdio>`
- ``bool``
- Read-only
* - ``"malloc_stats"``
- :c:member:`malloc_stats <PyConfig.malloc_stats>`
- ``bool``
- Read-only
* - ``"module_search_paths"``
- :c:member:`module_search_paths <PyConfig.module_search_paths>`
- ``list[str]``
- Public
* - ``"optimization_level"``
- :c:member:`optimization_level <PyConfig.optimization_level>`
- ``int``
- Public
* - ``"orig_argv"``
- :c:member:`orig_argv <PyConfig.orig_argv>`
- ``list[str]``
- Read-only
* - ``"parse_argv"``
- :c:member:`parse_argv <PyConfig.parse_argv>`
- ``bool``
- Read-only
* - ``"parser_debug"``
- :c:member:`parser_debug <PyConfig.parser_debug>`
- ``bool``
- Public
* - ``"pathconfig_warnings"``
- :c:member:`pathconfig_warnings <PyConfig.pathconfig_warnings>`
- ``bool``
- Read-only
* - ``"perf_profiling"``
- :c:member:`perf_profiling <PyConfig.perf_profiling>`
- ``bool``
- Read-only
* - ``"platlibdir"``
- :c:member:`platlibdir <PyConfig.platlibdir>`
- ``str``
- Public
* - ``"prefix"``
- :c:member:`prefix <PyConfig.prefix>`
- ``str``
- Public
* - ``"program_name"``
- :c:member:`program_name <PyConfig.program_name>`
- ``str``
- Read-only
* - ``"pycache_prefix"``
- :c:member:`pycache_prefix <PyConfig.pycache_prefix>`
- ``str``
- Public
* - ``"quiet"``
- :c:member:`quiet <PyConfig.quiet>`
- ``bool``
- Public
* - ``"run_command"``
- :c:member:`run_command <PyConfig.run_command>`
- ``str``
- Read-only
* - ``"run_filename"``
- :c:member:`run_filename <PyConfig.run_filename>`
- ``str``
- Read-only
* - ``"run_module"``
- :c:member:`run_module <PyConfig.run_module>`
- ``str``
- Read-only
* - ``"run_presite"``
- :c:member:`run_presite <PyConfig.run_presite>`
- ``str``
- Read-only
* - ``"safe_path"``
- :c:member:`safe_path <PyConfig.safe_path>`
- ``bool``
- Read-only
* - ``"show_ref_count"``
- :c:member:`show_ref_count <PyConfig.show_ref_count>`
- ``bool``
- Read-only
* - ``"site_import"``
- :c:member:`site_import <PyConfig.site_import>`
- ``bool``
- Read-only
* - ``"skip_source_first_line"``
- :c:member:`skip_source_first_line <PyConfig.skip_source_first_line>`
- ``bool``
- Read-only
* - ``"stdio_encoding"``
- :c:member:`stdio_encoding <PyConfig.stdio_encoding>`
- ``str``
- Read-only
* - ``"stdio_errors"``
- :c:member:`stdio_errors <PyConfig.stdio_errors>`
- ``str``
- Read-only
* - ``"stdlib_dir"``
- :c:member:`stdlib_dir <PyConfig.stdlib_dir>`
- ``str``
- Public
* - ``"tracemalloc"``
- :c:member:`tracemalloc <PyConfig.tracemalloc>`
- ``int``
- Read-only
* - ``"use_environment"``
- :c:member:`use_environment <PyConfig.use_environment>`
- ``bool``
- Public
* - ``"use_frozen_modules"``
- :c:member:`use_frozen_modules <PyConfig.use_frozen_modules>`
- ``bool``
- Read-only
* - ``"use_hash_seed"``
- :c:member:`use_hash_seed <PyConfig.use_hash_seed>`
- ``bool``
- Read-only
* - ``"use_system_logger"``
- :c:member:`use_system_logger <PyConfig.use_system_logger>`
- ``bool``
- Read-only
* - ``"user_site_directory"``
- :c:member:`user_site_directory <PyConfig.user_site_directory>`
- ``bool``
- Read-only
* - ``"utf8_mode"``
- :c:member:`utf8_mode <PyPreConfig.utf8_mode>`
- ``bool``
- Read-only
* - ``"verbose"``
- :c:member:`verbose <PyConfig.verbose>`
- ``int``
- Public
* - ``"warn_default_encoding"``
- :c:member:`warn_default_encoding <PyConfig.warn_default_encoding>`
- ``bool``
- Read-only
* - ``"warnoptions"``
- :c:member:`warnoptions <PyConfig.warnoptions>`
- ``list[str]``
- Public
* - ``"write_bytecode"``
- :c:member:`write_bytecode <PyConfig.write_bytecode>`
- ``bool``
- Public
* - ``"xoptions"``
- :c:member:`xoptions <PyConfig.xoptions>`
- ``dict[str, str]``
- Public
* - ``"_pystats"``
- :c:member:`_pystats <PyConfig._pystats>`
- ``bool``
- Read-only
Visibility:
* Public: Can be read by :c:func:`PyConfig_Get` and set by
  :c:func:`PyConfig_Set`.
* Read-only: Can be read by :c:func:`PyConfig_Get`, but cannot be set by
  :c:func:`PyConfig_Set`.
Runtime Python configuration API
================================
At runtime, it's possible to get and set configuration options using
:c:func:`PyConfig_Get` and :c:func:`PyConfig_Set` functions.
The configuration option *name* parameter must be a non-NULL null-terminated
UTF-8 encoded string. See :ref:`Configuration Options <pyinitconfig-opts>`.
Some options are read from the :mod:`sys` attributes. For example, the option
``"argv"`` is read from :data:`sys.argv`.
.. c:function:: PyObject* PyConfig_Get(const char *name)
Get the current runtime value of a configuration option as a Python object.
* Return a new reference on success.
* Set an exception and return ``NULL`` on error.
The object type depends on the configuration option. It can be:
* ``bool``
* ``int``
* ``str``
* ``list[str]``
* ``dict[str, str]``
The caller must have an :term:`attached thread state`. The function cannot
be called before Python initialization nor after Python finalization.
.. versionadded:: 3.14
.. c:function:: int PyConfig_GetInt(const char *name, int *value)
Similar to :c:func:`PyConfig_Get`, but get the value as a C int.
* Return ``0`` on success.
* Set an exception and return ``-1`` on error.
.. versionadded:: 3.14
.. c:function:: PyObject* PyConfig_Names(void)
Get all configuration option names as a ``frozenset``.
* Return a new reference on success.
* Set an exception and return ``NULL`` on error.
The caller must have an :term:`attached thread state`. The function cannot
be called before Python initialization nor after Python finalization.
.. versionadded:: 3.14
.. c:function:: int PyConfig_Set(const char *name, PyObject *value)
Set the current runtime value of a configuration option.
* Raise a :exc:`ValueError` if there is no option *name*.
* Raise a :exc:`ValueError` if *value* is an invalid value.
* Raise a :exc:`ValueError` if the option is read-only (cannot be set).
* Raise a :exc:`TypeError` if *value* does not have the proper type.
The caller must have an :term:`attached thread state`. The function cannot
be called before Python initialization nor after Python finalization.
.. versionadded:: 3.14
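For illustration only, a short sketch of reading and updating options at
runtime (with a thread state attached, inside a function that returns ``NULL``
on error) could look like this::

    /* Read an option as a Python object */
    PyObject *argv = PyConfig_Get("argv");
    if (argv == NULL) {
        return NULL;                 /* exception already set */
    }
    /* ... use argv ... */
    Py_DECREF(argv);

    /* Read an option directly as a C int */
    int verbose;
    if (PyConfig_GetInt("verbose", &verbose) < 0) {
        return NULL;
    }

    /* Update a writable ("Public") option */
    if (PyConfig_Set("inspect", Py_True) < 0) {
        return NULL;                 /* ValueError/TypeError set */
    }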
.. _pyconfig_api:
PyConfig C API
==============
.. versionadded:: 3.8
Python can be initialized with :c:func:`Py_InitializeFromConfig` and the
@ -34,7 +655,7 @@ See also :ref:`Initialization, Finalization, and Threads <initialization>`.
Example
=======
-------
Example of customized Python always running in isolated mode::
@ -73,7 +694,7 @@ Example of customized Python always running in isolated mode::
PyWideStringList
================
----------------
.. c:type:: PyWideStringList
@ -116,7 +737,7 @@ PyWideStringList
List items.
PyStatus
========
--------
.. c:type:: PyStatus
@ -210,7 +831,7 @@ Example::
PyPreConfig
===========
-----------
.. c:type:: PyPreConfig
@ -321,7 +942,7 @@ PyPreConfig
* Set :c:member:`PyConfig.filesystem_encoding` to ``"mbcs"``,
* Set :c:member:`PyConfig.filesystem_errors` to ``"replace"``.
Initialized the from :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment
Initialized from the :envvar:`PYTHONLEGACYWINDOWSFSENCODING` environment
variable value.
Only available on Windows. ``#ifdef MS_WINDOWS`` macro can be used for
@ -360,7 +981,7 @@ PyPreConfig
.. _c-preinit:
Preinitialize Python with PyPreConfig
=====================================
-------------------------------------
The preinitialization of Python:
@ -440,7 +1061,7 @@ the :ref:`Python UTF-8 Mode <utf8-mode>`::
PyConfig
========
--------
.. c:type:: PyConfig
@ -509,7 +1130,7 @@ PyConfig
The :c:func:`PyConfig_Read` function only parses
:c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv`
is set to ``2`` after arguments are parsed. Since Python arguments are
strippped from :c:member:`PyConfig.argv`, parsing arguments twice would
stripped from :c:member:`PyConfig.argv`, parsing arguments twice would
parse the application options as Python options.
:ref:`Preinitialize Python <c-preinit>` if needed.
@ -731,6 +1352,16 @@ PyConfig
Default: ``0``.
.. c:member:: wchar_t* dump_refs_file
Filename where to dump Python references.
Set by the :envvar:`PYTHONDUMPREFSFILE` environment variable.
Default: ``NULL``.
.. versionadded:: 3.11
.. c:member:: wchar_t* exec_prefix
The site-specific directory prefix where the platform-dependent Python
@ -809,6 +1440,15 @@ PyConfig
See also the :c:member:`~PyConfig.filesystem_encoding` member.
.. c:member:: int use_frozen_modules
If non-zero, use frozen modules.
Set by the :envvar:`PYTHON_FROZEN_MODULES` environment variable.
Default: ``1`` in a release build, or ``0`` in a :ref:`debug build
<debug-build>`.
.. c:member:: unsigned long hash_seed
.. c:member:: int use_hash_seed
@ -1041,7 +1681,7 @@ PyConfig
The :c:func:`PyConfig_Read` function only parses
:c:member:`PyConfig.argv` arguments once: :c:member:`PyConfig.parse_argv`
is set to ``2`` after arguments are parsed. Since Python arguments are
strippped from :c:member:`PyConfig.argv`, parsing arguments twice would
stripped from :c:member:`PyConfig.argv`, parsing arguments twice would
parse the application options as Python options.
Default: ``1`` in Python mode, ``0`` in isolated mode.
@ -1245,18 +1885,34 @@ PyConfig
.. c:member:: int perf_profiling
Enable compatibility mode with the perf profiler?
Enable the Linux ``perf`` profiler support?
If non-zero, initialize the perf trampoline. See :ref:`perf_profiling`
for more information.
If equal to ``1``, enable support for the Linux ``perf`` profiler.
Set by :option:`-X perf <-X>` command line option and by the
If equal to ``2``, enable support for the Linux ``perf`` profiler with
DWARF JIT support.
Set to ``1`` by :option:`-X perf <-X>` command-line option and the
:envvar:`PYTHONPERFSUPPORT` environment variable.
Set to ``2`` by the :option:`-X perf_jit <-X>` command-line option and
the :envvar:`PYTHON_PERF_JIT_SUPPORT` environment variable.
Default: ``-1``.
.. seealso::
See :ref:`perf_profiling` for more information.
.. versionadded:: 3.12
.. c:member:: wchar_t* stdlib_dir
Directory of the Python standard library.
Default: ``NULL``.
.. versionadded:: 3.11
.. c:member:: int use_environment
Use :ref:`environment variables <using-on-envvars>`?
@ -1268,6 +1924,18 @@ PyConfig
Default: ``1`` in Python config and ``0`` in isolated config.
.. c:member:: int use_system_logger
If non-zero, ``stdout`` and ``stderr`` will be redirected to the system
log.
Only available on macOS 10.12 and later, and on iOS.
Default: ``0`` (don't use the system log) on macOS; ``1`` on iOS (use the
system log).
.. versionadded:: 3.14
.. c:member:: int user_site_directory
If non-zero, add the user site directory to :data:`sys.path`.
@ -1332,6 +2000,15 @@ PyConfig
Default: empty list.
.. c:member:: int _pystats
If non-zero, write performance statistics at Python exit.
Need a special build with the ``Py_STATS`` macro:
see :option:`--enable-pystats`.
Default: ``0``.
If :c:member:`~PyConfig.parse_argv` is non-zero, :c:member:`~PyConfig.argv`
arguments are parsed the same way the regular Python parses :ref:`command line
arguments <using-on-cmdline>`, and Python arguments are stripped from
@ -1345,14 +2022,13 @@ the :option:`-X` command line option.
The ``show_alloc_count`` field has been removed.
.. _init-from-config:
Initialization with PyConfig
============================
----------------------------
Function to initialize Python:
.. c:function:: PyStatus Py_InitializeFromConfig(const PyConfig *config)
Initialize Python from *config* configuration.
Initializing the interpreter from a populated configuration struct is handled
by calling :c:func:`Py_InitializeFromConfig`.
The caller is responsible for handling exceptions (error or exit) using
:c:func:`PyStatus_Exception` and :c:func:`Py_ExitStatusException`.
@ -1458,7 +2134,7 @@ initialization::
.. _init-isolated-conf:
Isolated Configuration
======================
----------------------
:c:func:`PyPreConfig_InitIsolatedConfig` and
:c:func:`PyConfig_InitIsolatedConfig` functions create a configuration to
@ -1478,7 +2154,7 @@ to avoid computing the default path configuration.
.. _init-python-config:
Python Configuration
====================
--------------------
:c:func:`PyPreConfig_InitPythonConfig` and :c:func:`PyConfig_InitPythonConfig`
functions create a configuration to build a customized Python which behaves as
@ -1496,7 +2172,7 @@ and :ref:`Python UTF-8 Mode <utf8-mode>`
.. _init-path-config:
Python Path Configuration
=========================
-------------------------
:c:type:`PyConfig` contains multiple fields for the path configuration:
@ -1578,28 +2254,22 @@ If a ``._pth`` file is present:
* Set :c:member:`~PyConfig.site_import` to ``0``.
* Set :c:member:`~PyConfig.safe_path` to ``1``.
If :c:member:`~PyConfig.home` is not set and a ``pyvenv.cfg`` file is present in
the same directory as :c:member:`~PyConfig.executable`, or its parent,
:c:member:`~PyConfig.prefix` and :c:member:`~PyConfig.exec_prefix` are set to
that location. When this happens, :c:member:`~PyConfig.base_prefix` and
:c:member:`~PyConfig.base_exec_prefix` still keep their value, pointing to the
base installation. See :ref:`sys-path-init-virtual-environments` for more
information.
The ``__PYVENV_LAUNCHER__`` environment variable is used to set
:c:member:`PyConfig.base_executable`.
.. versionchanged:: 3.14
Py_RunMain()
============
.. c:function:: int Py_RunMain(void)
Execute the command (:c:member:`PyConfig.run_command`), the script
(:c:member:`PyConfig.run_filename`) or the module
(:c:member:`PyConfig.run_module`) specified on the command line or in the
configuration.
By default, and when the :option:`-i` option is used, run the REPL.
Finally, finalizes Python and returns an exit status that can be passed to
the ``exit()`` function.
See :ref:`Python Configuration <init-python-config>` for an example of
customized Python always running in isolated mode using
:c:func:`Py_RunMain`.
:c:member:`~PyConfig.prefix`, and :c:member:`~PyConfig.exec_prefix`, are now
set to the ``pyvenv.cfg`` directory. This was previously done by :mod:`site`,
therefore affected by :option:`-S`.
Py_GetArgcArgv()
@ -1611,89 +2281,13 @@ Py_GetArgcArgv()
See also :c:member:`PyConfig.orig_argv` member.
Delaying main module execution
==============================
Multi-Phase Initialization Private Provisional API
==================================================
In some embedding use cases, it may be desirable to separate interpreter initialization
from the execution of the main module.
This section is a private provisional API introducing multi-phase
initialization, the core feature of :pep:`432`:
* "Core" initialization phase, "bare minimum Python":
* Builtin types;
* Builtin exceptions;
* Builtin and frozen modules;
* The :mod:`sys` module is only partially initialized
(ex: :data:`sys.path` doesn't exist yet).
* "Main" initialization phase, Python is fully initialized:
* Install and configure :mod:`importlib`;
* Apply the :ref:`Path Configuration <init-path-config>`;
* Install signal handlers;
* Finish :mod:`sys` module initialization (ex: create :data:`sys.stdout`
and :data:`sys.path`);
* Enable optional features like :mod:`faulthandler` and :mod:`tracemalloc`;
* Import the :mod:`site` module;
* etc.
Private provisional API:
* :c:member:`PyConfig._init_main`: if set to ``0``,
:c:func:`Py_InitializeFromConfig` stops at the "Core" initialization phase.
.. c:function:: PyStatus _Py_InitializeMain(void)
Move to the "Main" initialization phase, finish the Python initialization.
No module is imported during the "Core" phase and the ``importlib`` module is
not configured: the :ref:`Path Configuration <init-path-config>` is only
applied during the "Main" phase. It may allow to customize Python in Python to
override or tune the :ref:`Path Configuration <init-path-config>`, maybe
install a custom :data:`sys.meta_path` importer or an import hook, etc.
It may become possible to calculate the :ref:`Path Configuration
<init-path-config>` in Python, after the Core phase and before the Main phase,
which is one of the :pep:`432` motivation.
The "Core" phase is not properly defined: what should be and what should
not be available at this phase is not specified yet. The API is marked
as private and provisional: the API can be modified or even be removed
anytime until a proper public API is designed.
Example running Python code between "Core" and "Main" initialization
phases::
void init_python(void)
{
PyStatus status;
PyConfig config;
PyConfig_InitPythonConfig(&config);
config._init_main = 0;
/* ... customize 'config' configuration ... */
status = Py_InitializeFromConfig(&config);
PyConfig_Clear(&config);
if (PyStatus_Exception(status)) {
Py_ExitStatusException(status);
}
/* Use sys.stderr because sys.stdout is only created
by _Py_InitializeMain() */
int res = PyRun_SimpleString(
"import sys; "
"print('Run Python code before _Py_InitializeMain', "
"file=sys.stderr)");
if (res < 0) {
exit(1);
}
/* ... put more configuration code here ... */
status = _Py_InitializeMain();
if (PyStatus_Exception(status)) {
Py_ExitStatusException(status);
}
}
This separation can be achieved by setting ``PyConfig.run_command`` to the empty
string during initialization (to prevent the interpreter from dropping into the
interactive prompt), and then subsequently executing the desired main module
code using ``__main__.__dict__`` as the global namespace.
View file
@ -30,6 +30,16 @@ familiar with writing an extension before attempting to embed Python in a real
application.
Language version compatibility
==============================
Python's C API is compatible with C11 and C++11 versions of C and C++.
This is a lower limit: the C API does not require features from later
C/C++ versions.
You do *not* need to enable your compiler's "c11 mode".
Coding standards
================
@ -148,7 +158,7 @@ complete listing.
worse performances (due to increased code size for example). The compiler is
usually smarter than the developer for the cost/benefit analysis.
If Python is :ref:`built in debug mode <debug-build>` (if the ``Py_DEBUG``
If Python is :ref:`built in debug mode <debug-build>` (if the :c:macro:`Py_DEBUG`
macro is defined), the :c:macro:`Py_ALWAYS_INLINE` macro does nothing.
It must be specified before the function return type. Usage::
@ -325,8 +335,8 @@ objects that reference each other here; for now, the solution
is "don't do that.")
.. index::
single: Py_INCREF()
single: Py_DECREF()
single: Py_INCREF (C function)
single: Py_DECREF (C function)
Reference counts are always manipulated explicitly. The normal way is
to use the macro :c:func:`Py_INCREF` to take a new reference to an
@ -401,8 +411,8 @@ function, that function assumes that it now owns that reference, and you are not
responsible for it any longer.
.. index::
single: PyList_SetItem()
single: PyTuple_SetItem()
single: PyList_SetItem (C function)
single: PyTuple_SetItem (C function)
Few functions steal references; the two notable exceptions are
:c:func:`PyList_SetItem` and :c:func:`PyTuple_SetItem`, which steal a reference
@ -491,8 +501,8 @@ using :c:func:`PySequence_GetItem` (which happens to take exactly the same
arguments), you do own a reference to the returned object.
.. index::
single: PyList_GetItem()
single: PySequence_GetItem()
single: PyList_GetItem (C function)
single: PySequence_GetItem (C function)
Here is an example of how you could write a function that computes the sum of
the items in a list of integers; once using :c:func:`PyList_GetItem`, and once
@ -587,7 +597,7 @@ caller, then to the caller's caller, and so on, until they reach the top-level
interpreter, where they are reported to the user accompanied by a stack
traceback.
.. index:: single: PyErr_Occurred()
.. index:: single: PyErr_Occurred (C function)
For C programmers, however, error checking always has to be explicit. All
functions in the Python/C API can raise exceptions, unless an explicit claim is
@ -601,8 +611,8 @@ ambiguous return value, and require explicit testing for errors with
:c:func:`PyErr_Occurred`. These exceptions are always explicitly documented.
.. index::
single: PyErr_SetString()
single: PyErr_Clear()
single: PyErr_SetString (C function)
single: PyErr_Clear (C function)
Exception state is maintained in per-thread storage (this is equivalent to
using global storage in an unthreaded application). A thread can be in one of
@ -624,7 +634,7 @@ an exception is being passed on between C functions until it reaches the Python
bytecode interpreter's main loop, which takes care of transferring it to
``sys.exc_info()`` and friends.
.. index:: single: exc_info() (in module sys)
.. index:: single: exc_info (in module sys)
Note that starting with Python 1.5, the preferred, thread-safe way to access the
exception state from Python code is to call the function :func:`sys.exc_info`,
@ -709,9 +719,9 @@ Here is the corresponding C code, in all its glory::
.. index:: single: incr_item()
.. index::
single: PyErr_ExceptionMatches()
single: PyErr_Clear()
single: Py_XDECREF()
single: PyErr_ExceptionMatches (C function)
single: PyErr_Clear (C function)
single: Py_XDECREF (C function)
This example represents an endorsed use of the ``goto`` statement in C!
It illustrates the use of :c:func:`PyErr_ExceptionMatches` and
@ -735,7 +745,7 @@ the finalization, of the Python interpreter. Most functionality of the
interpreter can only be used after the interpreter has been initialized.
.. index::
single: Py_Initialize()
single: Py_Initialize (C function)
pair: module; builtins
pair: module; __main__
pair: module; sys
@ -770,10 +780,10 @@ environment variable :envvar:`PYTHONHOME`, or insert additional directories in
front of the standard path by setting :envvar:`PYTHONPATH`.
.. index::
single: Py_GetPath()
single: Py_GetPrefix()
single: Py_GetExecPrefix()
single: Py_GetProgramFullPath()
single: Py_GetPath (C function)
single: Py_GetPrefix (C function)
single: Py_GetExecPrefix (C function)
single: Py_GetProgramFullPath (C function)
The embedding application can steer the search by setting
:c:member:`PyConfig.program_name` *before* calling
@ -784,7 +794,7 @@ control has to provide its own implementation of :c:func:`Py_GetPath`,
:c:func:`Py_GetPrefix`, :c:func:`Py_GetExecPrefix`, and
:c:func:`Py_GetProgramFullPath` (all defined in :file:`Modules/getpath.c`).
.. index:: single: Py_IsInitialized()
.. index:: single: Py_IsInitialized (C function)
Sometimes, it is desirable to "uninitialize" Python. For instance, the
application may want to start over (make another call to
@ -812,12 +822,14 @@ available that support tracing of reference counts, debugging the memory
allocator, or low-level profiling of the main interpreter loop. Only the most
frequently used builds will be described in the remainder of this section.
Compiling the interpreter with the :c:macro:`Py_DEBUG` macro defined produces
.. c:macro:: Py_DEBUG
Compiling the interpreter with the :c:macro:`!Py_DEBUG` macro defined produces
what is generally meant by :ref:`a debug build of Python <debug-build>`.
:c:macro:`Py_DEBUG` is enabled in the Unix build by adding
:c:macro:`!Py_DEBUG` is enabled in the Unix build by adding
:option:`--with-pydebug` to the :file:`./configure` command.
It is also implied by the presence of the
not-Python-specific :c:macro:`_DEBUG` macro. When :c:macro:`Py_DEBUG` is enabled
not-Python-specific :c:macro:`!_DEBUG` macro. When :c:macro:`!Py_DEBUG` is enabled
in the Unix build, compiler optimization is disabled.
In addition to the reference count debugging described below, extra checks are
@ -832,4 +844,3 @@ after every statement run by the interpreter.)
Please refer to :file:`Misc/SpecialBuilds.txt` in the Python source distribution
for more detailed information.
View file
@ -10,7 +10,8 @@ There are two functions specifically for working with iterators.
.. c:function:: int PyIter_Check(PyObject *o)
Return non-zero if the object *o* can be safely passed to
:c:func:`PyIter_Next`, and ``0`` otherwise. This function always succeeds.
:c:func:`PyIter_NextItem` and ``0`` otherwise.
This function always succeeds.
.. c:function:: int PyAIter_Check(PyObject *o)
@ -19,41 +20,27 @@ There are two functions specifically for working with iterators.
.. versionadded:: 3.10
.. c:function:: int PyIter_NextItem(PyObject *iter, PyObject **item)
Return ``1`` and set *item* to a :term:`strong reference` of the
next value of the iterator *iter* on success.
Return ``0`` and set *item* to ``NULL`` if there are no remaining values.
Return ``-1``, set *item* to ``NULL`` and set an exception on error.
.. versionadded:: 3.14
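A minimal iteration loop using this function, assuming *obj* is an iterable
and the surrounding function returns ``NULL`` on error, might look like::

    PyObject *iterator = PyObject_GetIter(obj);
    if (iterator == NULL) {
        return NULL;                 /* propagate error */
    }

    PyObject *item;
    int rc;
    while ((rc = PyIter_NextItem(iterator, &item)) == 1) {
        /* ... do something with item ... */
        Py_DECREF(item);
    }
    Py_DECREF(iterator);

    if (rc < 0) {
        return NULL;                 /* error set by PyIter_NextItem() */
    }
    /* rc == 0: iteration finished normally */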
.. c:function:: PyObject* PyIter_Next(PyObject *o)
This is an older version of :c:func:`!PyIter_NextItem`,
which is retained for backwards compatibility.
Prefer :c:func:`PyIter_NextItem`.
Return the next value from the iterator *o*. The object must be an iterator
according to :c:func:`PyIter_Check` (it is up to the caller to check this).
If there are no remaining values, returns ``NULL`` with no exception set.
If an error occurs while retrieving the item, returns ``NULL`` and passes
along the exception.
To write a loop which iterates over an iterator, the C code should look
something like this::
PyObject *iterator = PyObject_GetIter(obj);
PyObject *item;
if (iterator == NULL) {
/* propagate error */
}
while ((item = PyIter_Next(iterator))) {
/* do something with item */
...
/* release reference when done */
Py_DECREF(item);
}
Py_DECREF(iterator);
if (PyErr_Occurred()) {
/* propagate error */
}
else {
/* continue doing useful work */
}
.. c:type:: PySendResult
The enum value used to represent different results of :c:func:`PyIter_Send`.
View file
@ -38,9 +38,12 @@ List Objects
.. note::
If *len* is greater than zero, the returned list object's items are
set to ``NULL``. Thus you cannot use abstract API functions such as
:c:func:`PySequence_SetItem` or expose the object to Python code before
setting all items to a real object with :c:func:`PyList_SetItem`.
set to ``NULL``. Thus you cannot use abstract API functions such as
:c:func:`PySequence_SetItem` or expose the object to Python code before
setting all items to a real object with :c:func:`PyList_SetItem` or
:c:func:`PyList_SET_ITEM()`. The following APIs are safe APIs before
the list is fully initialized: :c:func:`PyList_SetItem()` and :c:func:`PyList_SET_ITEM()`.
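For instance, a sketch of safely initializing a fresh list, assuming *n* and a
hypothetical ``make_item()`` helper are provided by the caller, could look
like::

    PyObject *list = PyList_New(n);
    if (list == NULL) {
        return NULL;
    }
    for (Py_ssize_t i = 0; i < n; i++) {
        PyObject *item = make_item(i);      /* hypothetical helper */
        if (item == NULL) {
            Py_DECREF(list);
            return NULL;
        }
        PyList_SET_ITEM(list, i, item);     /* steals the reference to item */
    }
    return list;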
.. c:function:: Py_ssize_t PyList_Size(PyObject *list)
@ -56,13 +59,21 @@ List Objects
Similar to :c:func:`PyList_Size`, but without error checking.
.. c:function:: PyObject* PyList_GetItem(PyObject *list, Py_ssize_t index)
.. c:function:: PyObject* PyList_GetItemRef(PyObject *list, Py_ssize_t index)
Return the object at position *index* in the list pointed to by *list*. The
position must be non-negative; indexing from the end of the list is not
supported. If *index* is out of bounds (<0 or >=len(list)),
supported. If *index* is out of bounds (:code:`<0 or >=len(list)`),
return ``NULL`` and set an :exc:`IndexError` exception.
.. versionadded:: 3.13
.. c:function:: PyObject* PyList_GetItem(PyObject *list, Py_ssize_t index)
Like :c:func:`PyList_GetItemRef`, but returns a
:term:`borrowed reference` instead of a :term:`strong reference`.
.. c:function:: PyObject* PyList_GET_ITEM(PyObject *list, Py_ssize_t i)
View file
@ -69,12 +69,32 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
on failure.
.. c:function:: PyObject* PyLong_FromInt32(int32_t value)
PyObject* PyLong_FromInt64(int64_t value)
Return a new :c:type:`PyLongObject` object from a signed C
:c:expr:`int32_t` or :c:expr:`int64_t`, or ``NULL``
with an exception set on failure.
.. versionadded:: 3.14
.. c:function:: PyObject* PyLong_FromUnsignedLongLong(unsigned long long v)
Return a new :c:type:`PyLongObject` object from a C :c:expr:`unsigned long long`,
or ``NULL`` on failure.
.. c:function:: PyObject* PyLong_FromUInt32(uint32_t value)
PyObject* PyLong_FromUInt64(uint64_t value)
Return a new :c:type:`PyLongObject` object from an unsigned C
:c:expr:`uint32_t` or :c:expr:`uint64_t`, or ``NULL``
with an exception set on failure.
.. versionadded:: 3.14
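A tiny, hedged sketch of these fixed-width constructors in use, inside a
function that returns ``NULL`` on error::

    PyObject *a = PyLong_FromInt64(-1234567890123LL);
    PyObject *b = PyLong_FromUInt32(4000000000U);
    if (a == NULL || b == NULL) {
        Py_XDECREF(a);
        Py_XDECREF(b);
        return NULL;
    }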
.. c:function:: PyObject* PyLong_FromDouble(double v)
Return a new :c:type:`PyLongObject` object from the integer part of *v*, or
@ -94,9 +114,9 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
ignored. If there are no digits or *str* is not NULL-terminated following the
digits and trailing whitespace, :exc:`ValueError` will be raised.
.. seealso:: Python methods :meth:`int.to_bytes` and :meth:`int.from_bytes`
to convert a :c:type:`PyLongObject` to/from an array of bytes in base
``256``. You can call those from C using :c:func:`PyObject_CallMethod`.
.. seealso:: :c:func:`PyLong_AsNativeBytes()` and
:c:func:`PyLong_FromNativeBytes()` functions can be used to convert
a :c:type:`PyLongObject` to/from an array of bytes in base ``256``.
.. c:function:: PyObject* PyLong_FromUnicodeObject(PyObject *u, int base)
@ -113,11 +133,36 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
retrieved from the resulting value using :c:func:`PyLong_AsVoidPtr`.
.. XXX alias PyLong_AS_LONG (for now)
.. c:function:: PyObject* PyLong_FromNativeBytes(const void* buffer, size_t n_bytes, int flags)
Create a Python integer from the value contained in the first *n_bytes* of
*buffer*, interpreted as a two's-complement signed number.
*flags* are as for :c:func:`PyLong_AsNativeBytes`. Passing ``-1`` will select
the native endian that CPython was compiled with and assume that the
most-significant bit is a sign bit. Passing
``Py_ASNATIVEBYTES_UNSIGNED_BUFFER`` will produce the same result as calling
:c:func:`PyLong_FromUnsignedNativeBytes`. Other flags are ignored.
.. versionadded:: 3.13
.. c:function:: PyObject* PyLong_FromUnsignedNativeBytes(const void* buffer, size_t n_bytes, int flags)
Create a Python integer from the value contained in the first *n_bytes* of
*buffer*, interpreted as an unsigned number.
*flags* are as for :c:func:`PyLong_AsNativeBytes`. Passing ``-1`` will select
the native endian that CPython was compiled with and assume that the
most-significant bit is not a sign bit. Flags other than endian are ignored.
.. versionadded:: 3.13
.. c:function:: long PyLong_AsLong(PyObject *obj)
.. index::
single: LONG_MAX
single: LONG_MAX (C macro)
single: OverflowError (built-in exception)
Return a C :c:expr:`long` representation of *obj*. If *obj* is not an
@ -135,6 +180,16 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. versionchanged:: 3.10
This function will no longer use :meth:`~object.__int__`.
.. c:namespace:: NULL
.. c:function:: long PyLong_AS_LONG(PyObject *obj)
A :term:`soft deprecated` alias.
Exactly equivalent to the preferred ``PyLong_AsLong``. In particular,
it can fail with :exc:`OverflowError` or another exception.
.. deprecated:: 3.14
The function is soft deprecated.
.. c:function:: int PyLong_AsInt(PyObject *obj)
@ -210,7 +265,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: Py_ssize_t PyLong_AsSsize_t(PyObject *pylong)
.. index::
single: PY_SSIZE_T_MAX
single: PY_SSIZE_T_MAX (C macro)
single: OverflowError (built-in exception)
Return a C :c:type:`Py_ssize_t` representation of *pylong*. *pylong* must
@ -225,7 +280,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: unsigned long PyLong_AsUnsignedLong(PyObject *pylong)
.. index::
single: ULONG_MAX
single: ULONG_MAX (C macro)
single: OverflowError (built-in exception)
Return a C :c:expr:`unsigned long` representation of *pylong*. *pylong*
@ -241,7 +296,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: size_t PyLong_AsSize_t(PyObject *pylong)
.. index::
single: SIZE_MAX
single: SIZE_MAX (C macro)
single: OverflowError (built-in exception)
Return a C :c:type:`size_t` representation of *pylong*. *pylong* must be
@ -311,6 +366,43 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
This function will no longer use :meth:`~object.__int__`.
.. c:function:: int PyLong_AsInt32(PyObject *obj, int32_t *value)
int PyLong_AsInt64(PyObject *obj, int64_t *value)
Set *\*value* to a signed C :c:expr:`int32_t` or :c:expr:`int64_t`
representation of *obj*.
If the *obj* value is out of range, raise an :exc:`OverflowError`.
Set *\*value* and return ``0`` on success.
Set an exception and return ``-1`` on error.
*value* must not be ``NULL``.
.. versionadded:: 3.14
.. c:function:: int PyLong_AsUInt32(PyObject *obj, uint32_t *value)
int PyLong_AsUInt64(PyObject *obj, uint64_t *value)
Set *\*value* to an unsigned C :c:expr:`uint32_t` or :c:expr:`uint64_t`
representation of *obj*.
If *obj* is not an instance of :c:type:`PyLongObject`, first call its
:meth:`~object.__index__` method (if present) to convert it to a
:c:type:`PyLongObject`.
* If *obj* is negative, raise a :exc:`ValueError`.
* If the *obj* value is out of range, raise an :exc:`OverflowError`.
Set *\*value* and return ``0`` on success.
Set an exception and return ``-1`` on error.
*value* must not be ``NULL``.
.. versionadded:: 3.14
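As a brief sketch, converting an arbitrary Python object *obj* to a
fixed-width C integer with overflow checking might look like::

    int64_t value;
    if (PyLong_AsInt64(obj, &value) < 0) {
        return NULL;    /* TypeError or OverflowError already set */
    }
    /* ... use value ... */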
.. c:function:: double PyLong_AsDouble(PyObject *pylong)
Return a C :c:expr:`double` representation of *pylong*. *pylong* must be
@ -332,6 +424,208 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
Returns ``NULL`` on error. Use :c:func:`PyErr_Occurred` to disambiguate.
.. c:function:: Py_ssize_t PyLong_AsNativeBytes(PyObject *pylong, void* buffer, Py_ssize_t n_bytes, int flags)
Copy the Python integer value *pylong* to a native *buffer* of size
*n_bytes*. The *flags* can be set to ``-1`` to behave similarly to a C cast,
or to values documented below to control the behavior.
Returns ``-1`` with an exception raised on error. This may happen if
*pylong* cannot be interpreted as an integer, or if *pylong* was negative
and the ``Py_ASNATIVEBYTES_REJECT_NEGATIVE`` flag was set.
Otherwise, returns the number of bytes required to store the value.
If this is equal to or less than *n_bytes*, the entire value was copied.
All *n_bytes* of the buffer are written: large buffers are padded with
zeroes.
If the returned value is greater than *n_bytes*, the value was
truncated: as many of the lowest bits of the value as could fit are written,
and the higher bits are ignored. This matches the typical behavior
of a C-style downcast.
.. note::
Overflow is not considered an error. If the returned value
is larger than *n_bytes*, most significant bits were discarded.
``0`` will never be returned.
Values are always copied as two's-complement.
Usage example::
int32_t value;
Py_ssize_t bytes = PyLong_AsNativeBytes(pylong, &value, sizeof(value), -1);
if (bytes < 0) {
// Failed. A Python exception was set with the reason.
return NULL;
}
else if (bytes <= (Py_ssize_t)sizeof(value)) {
// Success!
}
else {
// Overflow occurred, but 'value' contains the truncated
// lowest bits of pylong.
}
Passing zero to *n_bytes* will return the size of a buffer that would
be large enough to hold the value. This may be larger than technically
necessary, but not unreasonably so. If *n_bytes=0*, *buffer* may be
``NULL``.
.. note::
Passing *n_bytes=0* to this function is not an accurate way to determine
the bit length of the value.
To get at the entire Python value of an unknown size, the function can be
called twice: first to determine the buffer size, then to fill it::
// Ask how much space we need.
Py_ssize_t expected = PyLong_AsNativeBytes(pylong, NULL, 0, -1);
if (expected < 0) {
// Failed. A Python exception was set with the reason.
return NULL;
}
assert(expected != 0); // Impossible per the API definition.
uint8_t *bignum = malloc(expected);
if (!bignum) {
PyErr_SetString(PyExc_MemoryError, "bignum malloc failed.");
return NULL;
}
// Safely get the entire value.
Py_ssize_t bytes = PyLong_AsNativeBytes(pylong, bignum, expected, -1);
if (bytes < 0) { // Exception has been set.
free(bignum);
return NULL;
}
else if (bytes > expected) { // This should not be possible.
PyErr_SetString(PyExc_RuntimeError,
"Unexpected bignum truncation after a size check.");
free(bignum);
return NULL;
}
// The expected success given the above pre-check.
// ... use bignum ...
free(bignum);
*flags* is either ``-1`` (``Py_ASNATIVEBYTES_DEFAULTS``) to select defaults
that behave most like a C cast, or a combination of the other flags in
the table below.
Note that ``-1`` cannot be combined with other flags.
Currently, ``-1`` corresponds to
``Py_ASNATIVEBYTES_NATIVE_ENDIAN | Py_ASNATIVEBYTES_UNSIGNED_BUFFER``.
.. c:namespace:: NULL
===============================================  ======
Flag                                             Value
===============================================  ======
.. c:macro:: Py_ASNATIVEBYTES_DEFAULTS           ``-1``
.. c:macro:: Py_ASNATIVEBYTES_BIG_ENDIAN         ``0``
.. c:macro:: Py_ASNATIVEBYTES_LITTLE_ENDIAN      ``1``
.. c:macro:: Py_ASNATIVEBYTES_NATIVE_ENDIAN      ``3``
.. c:macro:: Py_ASNATIVEBYTES_UNSIGNED_BUFFER    ``4``
.. c:macro:: Py_ASNATIVEBYTES_REJECT_NEGATIVE    ``8``
.. c:macro:: Py_ASNATIVEBYTES_ALLOW_INDEX        ``16``
===============================================  ======
Specifying ``Py_ASNATIVEBYTES_NATIVE_ENDIAN`` will override any other endian
flags. Passing ``2`` is reserved.
By default, sufficient buffer will be requested to include a sign bit.
For example, when converting 128 with *n_bytes=1*, the function will return
2 (or more) in order to store a zero sign bit.
If ``Py_ASNATIVEBYTES_UNSIGNED_BUFFER`` is specified, a zero sign bit
will be omitted from size calculations. This allows, for example, 128 to fit
in a single-byte buffer. If the destination buffer is later treated as
signed, a positive input value may become negative.
Note that the flag does not affect handling of negative values: for those,
space for a sign bit is always requested.
Specifying ``Py_ASNATIVEBYTES_REJECT_NEGATIVE`` causes an exception to be set
if *pylong* is negative. Without this flag, negative values will be copied
provided there is enough space for at least one sign bit, regardless of
whether ``Py_ASNATIVEBYTES_UNSIGNED_BUFFER`` was specified.
If ``Py_ASNATIVEBYTES_ALLOW_INDEX`` is specified and a non-integer value is
passed, its :meth:`~object.__index__` method will be called first. This may
result in Python code executing and other threads being allowed to run, which
could cause changes to other objects or values in use. When *flags* is
``-1``, this option is not set, and non-integer values will raise
:exc:`TypeError`.
.. note::
With the default *flags* (``-1``, or *UNSIGNED_BUFFER* without
*REJECT_NEGATIVE*), multiple Python integers can map to a single value
without overflow. For example, both ``255`` and ``-1`` fit a single-byte
buffer and set all its bits.
This matches typical C cast behavior.
.. versionadded:: 3.13
.. c:function:: int PyLong_GetSign(PyObject *obj, int *sign)
Get the sign of the integer object *obj*.
On success, set *\*sign* to the integer sign (0, -1 or +1 for zero, negative or
positive integer, respectively) and return 0.
On failure, return -1 with an exception set. This function always succeeds
if *obj* is a :c:type:`PyLongObject` or its subtype.
.. versionadded:: 3.14
.. c:function:: int PyLong_IsPositive(PyObject *obj)
Check if the integer object *obj* is positive (``obj > 0``).
If *obj* is an instance of :c:type:`PyLongObject` or its subtype,
return ``1`` when it's positive and ``0`` otherwise. Else set an
exception and return ``-1``.
.. versionadded:: 3.14
.. c:function:: int PyLong_IsNegative(PyObject *obj)
Check if the integer object *obj* is negative (``obj < 0``).
If *obj* is an instance of :c:type:`PyLongObject` or its subtype,
return ``1`` when it's negative and ``0`` otherwise. Else set an
exception and return ``-1``.
.. versionadded:: 3.14
.. c:function:: int PyLong_IsZero(PyObject *obj)
Check if the integer object *obj* is zero.
If *obj* is an instance of :c:type:`PyLongObject` or its subtype,
return ``1`` when it's zero and ``0`` otherwise. Else set an
exception and return ``-1``.
.. versionadded:: 3.14
.. c:function:: PyObject* PyLong_GetInfo(void)
On success, return a read-only :term:`named tuple` that holds
information about Python's internal representation of integers.
See :data:`sys.int_info` for description of individual fields.
On failure, return ``NULL`` with an exception set.
.. versionadded:: 3.1
.. c:function:: int PyUnstable_Long_IsCompact(const PyLongObject* op)
Return 1 if *op* is compact, 0 otherwise.
@ -340,13 +634,16 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
a “fast path” for small integers. For compact values use
:c:func:`PyUnstable_Long_CompactValue`; for others fall back to a
:c:func:`PyLong_As* <PyLong_AsSize_t>` function or
:c:func:`calling <PyObject_CallMethod>` :meth:`int.to_bytes`.
:c:func:`PyLong_AsNativeBytes`.
The speedup is expected to be negligible for most users.
Exactly what values are considered compact is an implementation detail
and is subject to change.
.. versionadded:: 3.12
.. c:function:: Py_ssize_t PyUnstable_Long_CompactValue(const PyLongObject* op)
If *op* is compact, as determined by :c:func:`PyUnstable_Long_IsCompact`,
@ -354,3 +651,179 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
Otherwise, the return value is undefined.
.. versionadded:: 3.12
Export API
^^^^^^^^^^
.. versionadded:: 3.14
.. c:struct:: PyLongLayout
Layout of an array of "digits" ("limbs" in the GMP terminology), used to
represent absolute value for arbitrary precision integers.
Use :c:func:`PyLong_GetNativeLayout` to get the native layout of Python
:class:`int` objects, used internally for integers with "big enough"
absolute value.
See also :data:`sys.int_info` which exposes similar information in Python.
.. c:member:: uint8_t bits_per_digit
Bits per digit. For example, a 15 bit digit means that bits 0-14 contain
meaningful information.
.. c:member:: uint8_t digit_size
Digit size in bytes. For example, a 15 bit digit will require at least 2
bytes.
.. c:member:: int8_t digits_order
Digits order:
- ``1`` for most significant digit first
- ``-1`` for least significant digit first
.. c:member:: int8_t digit_endianness
Digit endianness:
- ``1`` for most significant byte first (big endian)
- ``-1`` for least significant byte first (little endian)
.. c:function:: const PyLongLayout* PyLong_GetNativeLayout(void)
Get the native layout of Python :class:`int` objects.
See the :c:struct:`PyLongLayout` structure.
The function must not be called before Python initialization nor after
Python finalization. The returned layout is valid until Python is
finalized. The layout is the same for all Python sub-interpreters
in a process, and so it can be cached.
.. c:struct:: PyLongExport
Export of a Python :class:`int` object.
There are two cases:
* If :c:member:`digits` is ``NULL``, only use the :c:member:`value` member.
* If :c:member:`digits` is not ``NULL``, use :c:member:`negative`,
:c:member:`ndigits` and :c:member:`digits` members.
.. c:member:: int64_t value
The native integer value of the exported :class:`int` object.
Only valid if :c:member:`digits` is ``NULL``.
.. c:member:: uint8_t negative
``1`` if the number is negative, ``0`` otherwise.
Only valid if :c:member:`digits` is not ``NULL``.
.. c:member:: Py_ssize_t ndigits
Number of digits in :c:member:`digits` array.
Only valid if :c:member:`digits` is not ``NULL``.
.. c:member:: const void *digits
Read-only array of unsigned digits. Can be ``NULL``.
.. c:function:: int PyLong_Export(PyObject *obj, PyLongExport *export_long)
Export a Python :class:`int` object.
*export_long* must point to a :c:struct:`PyLongExport` structure allocated
by the caller. It must not be ``NULL``.
On success, fill in *\*export_long* and return ``0``.
On error, set an exception and return ``-1``.
:c:func:`PyLong_FreeExport` must be called when the export is no longer
needed.
.. impl-detail::
This function always succeeds if *obj* is a Python :class:`int` object
or a subclass.
.. c:function:: void PyLong_FreeExport(PyLongExport *export_long)
Release the export *export_long* created by :c:func:`PyLong_Export`.
.. impl-detail::
Calling :c:func:`PyLong_FreeExport` is optional if *export_long->digits*
is ``NULL``.
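A minimal sketch of the export side, assuming *obj* is known to be an
:class:`int` and the surrounding function returns ``-1`` on error::

    PyLongExport export_long;
    if (PyLong_Export(obj, &export_long) < 0) {
        return -1;
    }
    if (export_long.digits == NULL) {
        int64_t value = export_long.value;
        /* ... use the small value directly ... */
    }
    else {
        /* ... read export_long.ndigits digits from export_long.digits,
           laid out as described by PyLong_GetNativeLayout();
           export_long.negative gives the sign ... */
    }
    PyLong_FreeExport(&export_long);
    return 0;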
PyLongWriter API
^^^^^^^^^^^^^^^^
The :c:type:`PyLongWriter` API can be used to import an integer.
.. versionadded:: 3.14
.. c:struct:: PyLongWriter
A Python :class:`int` writer instance.
The instance must be destroyed by :c:func:`PyLongWriter_Finish` or
:c:func:`PyLongWriter_Discard`.
.. c:function:: PyLongWriter* PyLongWriter_Create(int negative, Py_ssize_t ndigits, void **digits)
Create a :c:type:`PyLongWriter`.
On success, allocate *\*digits* and return a writer.
On error, set an exception and return ``NULL``.
*negative* is ``1`` if the number is negative, or ``0`` otherwise.
*ndigits* is the number of digits in the *digits* array. It must be
greater than 0.
*digits* must not be NULL.
After a successful call to this function, the caller should fill in the
array of digits *digits* and then call :c:func:`PyLongWriter_Finish` to get
a Python :class:`int`.
The layout of *digits* is described by :c:func:`PyLong_GetNativeLayout`.
Digits must be in the range [``0``; ``(1 << bits_per_digit) - 1``]
(where the :c:struct:`~PyLongLayout.bits_per_digit` is the number of bits
per digit).
Any unused most significant digits must be set to ``0``.
Alternately, call :c:func:`PyLongWriter_Discard` to destroy the writer
instance without creating an :class:`~int` object.
.. c:function:: PyObject* PyLongWriter_Finish(PyLongWriter *writer)
Finish a :c:type:`PyLongWriter` created by :c:func:`PyLongWriter_Create`.
On success, return a Python :class:`int` object.
On error, set an exception and return ``NULL``.
The function takes care of normalizing the digits and converts the object
to a compact integer if needed.
The writer instance and the *digits* array are invalid after the call.
.. c:function:: void PyLongWriter_Discard(PyLongWriter *writer)
Discard a :c:type:`PyLongWriter` created by :c:func:`PyLongWriter_Create`.
If *writer* is ``NULL``, no operation is performed.
The writer instance and the *digits* array are invalid after the call.
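To illustrate the writer side, here is a sketch that builds a small positive
:class:`int`. It hard-codes an assumed layout of 30-bit digits stored in
``uint32_t`` slots with the least significant digit first; real code should
query :c:func:`PyLong_GetNativeLayout` instead of assuming this::

    void *digits;
    PyLongWriter *writer = PyLongWriter_Create(0, 2, &digits);
    if (writer == NULL) {
        return NULL;
    }
    uint32_t *d = (uint32_t *)digits;   /* assumed digit type */
    d[0] = 123;                         /* assumed least significant first */
    d[1] = 1;                           /* value: 1 * 2**30 + 123 */
    return PyLongWriter_Finish(writer); /* NULL with an exception on error */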
View file
@ -13,11 +13,12 @@ binary mode.
Numeric values are stored with the least significant byte first.
The module supports two versions of the data format: version 0 is the
historical version, version 1 shares interned strings in the file, and upon
unmarshalling. Version 2 uses a binary format for floating point numbers.
``Py_MARSHAL_VERSION`` indicates the current file format (currently 2).
The module supports several versions of the data format; see
the :py:mod:`Python module documentation <marshal>` for details.
.. c:macro:: Py_MARSHAL_VERSION
The current format version. See :py:data:`marshal.version`.
.. c:function:: void PyMarshal_WriteLongToFile(long value, FILE *file, int version)
View file
@ -41,10 +41,10 @@ buffers is performed on demand by the Python memory manager through the Python/C
API functions listed in this document.
.. index::
single: malloc()
single: calloc()
single: realloc()
single: free()
single: malloc (C function)
single: calloc (C function)
single: realloc (C function)
single: free (C function)
To avoid memory corruption, extension writers should never try to operate on
Python objects with the functions exported by the C library: :c:func:`malloc`,
@ -102,37 +102,45 @@ All allocating functions belong to one of three different "domains" (see also
strategies and are optimized for different purposes. The specific details on
how every domain allocates memory or what internal functions each domain calls
is considered an implementation detail, but for debugging purposes a simplified
table can be found at :ref:`here <default-memory-allocators>`. There is no hard
requirement to use the memory returned by the allocation functions belonging to
a given domain for only the purposes hinted by that domain (although this is the
recommended practice). For example, one could use the memory returned by
:c:func:`PyMem_RawMalloc` for allocating Python objects or the memory returned
by :c:func:`PyObject_Malloc` for allocating memory for buffers.
table can be found at :ref:`here <default-memory-allocators>`.
The APIs used to allocate and free a block of memory must be from the same domain.
For example, :c:func:`PyMem_Free` must be used to free memory allocated using :c:func:`PyMem_Malloc`.
The three allocation domains are:
* Raw domain: intended for allocating memory for general-purpose memory
buffers where the allocation *must* go to the system allocator or where the
allocator can operate without the :term:`GIL`. The memory is requested directly
to the system.
allocator can operate without an :term:`attached thread state`. The memory
is requested directly from the system. See :ref:`Raw Memory Interface <raw-memoryinterface>`.
* "Mem" domain: intended for allocating memory for Python buffers and
general-purpose memory buffers where the allocation must be performed with
the :term:`GIL` held. The memory is taken from the Python private heap.
an :term:`attached thread state`. The memory is taken from the Python private heap.
See :ref:`Memory Interface <memoryinterface>`.
* Object domain: intended for allocating memory belonging to Python objects. The
memory is taken from the Python private heap.
* Object domain: intended for allocating memory for Python objects. The
memory is taken from the Python private heap. See :ref:`Object allocators <objectinterface>`.
When freeing memory previously allocated by the allocating functions belonging to a
given domain,the matching specific deallocating functions must be used. For example,
:c:func:`PyMem_Free` must be used to free memory allocated using :c:func:`PyMem_Malloc`.
.. note::
The :term:`free-threaded <free threading>` build requires that only Python objects are allocated using the "object" domain
and that all Python objects are allocated using that domain. This differs from the prior Python versions,
where this was only a best practice and not a hard requirement.
For example, buffers (non-Python objects) should be allocated using :c:func:`PyMem_Malloc`,
:c:func:`PyMem_RawMalloc`, or :c:func:`malloc`, but not :c:func:`PyObject_Malloc`.
See :ref:`Memory Allocation APIs <free-threaded-memory-allocation>`.
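For example, a minimal sketch of a correctly paired allocation and release in
the "mem" domain, as required above::

    char *buf = (char *)PyMem_Malloc(1024);
    if (buf == NULL) {
        return PyErr_NoMemory();
    }
    /* ... fill and use buf ... */
    PyMem_Free(buf);    /* same domain: not free() and not PyObject_Free() */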
.. _raw-memoryinterface:
Raw Memory Interface
====================
The following function sets are wrappers to the system allocator. These
functions are thread-safe, the :term:`GIL <global interpreter lock>` does not
need to be held.
functions are thread-safe, so a :term:`thread state` does not
need to be :term:`attached <attached thread state>`.
The :ref:`default raw memory allocator <default-memory-allocators>` uses
the following functions: :c:func:`malloc`, :c:func:`calloc`, :c:func:`realloc`
@ -205,8 +213,7 @@ The :ref:`default memory allocator <default-memory-allocators>` uses the
.. warning::
The :term:`GIL <global interpreter lock>` must be held when using these
functions.
There must be an :term:`attached thread state` when using these functions.
.. versionchanged:: 3.6
@ -267,14 +274,14 @@ The following type-oriented macros are provided for convenience. Note that
.. c:macro:: PyMem_New(TYPE, n)
Same as :c:func:`PyMem_Malloc`, but allocates ``(n * sizeof(TYPE))`` bytes of
memory. Returns a pointer cast to :c:expr:`TYPE*`. The memory will not have
memory. Returns a pointer cast to ``TYPE*``. The memory will not have
been initialized in any way.
.. c:macro:: PyMem_Resize(p, TYPE, n)
Same as :c:func:`PyMem_Realloc`, but the memory block is resized to ``(n *
sizeof(TYPE))`` bytes. Returns a pointer cast to :c:expr:`TYPE*`. On return,
sizeof(TYPE))`` bytes. Returns a pointer cast to ``TYPE*``. On return,
*p* will be a pointer to the new memory area, or ``NULL`` in the event of
failure.
@ -299,6 +306,8 @@ versions and is therefore deprecated in extension modules.
* ``PyMem_DEL(ptr)``
.. _objectinterface:
Object allocators
=================
@ -317,8 +326,7 @@ The :ref:`default object allocator <default-memory-allocators>` uses the
.. warning::
The :term:`GIL <global interpreter lock>` must be held when using these
functions.
There must be an :term:`attached thread state` when using these functions.
.. c:function:: void* PyObject_Malloc(size_t n)
@ -475,12 +483,12 @@ Customize Memory Allocators
zero bytes.
For the :c:macro:`PYMEM_DOMAIN_RAW` domain, the allocator must be
thread-safe: the :term:`GIL <global interpreter lock>` is not held when the
allocator is called.
thread-safe: a :term:`thread state` is not :term:`attached <attached thread state>`
when the allocator is called.
For the remaining domains, the allocator must also be thread-safe:
the allocator may be called in different interpreters that do not
share a ``GIL``.
share a :term:`GIL`.
If the new allocator is not a hook (does not call the previous allocator),
the :c:func:`PyMem_SetupDebugHooks` function must be called to reinstall the
@ -497,8 +505,8 @@ Customize Memory Allocators
:c:func:`Py_InitializeFromConfig` to install a custom memory
allocator. There are no restrictions over the installed allocator
other than the ones imposed by the domain (for instance, the Raw
Domain allows the allocator to be called without the GIL held). See
:ref:`the section on allocator domains <allocator-domains>` for more
Domain allows the allocator to be called without an :term:`attached thread state`).
See :ref:`the section on allocator domains <allocator-domains>` for more
information.
* If called after Python has finish initializing (after
@ -545,7 +553,7 @@ Runtime checks:
called on a memory block allocated by :c:func:`PyMem_Malloc`.
- Detect write before the start of the buffer (buffer underflow).
- Detect write after the end of the buffer (buffer overflow).
- Check that the :term:`GIL <global interpreter lock>` is held when
- Check that there is an :term:`attached thread state` when
allocator functions of :c:macro:`PYMEM_DOMAIN_OBJ` (ex:
:c:func:`PyObject_Malloc`) and :c:macro:`PYMEM_DOMAIN_MEM` (ex:
:c:func:`PyMem_Malloc`) domains are called.
@ -610,8 +618,8 @@ PYMEM_CLEANBYTE (meaning uninitialized memory is getting used).
The :c:func:`PyMem_SetupDebugHooks` function now also works on Python
compiled in release mode. On error, the debug hooks now use
:mod:`tracemalloc` to get the traceback where a memory block was allocated.
The debug hooks now also check if the GIL is held when functions of
:c:macro:`PYMEM_DOMAIN_OBJ` and :c:macro:`PYMEM_DOMAIN_MEM` domains are
The debug hooks now also check if there is an :term:`attached thread state` when
functions of :c:macro:`PYMEM_DOMAIN_OBJ` and :c:macro:`PYMEM_DOMAIN_MEM` domains are
called.
.. versionchanged:: 3.8
@ -734,7 +742,7 @@ The same code using the type-oriented function set::
return PyErr_NoMemory();
/* ...Do some I/O operation involving buf... */
res = PyBytes_FromString(buf);
PyMem_Del(buf); /* allocated with PyMem_New */
PyMem_Free(buf); /* allocated with PyMem_New */
return res;
Note that in the two examples above, the buffer is always manipulated via
@ -750,11 +758,11 @@ allocators operating on different heaps. ::
...
PyMem_Del(buf3); /* Wrong -- should be PyMem_Free() */
free(buf2); /* Right -- allocated via malloc() */
free(buf1); /* Fatal -- should be PyMem_Del() */
free(buf1); /* Fatal -- should be PyMem_Free() */
In addition to the functions aimed at handling raw memory blocks from the Python
heap, objects in Python are allocated and released with :c:macro:`PyObject_New`,
:c:macro:`PyObject_NewVar` and :c:func:`PyObject_Del`.
:c:macro:`PyObject_NewVar` and :c:func:`PyObject_Free`.
These will be explained in the next chapter on defining and implementing new
object types in C.

View file

@ -20,6 +20,17 @@ any other object.
read/write, otherwise it may be either read-only or read/write at the
discretion of the exporter.
.. c:macro:: PyBUF_READ
Flag to request a readonly buffer.
.. c:macro:: PyBUF_WRITE
Flag to request a writable buffer.
.. c:function:: PyObject *PyMemoryView_FromMemory(char *mem, Py_ssize_t size, int flags)
Create a memoryview object using *mem* as the underlying buffer.
@ -41,6 +52,8 @@ any other object.
original memory. Otherwise, a copy is made and the memoryview points to a
new bytes object.
*buffertype* can be one of :c:macro:`PyBUF_READ` or :c:macro:`PyBUF_WRITE`.
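As a brief illustrative sketch (the buffer name and contents are arbitrary), a read-only view over a static C buffer could be created like this::

    static char data[] = "spam";
    PyObject *view = PyMemoryView_FromMemory(data, sizeof(data) - 1, PyBUF_READ);
    if (view == NULL) {
        /* an exception is set on failure */
    }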
.. c:function:: int PyMemoryView_Check(PyObject *obj)

View file

@ -37,16 +37,19 @@ Module Objects
single: __package__ (module attribute)
single: __loader__ (module attribute)
Return a new module object with the :attr:`__name__` attribute set to *name*.
The module's :attr:`__name__`, :attr:`__doc__`, :attr:`__package__`, and
:attr:`__loader__` attributes are filled in (all but :attr:`__name__` are set
to ``None``); the caller is responsible for providing a :attr:`__file__`
attribute.
Return a new module object with :attr:`module.__name__` set to *name*.
The module's :attr:`!__name__`, :attr:`~module.__doc__`,
:attr:`~module.__package__` and :attr:`~module.__loader__` attributes are
filled in (all but :attr:`!__name__` are set to ``None``). The caller is
responsible for setting a :attr:`~module.__file__` attribute.
Return ``NULL`` with an exception set on error.
.. versionadded:: 3.3
.. versionchanged:: 3.4
:attr:`__package__` and :attr:`__loader__` are set to ``None``.
:attr:`~module.__package__` and :attr:`~module.__loader__` are now set to
``None``.
.. c:function:: PyObject* PyModule_New(const char *name)
@ -75,8 +78,9 @@ Module Objects
single: __name__ (module attribute)
single: SystemError (built-in exception)
Return *module*'s :attr:`__name__` value. If the module does not provide one,
or if it is not a string, :exc:`SystemError` is raised and ``NULL`` is returned.
Return *module*'s :attr:`~module.__name__` value. If the module does not
provide one, or if it is not a string, :exc:`SystemError` is raised and
``NULL`` is returned.
.. versionadded:: 3.3
@ -106,8 +110,8 @@ Module Objects
single: SystemError (built-in exception)
Return the name of the file from which *module* was loaded using *module*'s
:attr:`__file__` attribute. If this is not defined, or if it is not a
unicode string, raise :exc:`SystemError` and return ``NULL``; otherwise return
:attr:`~module.__file__` attribute. If this is not defined, or if it is not a
string, raise :exc:`SystemError` and return ``NULL``; otherwise return
a reference to a Unicode object.
.. versionadded:: 3.2
@ -265,6 +269,8 @@ of the following two module creation functions:
API version *module_api_version*. If that version does not match the version
of the running interpreter, a :exc:`RuntimeWarning` is emitted.
Return ``NULL`` with an exception set on error.
.. note::
Most uses of this function should be using :c:func:`PyModule_Create`
@ -338,7 +344,8 @@ The available slot types are:
The *value* pointer of this slot must point to a function of the signature:
.. c:function:: PyObject* create_module(PyObject *spec, PyModuleDef *def)
:noindex:
:no-index-entry:
:no-contents-entry:
The function receives a :py:class:`~importlib.machinery.ModuleSpec`
instance, as defined in :PEP:`451`, and the module definition.
@ -373,7 +380,8 @@ The available slot types are:
The signature of the function is:
.. c:function:: int exec_module(PyObject* module)
:noindex:
:no-index-entry:
:no-contents-entry:
If multiple ``Py_mod_exec`` slots are specified, they are processed in the
order they appear in the *m_slots* array.
@ -407,10 +415,37 @@ The available slot types are:
in one module definition.
If ``Py_mod_multiple_interpreters`` is not specified, the import
machinery defaults to ``Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED``.
machinery defaults to ``Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED``.
.. versionadded:: 3.12
.. c:macro:: Py_mod_gil
Specifies one of the following values:
.. c:namespace:: NULL
.. c:macro:: Py_MOD_GIL_USED
The module depends on the presence of the global interpreter lock (GIL),
and may access global state without synchronization.
.. c:macro:: Py_MOD_GIL_NOT_USED
The module is safe to run without an active GIL.
This slot is ignored by Python builds not configured with
:option:`--disable-gil`. Otherwise, it determines whether or not importing
this module will cause the GIL to be automatically enabled. See
:ref:`whatsnew313-free-threaded-cpython` for more detail.
Multiple ``Py_mod_gil`` slots may not be specified in one module definition.
If ``Py_mod_gil`` is not specified, the import machinery defaults to
``Py_MOD_GIL_USED``.
.. versionadded:: 3.13
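For example, a sketch of a slots array for a module that declares it does not rely on the GIL might look like this (the exec function and array name are illustrative only)::

    static PyModuleDef_Slot example_slots[] = {
        {Py_mod_exec, example_exec},   /* hypothetical exec function */
        {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED},
        {Py_mod_gil, Py_MOD_GIL_NOT_USED},
        {0, NULL}
    };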
See :PEP:`489` for more details on multi-phase initialization.
Low-level module creation functions
@ -436,6 +471,8 @@ objects dynamically. Note that both ``PyModule_FromDefAndSpec`` and
If that version does not match the version of the running interpreter,
a :exc:`RuntimeWarning` is emitted.
Return ``NULL`` with an exception set on error.
.. note::
Most uses of this function should be using :c:func:`PyModule_FromDefAndSpec`
@ -486,9 +523,6 @@ state:
On success, return ``0``. On error, raise an exception and return ``-1``.
Return ``NULL`` if *value* is ``NULL``. It must be called with an exception
raised in this case.
Example usage::
static int
@ -503,6 +537,10 @@ state:
return res;
}
For convenience, the function accepts a ``NULL`` *value* with an exception
set. In this case, it returns ``-1`` and leaves the raised exception
unchanged.
The example can also be written without checking explicitly if *obj* is
``NULL``::
@ -518,6 +556,14 @@ state:
Note that ``Py_XDECREF()`` should be used instead of ``Py_DECREF()`` in
this case, since *obj* can be ``NULL``.
The number of different *name* strings passed to this function
should be kept small, usually by only using statically allocated strings
as *name*.
For names that aren't known at compile time, prefer calling
:c:func:`PyUnicode_FromString` and :c:func:`PyObject_SetAttr` directly.
For more details, see :c:func:`PyUnicode_InternFromString`, which may be
used internally to create a key object.
.. versionadded:: 3.10
@ -576,15 +622,23 @@ state:
.. c:function:: int PyModule_AddIntConstant(PyObject *module, const char *name, long value)
Add an integer constant to *module* as *name*. This convenience function can be
used from the module's initialization function. Return ``-1`` on error, ``0`` on
success.
used from the module's initialization function.
Return ``-1`` with an exception set on error, ``0`` on success.
This is a convenience function that calls :c:func:`PyLong_FromLong` and
:c:func:`PyModule_AddObjectRef`; see their documentation for details.
.. c:function:: int PyModule_AddStringConstant(PyObject *module, const char *name, const char *value)
Add a string constant to *module* as *name*. This convenience function can be
used from the module's initialization function. The string *value* must be
``NULL``-terminated. Return ``-1`` on error, ``0`` on success.
``NULL``-terminated.
Return ``-1`` with an exception set on error, ``0`` on success.
This is a convenience function that calls
:c:func:`PyUnicode_InternFromString` and :c:func:`PyModule_AddObjectRef`;
see their documentation for details.
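A short, hypothetical sketch of both helpers in a module initialization function (the constant names and function name are illustrative)::

    static int
    example_exec(PyObject *module)
    {
        if (PyModule_AddIntConstant(module, "MAX_RETRIES", 3) < 0) {
            return -1;
        }
        if (PyModule_AddStringConstant(module, "DEFAULT_ENCODING", "utf-8") < 0) {
            return -1;
        }
        return 0;
    }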
.. c:macro:: PyModule_AddIntMacro(module, macro)
@ -592,7 +646,7 @@ state:
Add an int constant to *module*. The name and the value are taken from
*macro*. For example ``PyModule_AddIntMacro(module, AF_INET)`` adds the int
constant *AF_INET* with the value of *AF_INET* to *module*.
Return ``-1`` on error, ``0`` on success.
Return ``-1`` with an exception set on error, ``0`` on success.
.. c:macro:: PyModule_AddStringMacro(module, macro)
@ -605,10 +659,23 @@ state:
The type object is finalized internally by calling :c:func:`PyType_Ready`.
The name of the type object is taken from the last component of
:c:member:`~PyTypeObject.tp_name` after dot.
Return ``-1`` on error, ``0`` on success.
Return ``-1`` with an exception set on error, ``0`` on success.
.. versionadded:: 3.9
.. c:function:: int PyUnstable_Module_SetGIL(PyObject *module, void *gil)
Indicate that *module* does or does not support running without the global
interpreter lock (GIL), using one of the values from
:c:macro:`Py_mod_gil`. It must be called during *module*'s initialization
function. If this function is not called during module initialization, the
import machinery assumes the module does not support running without the
GIL. This function is only available in Python builds configured with
:option:`--disable-gil`.
Return ``-1`` with an exception set on error, ``0`` on success.
.. versionadded:: 3.13
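For a module using legacy single-phase initialization, a hedged sketch of declaring GIL independence could look like this (``example_def`` and the module name are hypothetical, and the call is guarded because the function only exists in free-threaded builds)::

    PyMODINIT_FUNC
    PyInit_example(void)
    {
        PyObject *module = PyModule_Create(&example_def);
        if (module == NULL) {
            return NULL;
        }
    #ifdef Py_GIL_DISABLED
        if (PyUnstable_Module_SetGIL(module, Py_MOD_GIL_NOT_USED) < 0) {
            Py_DECREF(module);
            return NULL;
        }
    #endif
        return module;
    }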
Module lookup
^^^^^^^^^^^^^
@ -642,17 +709,17 @@ since multiple such modules can be created from a single definition.
mechanisms (either by calling it directly, or by referring to its
implementation for details of the required state updates).
The caller must hold the GIL.
The caller must have an :term:`attached thread state`.
Return 0 on success or -1 on failure.
Return ``-1`` with an exception set on error, ``0`` on success.
.. versionadded:: 3.3
.. c:function:: int PyState_RemoveModule(PyModuleDef *def)
Removes the module object created from *def* from the interpreter state.
Return 0 on success or -1 on failure.
Return ``-1`` with an exception set on error, ``0`` on success.
The caller must hold the GIL.
The caller must have an :term:`attached thread state`.
.. versionadded:: 3.3

Doc/c-api/monitoring.rst Normal file
View file

@ -0,0 +1,210 @@
.. highlight:: c
.. _c-api-monitoring:
Monitoring C API
================
Added in version 3.13.
An extension may need to interact with the event monitoring system. Subscribing
to events and registering callbacks can be done via the Python API exposed in
:mod:`sys.monitoring`.
Generating Execution Events
===========================
The functions below make it possible for an extension to fire monitoring
events as it emulates the execution of Python code. Each of these functions
accepts a ``PyMonitoringState`` struct which contains concise information
about the activation state of events, as well as the event arguments, which
include a ``PyObject*`` representing the code object, the instruction offset
and sometimes additional, event-specific arguments (see :mod:`sys.monitoring`
for details about the signatures of the different event callbacks).
The ``codelike`` argument should be an instance of :class:`types.CodeType`
or of a type that emulates it.
The VM disables tracing when firing an event, so there is no need for user
code to do that.
Monitoring functions should not be called with an exception set,
except those listed below as working with the current exception.
.. c:type:: PyMonitoringState
Representation of the state of an event type. It is allocated by the user
while its contents are maintained by the monitoring API functions described below.
All of the functions below return 0 on success and -1 (with an exception set) on error.
See :mod:`sys.monitoring` for descriptions of the events.
.. c:function:: int PyMonitoring_FirePyStartEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire a ``PY_START`` event.
.. c:function:: int PyMonitoring_FirePyResumeEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire a ``PY_RESUME`` event.
.. c:function:: int PyMonitoring_FirePyReturnEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject* retval)
Fire a ``PY_RETURN`` event.
.. c:function:: int PyMonitoring_FirePyYieldEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject* retval)
Fire a ``PY_YIELD`` event.
.. c:function:: int PyMonitoring_FireCallEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject* callable, PyObject *arg0)
Fire a ``CALL`` event.
.. c:function:: int PyMonitoring_FireLineEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, int lineno)
Fire a ``LINE`` event.
.. c:function:: int PyMonitoring_FireJumpEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *target_offset)
Fire a ``JUMP`` event.
.. c:function:: int PyMonitoring_FireBranchLeftEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *target_offset)
Fire a ``BRANCH_LEFT`` event.
.. c:function:: int PyMonitoring_FireBranchRightEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *target_offset)
Fire a ``BRANCH_RIGHT`` event.
.. c:function:: int PyMonitoring_FireCReturnEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *retval)
Fire a ``C_RETURN`` event.
.. c:function:: int PyMonitoring_FirePyThrowEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire a ``PY_THROW`` event with the current exception (as returned by
:c:func:`PyErr_GetRaisedException`).
.. c:function:: int PyMonitoring_FireRaiseEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire a ``RAISE`` event with the current exception (as returned by
:c:func:`PyErr_GetRaisedException`).
.. c:function:: int PyMonitoring_FireCRaiseEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire a ``C_RAISE`` event with the current exception (as returned by
:c:func:`PyErr_GetRaisedException`).
.. c:function:: int PyMonitoring_FireReraiseEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire a ``RERAISE`` event with the current exception (as returned by
:c:func:`PyErr_GetRaisedException`).
.. c:function:: int PyMonitoring_FireExceptionHandledEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire an ``EXCEPTION_HANDLED`` event with the current exception (as returned by
:c:func:`PyErr_GetRaisedException`).
.. c:function:: int PyMonitoring_FirePyUnwindEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset)
Fire a ``PY_UNWIND`` event with the current exception (as returned by
:c:func:`PyErr_GetRaisedException`).
.. c:function:: int PyMonitoring_FireStopIterationEvent(PyMonitoringState *state, PyObject *codelike, int32_t offset, PyObject *value)
Fire a ``STOP_ITERATION`` event. If ``value`` is an instance of :exc:`StopIteration`, it is used. Otherwise,
a new :exc:`StopIteration` instance is created with ``value`` as its argument.
Managing the Monitoring State
-----------------------------
Monitoring states can be managed with the help of monitoring scopes. A scope
would typically correspond to a Python function.
.. c:function:: int PyMonitoring_EnterScope(PyMonitoringState *state_array, uint64_t *version, const uint8_t *event_types, Py_ssize_t length)
Enter a monitored scope. ``event_types`` is an array of the event IDs for
events that may be fired from the scope. For example, the ID of a ``PY_START``
event is the value ``PY_MONITORING_EVENT_PY_START``, which is numerically equal
to the base-2 logarithm of ``sys.monitoring.events.PY_START``.
``state_array`` is an array with a monitoring state entry for each event in
``event_types``; it is allocated by the user but populated by
:c:func:`!PyMonitoring_EnterScope` with information about the activation state of
the event. The size of ``event_types`` (and hence also of ``state_array``)
is given in ``length``.
The ``version`` argument is a pointer to a value which should be allocated
by the user together with ``state_array`` and initialized to 0,
and then set only by :c:func:`!PyMonitoring_EnterScope` itself. It allows this
function to determine whether event states have changed since the previous call,
and to return quickly if they have not.
The scopes referred to here are lexical scopes: a function, class or method.
:c:func:`!PyMonitoring_EnterScope` should be called whenever the lexical scope is
entered. Scopes can be reentered, reusing the same *state_array* and *version*,
in situations such as emulating a recursive Python function. When a code-like's
execution is paused, such as when emulating a generator, the scope needs to
be exited and re-entered.
The macros for *event_types* are:
.. c:namespace:: NULL
.. The table is here to make the docs searchable, and to allow automatic
links to the identifiers.
================================================== =====================================
Macro Event
================================================== =====================================
.. c:macro:: PY_MONITORING_EVENT_BRANCH_LEFT :monitoring-event:`BRANCH_LEFT`
.. c:macro:: PY_MONITORING_EVENT_BRANCH_RIGHT :monitoring-event:`BRANCH_RIGHT`
.. c:macro:: PY_MONITORING_EVENT_CALL :monitoring-event:`CALL`
.. c:macro:: PY_MONITORING_EVENT_C_RAISE :monitoring-event:`C_RAISE`
.. c:macro:: PY_MONITORING_EVENT_C_RETURN :monitoring-event:`C_RETURN`
.. c:macro:: PY_MONITORING_EVENT_EXCEPTION_HANDLED :monitoring-event:`EXCEPTION_HANDLED`
.. c:macro:: PY_MONITORING_EVENT_INSTRUCTION :monitoring-event:`INSTRUCTION`
.. c:macro:: PY_MONITORING_EVENT_JUMP :monitoring-event:`JUMP`
.. c:macro:: PY_MONITORING_EVENT_LINE :monitoring-event:`LINE`
.. c:macro:: PY_MONITORING_EVENT_PY_RESUME :monitoring-event:`PY_RESUME`
.. c:macro:: PY_MONITORING_EVENT_PY_RETURN :monitoring-event:`PY_RETURN`
.. c:macro:: PY_MONITORING_EVENT_PY_START :monitoring-event:`PY_START`
.. c:macro:: PY_MONITORING_EVENT_PY_THROW :monitoring-event:`PY_THROW`
.. c:macro:: PY_MONITORING_EVENT_PY_UNWIND :monitoring-event:`PY_UNWIND`
.. c:macro:: PY_MONITORING_EVENT_PY_YIELD :monitoring-event:`PY_YIELD`
.. c:macro:: PY_MONITORING_EVENT_RAISE :monitoring-event:`RAISE`
.. c:macro:: PY_MONITORING_EVENT_RERAISE :monitoring-event:`RERAISE`
.. c:macro:: PY_MONITORING_EVENT_STOP_ITERATION :monitoring-event:`STOP_ITERATION`
================================================== =====================================
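As a rough sketch only (``codelike`` is assumed to be a suitable code-like object, and error handling is abbreviated), firing a single ``PY_START`` event from a scope could look like::

    static int
    emit_py_start(PyObject *codelike)
    {
        static uint64_t version = 0;
        PyMonitoringState state[1];
        uint8_t events[] = { PY_MONITORING_EVENT_PY_START };

        if (PyMonitoring_EnterScope(state, &version, events, 1) < 0) {
            return -1;
        }
        int res = PyMonitoring_FirePyStartEvent(&state[0], codelike, 0);
        if (PyMonitoring_ExitScope() < 0) {
            return -1;
        }
        return res;
    }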
.. c:function:: int PyMonitoring_ExitScope(void)
Exit the last scope that was entered with :c:func:`!PyMonitoring_EnterScope`.
.. c:function:: int PY_MONITORING_IS_INSTRUMENTED_EVENT(uint8_t ev)
Return true if the event corresponding to the event ID *ev* is
a :ref:`local event <monitoring-event-local>`.
.. versionadded:: 3.13
.. deprecated:: 3.14
This function is :term:`soft deprecated`.

View file

@ -51,8 +51,8 @@ Number Protocol
Return a reasonable approximation for the mathematical value of *o1* divided by
*o2*, or ``NULL`` on failure. The return value is "approximate" because binary
floating point numbers are approximate; it is not possible to represent all real
numbers in base two. This function can return a floating point value when
floating-point numbers are approximate; it is not possible to represent all real
numbers in base two. This function can return a floating-point value when
passed two integers. This is the equivalent of the Python expression ``o1 / o2``.
@ -177,8 +177,8 @@ Number Protocol
Return a reasonable approximation for the mathematical value of *o1* divided by
*o2*, or ``NULL`` on failure. The return value is "approximate" because binary
floating point numbers are approximate; it is not possible to represent all real
numbers in base two. This function can return a floating point value when
floating-point numbers are approximate; it is not possible to represent all real
numbers in base two. This function can return a floating-point value when
passed two integers. The operation is done *in-place* when *o1* supports it.
This is the equivalent of the Python statement ``o1 /= o2``.

View file

@ -6,6 +6,56 @@ Object Protocol
===============
.. c:function:: PyObject* Py_GetConstant(unsigned int constant_id)
Get a :term:`strong reference` to a constant.
Set an exception and return ``NULL`` if *constant_id* is invalid.
*constant_id* must be one of these constant identifiers:
.. c:namespace:: NULL
======================================== ===== =========================
Constant Identifier Value Returned object
======================================== ===== =========================
.. c:macro:: Py_CONSTANT_NONE ``0`` :py:data:`None`
.. c:macro:: Py_CONSTANT_FALSE ``1`` :py:data:`False`
.. c:macro:: Py_CONSTANT_TRUE ``2`` :py:data:`True`
.. c:macro:: Py_CONSTANT_ELLIPSIS ``3`` :py:data:`Ellipsis`
.. c:macro:: Py_CONSTANT_NOT_IMPLEMENTED ``4`` :py:data:`NotImplemented`
.. c:macro:: Py_CONSTANT_ZERO ``5`` ``0``
.. c:macro:: Py_CONSTANT_ONE ``6`` ``1``
.. c:macro:: Py_CONSTANT_EMPTY_STR ``7`` ``''``
.. c:macro:: Py_CONSTANT_EMPTY_BYTES ``8`` ``b''``
.. c:macro:: Py_CONSTANT_EMPTY_TUPLE ``9`` ``()``
======================================== ===== =========================
Numeric values are only given for projects which cannot use the constant
identifiers.
.. versionadded:: 3.13
.. impl-detail::
In CPython, all of these constants are :term:`immortal`.
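A minimal sketch of returning one of these constants from a C function (the function name is hypothetical)::

    static PyObject *
    empty_tuple(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(args))
    {
        return Py_GetConstant(Py_CONSTANT_EMPTY_TUPLE);   /* strong reference */
    }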
.. c:function:: PyObject* Py_GetConstantBorrowed(unsigned int constant_id)
Similar to :c:func:`Py_GetConstant`, but return a :term:`borrowed
reference`.
This function is primarily intended for backwards compatibility:
using :c:func:`Py_GetConstant` is recommended for new code.
The reference is borrowed from the interpreter, and is valid until the
interpreter is finalized.
.. versionadded:: 3.13
.. c:var:: PyObject* Py_NotImplemented
The ``NotImplemented`` singleton, used to signal that an operation is
@ -16,7 +66,15 @@ Object Protocol
Properly handle returning :c:data:`Py_NotImplemented` from within a C
function (that is, create a new :term:`strong reference`
to NotImplemented and return it).
to :const:`NotImplemented` and return it).
.. c:macro:: Py_PRINT_RAW
Flag to be used with multiple functions that print the object (like
:c:func:`PyObject_Print` and :c:func:`PyFile_WriteObject`).
If passed, these functions would use the :func:`str` of the object
instead of the :func:`repr`.
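For instance, the following short sketch prints the :func:`str` of *obj* to standard output (assuming *obj* is a valid object)::

    if (PyObject_Print(obj, stdout, Py_PRINT_RAW) < 0) {
        /* an exception is set on failure */
    }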
.. c:function:: int PyObject_Print(PyObject *o, FILE *fp, int flags)
@ -27,7 +85,7 @@ Object Protocol
instead of the :func:`repr`.
.. c:function:: int PyObject_HasAttrWithError(PyObject *o, const char *attr_name)
.. c:function:: int PyObject_HasAttrWithError(PyObject *o, PyObject *attr_name)
Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise.
This is equivalent to the Python expression ``hasattr(o, attr_name)``.
@ -47,14 +105,14 @@ Object Protocol
.. c:function:: int PyObject_HasAttr(PyObject *o, PyObject *attr_name)
Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise. This
is equivalent to the Python expression ``hasattr(o, attr_name)``. This function
always succeeds.
Returns ``1`` if *o* has the attribute *attr_name*, and ``0`` otherwise.
This function always succeeds.
.. note::
Exceptions that occur when this calls :meth:`~object.__getattr__` and
:meth:`~object.__getattribute__` methods are silently ignored.
:meth:`~object.__getattribute__` methods aren't propagated,
but instead given to :func:`sys.unraisablehook`.
For proper error handling, use :c:func:`PyObject_HasAttrWithError`,
:c:func:`PyObject_GetOptionalAttr` or :c:func:`PyObject_GetAttr` instead.
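A brief sketch of the error-aware pattern (``obj`` and ``name`` are assumed to be valid objects, inside a function returning a ``PyObject*``)::

    int has = PyObject_HasAttrWithError(obj, name);
    if (has < 0) {
        return NULL;   /* the lookup raised; propagate the error */
    }
    if (has) {
        /* the attribute exists */
    }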
@ -149,6 +207,13 @@ Object Protocol
If *v* is ``NULL``, the attribute is deleted, but this feature is
deprecated in favour of using :c:func:`PyObject_DelAttrString`.
The number of different attribute names passed to this function
should be kept small, usually by using a statically allocated string
as *attr_name*.
For attribute names that aren't known at compile time, prefer calling
:c:func:`PyUnicode_FromString` and :c:func:`PyObject_SetAttr` directly.
For more details, see :c:func:`PyUnicode_InternFromString`, which may be
used internally to create a key object.
.. c:function:: int PyObject_GenericSetAttr(PyObject *o, PyObject *name, PyObject *value)
@ -174,6 +239,14 @@ Object Protocol
specified as a :c:expr:`const char*` UTF-8 encoded bytes string,
rather than a :c:expr:`PyObject*`.
The number of different attribute names passed to this function
should be kept small, usually by using a statically allocated string
as *attr_name*.
For attribute names that aren't known at compile time, prefer calling
:c:func:`PyUnicode_FromString` and :c:func:`PyObject_DelAttr` directly.
For more details, see :c:func:`PyUnicode_InternFromString`, which may be
used internally to create a key object for lookup.
.. c:function:: PyObject* PyObject_GenericGetDict(PyObject *o, void *context)
@ -222,12 +295,8 @@ Object Protocol
.. c:function:: int PyObject_RichCompareBool(PyObject *o1, PyObject *o2, int opid)
Compare the values of *o1* and *o2* using the operation specified by *opid*,
which must be one of :c:macro:`Py_LT`, :c:macro:`Py_LE`, :c:macro:`Py_EQ`,
:c:macro:`Py_NE`, :c:macro:`Py_GT`, or :c:macro:`Py_GE`, corresponding to ``<``,
``<=``, ``==``, ``!=``, ``>``, or ``>=`` respectively. Returns ``-1`` on error,
``0`` if the result is false, ``1`` otherwise. This is the equivalent of the
Python expression ``o1 op o2``, where ``op`` is the operator corresponding to
*opid*.
like :c:func:`PyObject_RichCompare`, but returns ``-1`` on error, ``0`` if
the result is false, ``1`` otherwise.
.. note::
If *o1* and *o2* are the same object, :c:func:`PyObject_RichCompareBool`
@ -299,14 +368,14 @@ Object Protocol
The result will be ``1`` when at least one of the checks returns ``1``,
otherwise it will be ``0``.
If *cls* has a :meth:`~class.__subclasscheck__` method, it will be called to
If *cls* has a :meth:`~type.__subclasscheck__` method, it will be called to
determine the subclass status as described in :pep:`3119`. Otherwise,
*derived* is a subclass of *cls* if it is a direct or indirect subclass,
i.e. contained in ``cls.__mro__``.
i.e. contained in :attr:`cls.__mro__ <type.__mro__>`.
Normally only class objects, i.e. instances of :class:`type` or a derived
class, are considered classes. However, objects can override this by having
a :attr:`~class.__bases__` attribute (which must be a tuple of base classes).
a :attr:`~type.__bases__` attribute (which must be a tuple of base classes).
.. c:function:: int PyObject_IsInstance(PyObject *inst, PyObject *cls)
@ -318,15 +387,15 @@ Object Protocol
The result will be ``1`` when at least one of the checks returns ``1``,
otherwise it will be ``0``.
If *cls* has a :meth:`~class.__instancecheck__` method, it will be called to
If *cls* has a :meth:`~type.__instancecheck__` method, it will be called to
determine the subclass status as described in :pep:`3119`. Otherwise, *inst*
is an instance of *cls* if its class is a subclass of *cls*.
An instance *inst* can override what is considered its class by having a
:attr:`~instance.__class__` attribute.
:attr:`~object.__class__` attribute.
An object *cls* can override if it is considered a class, and what its base
classes are, by having a :attr:`~class.__bases__` attribute (which must be a tuple
classes are, by having a :attr:`~type.__bases__` attribute (which must be a tuple
of base classes).
@ -424,6 +493,13 @@ Object Protocol
on failure. This is equivalent to the Python statement ``del o[key]``.
.. c:function:: int PyObject_DelItemString(PyObject *o, const char *key)
This is the same as :c:func:`PyObject_DelItem`, but *key* is
specified as a :c:expr:`const char*` UTF-8 encoded bytes string,
rather than a :c:expr:`PyObject*`.
.. c:function:: PyObject* PyObject_Dir(PyObject *o)
This is equivalent to the Python expression ``dir(o)``, returning a (possibly
@ -441,6 +517,12 @@ Object Protocol
iterated.
.. c:function:: PyObject* PyObject_SelfIter(PyObject *obj)
This is equivalent to the Python ``__iter__(self): return self`` method.
It is intended for :term:`iterator` types, to be used in the :c:member:`PyTypeObject.tp_iter` slot.
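For example, a hedged sketch of an iterator type using it in its slot definitions (``MyIterObject`` and ``myiter_next`` are hypothetical)::

    static PyTypeObject MyIter_Type = {
        PyVarObject_HEAD_INIT(NULL, 0)
        .tp_name = "example.MyIter",
        .tp_basicsize = sizeof(MyIterObject),
        .tp_iter = PyObject_SelfIter,     /* __iter__ returns self */
        .tp_iternext = myiter_next,       /* hypothetical __next__ implementation */
    };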
.. c:function:: PyObject* PyObject_GetAIter(PyObject *o)
This is the equivalent to the Python expression ``aiter(o)``. Takes an
@ -507,3 +589,119 @@ Object Protocol
has the :c:macro:`Py_TPFLAGS_MANAGED_DICT` flag set.
.. versionadded:: 3.13
.. c:function:: int PyUnstable_Object_EnableDeferredRefcount(PyObject *obj)
Enable `deferred reference counting <https://peps.python.org/pep-0703/#deferred-reference-counting>`_ on *obj*,
if supported by the runtime. In the :term:`free-threaded <free threading>` build,
this allows the interpreter to avoid reference count adjustments to *obj*,
which may improve multi-threaded performance. The tradeoff is
that *obj* will only be deallocated by the tracing garbage collector.
This function returns ``1`` if deferred reference counting is enabled on *obj*
(including when it was enabled before the call),
and ``0`` if deferred reference counting is not supported or if the hint was
ignored by the runtime. This function is thread-safe, and cannot fail.
This function does nothing on builds with the :term:`GIL` enabled, which do
not support deferred reference counting. This also does nothing if *obj* is not
an object tracked by the garbage collector (see :func:`gc.is_tracked` and
:c:func:`PyObject_GC_IsTracked`).
This function is intended to be used soon after *obj* is created,
by the code that creates it.
.. versionadded:: 3.14
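As an illustrative sketch, a factory function might opt a freshly created object into deferred reference counting like this (``make_thing`` and the called type are hypothetical)::

    static PyObject *
    make_thing(PyObject *type)
    {
        PyObject *obj = PyObject_CallNoArgs(type);
        if (obj == NULL) {
            return NULL;
        }
        /* A hint only: the return value may be 0 if the runtime ignores it. */
        (void)PyUnstable_Object_EnableDeferredRefcount(obj);
        return obj;
    }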
.. c:function:: int PyUnstable_IsImmortal(PyObject *obj)
This function returns non-zero if *obj* is :term:`immortal`, and zero
otherwise. This function cannot fail.
.. note::
Objects that are immortal in one CPython version are not guaranteed to
be immortal in another.
.. versionadded:: 3.14
.. c:function:: int PyUnstable_TryIncRef(PyObject *obj)
Increments the reference count of *obj* if it is not zero. Returns ``1``
if the object's reference count was successfully incremented. Otherwise,
this function returns ``0``.
:c:func:`PyUnstable_EnableTryIncRef` must have been called
earlier on *obj* or this function may spuriously return ``0`` in the
:term:`free threading` build.
This function is logically equivalent to the following C code, except that
it behaves atomically in the :term:`free threading` build::
    if (Py_REFCNT(op) > 0) {
        Py_INCREF(op);
        return 1;
    }
    return 0;
This is intended as a building block for managing weak references
without the overhead of a Python :ref:`weak reference object <weakrefobjects>`.
Typically, correct use of this function requires support from *obj*'s
deallocator (:c:member:`~PyTypeObject.tp_dealloc`).
For example, the following sketch could be adapted to implement a
"weakmap" that works like a :py:class:`~weakref.WeakValueDictionary`
for a specific type:
.. code-block:: c
    PyMutex mutex;

    PyObject *
    add_entry(weakmap_key_type *key, PyObject *value)
    {
        PyUnstable_EnableTryIncRef(value);
        weakmap_type weakmap = ...;
        PyMutex_Lock(&mutex);
        weakmap_add_entry(weakmap, key, value);
        PyMutex_Unlock(&mutex);
        Py_RETURN_NONE;
    }

    PyObject *
    get_value(weakmap_key_type *key)
    {
        weakmap_type weakmap = ...;
        PyMutex_Lock(&mutex);
        PyObject *result = weakmap_find(weakmap, key);
        if (PyUnstable_TryIncRef(result)) {
            // `result` is safe to use
            PyMutex_Unlock(&mutex);
            return result;
        }
        // if we get here, `result` is starting to be garbage-collected,
        // but has not been removed from the weakmap yet
        PyMutex_Unlock(&mutex);
        return NULL;
    }

    // tp_dealloc function for weakmap values
    void
    value_dealloc(PyObject *value)
    {
        weakmap_type weakmap = ...;
        PyMutex_Lock(&mutex);
        weakmap_remove_value(weakmap, value);
        ...
        PyMutex_Unlock(&mutex);
    }
.. versionadded:: 3.14
.. c:function:: void PyUnstable_EnableTryIncRef(PyObject *obj)
Enables subsequent uses of :c:func:`PyUnstable_TryIncRef` on *obj*. The
caller must hold a :term:`strong reference` to *obj* when calling this.
.. versionadded:: 3.14

View file

@ -16,7 +16,7 @@ kernel/git/torvalds/linux.git/tree/tools/perf/Documentation/jit-interface.txt>`_
In Python, these helper APIs can be used by libraries and features that rely
on generating machine code on the fly.
Note that holding the Global Interpreter Lock (GIL) is not required for these APIs.
Note that holding an :term:`attached thread state` is not required for these APIs.
.. c:function:: int PyUnstable_PerfMapState_Init(void)

Some files were not shown because too many files have changed in this diff