[3.12] For-else deserves its own section in the tutorial (GH-123946) #126615
Workflow file for this run

name: Tests
on:
workflow_dispatch:
push:
branches:
- 'main'
- '3.12'
- '3.11'
- '3.10'
- '3.9'
- '3.8'
pull_request:
branches:
- 'main'
- '3.12'
- '3.11'
- '3.10'
- '3.9'
- '3.8'
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}-reusable
cancel-in-progress: true
jobs:
check_source:
name: Change detection
# To use boolean outputs from this job, parse them as JSON.
# Here are some examples:
#
# if: fromJSON(needs.check_source.outputs.run-docs)
#
# ${{
# fromJSON(needs.check_source.outputs.run_tests)
# && 'truthy-branch'
# || 'falsy-branch'
# }}
#
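# The jobs below also compare these outputs as plain strings, e.g.
#
#   if: needs.check_source.outputs.run_tests == 'true'
#
# The two styles are interchangeable here because job outputs are
# always strings.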
uses: ./.github/workflows/reusable-change-detection.yml
check-docs:
name: Docs
needs: check_source
if: fromJSON(needs.check_source.outputs.run-docs)
uses: ./.github/workflows/reusable-docs.yml
check_abi:
name: 'Check if the ABI has changed'
runs-on: ubuntu-22.04
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- name: Install dependencies
run: |
sudo ./.github/workflows/posix-deps-apt.sh
sudo apt-get install -yq abigail-tools
- name: Build CPython
env:
CFLAGS: -g3 -O0
run: |
# Build Python with the libpython dynamic library
./configure --enable-shared
make -j4
- name: Check for changes in the ABI
id: check
run: |
if ! make check-abidump; then
echo "Generated ABI file is not up to date."
echo "Please add the release manager of this branch as a reviewer of this PR."
echo ""
echo "The up to date ABI file should be attached to this build as an artifact."
echo ""
echo "To learn more about this check: https://devguide.python.org/setup/#regenerate-the-abi-dump"
echo ""
exit 1
fi
- name: Generate updated ABI files
if: ${{ failure() && steps.check.conclusion == 'failure' }}
run: |
make regen-abidump
- uses: actions/upload-artifact@v4
name: Publish updated ABI files
if: ${{ failure() && steps.check.conclusion == 'failure' }}
with:
name: abi-data
path: ./Doc/data/*.abi
check_generated_files:
name: 'Check if generated files are up to date'
# Don't use ubuntu-latest but a specific version to make the job
# reproducible: to get the same tool versions (autoconf, aclocal, ...)
runs-on: ubuntu-22.04
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}-${{ env.pythonLocation }}
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Add ccache to PATH
run: echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: false
- name: Check Autoconf and aclocal versions
run: |
grep "Generated by GNU Autoconf 2.71" configure
grep "aclocal 1.16.5" aclocal.m4
grep -q "runstatedir" configure
grep -q "PKG_PROG_PKG_CONFIG" aclocal.m4
- name: Configure CPython
run: |
# Build Python with the libpython dynamic library
./configure --config-cache --with-pydebug --enable-shared
- name: Regenerate autoconf files
# Same command used by Tools/build/regen-configure.sh ($AUTORECONF)
run: autoreconf -ivf -Werror
- name: Build CPython
run: |
# Deepfreeze will usually cause global objects to be added or removed,
# so we run it before regen-global-objects gets run (in regen-all).
make regen-deepfreeze
make -j4 regen-all
make regen-stdlib-module-names regen-sbom
- name: Check for changes
run: |
git add -u
changes=$(git status --porcelain)
# Check for changes in regenerated files
if test -n "$changes"; then
echo "Generated files not up to date."
echo "Perhaps you forgot to run make regen-all or build.bat --regen. ;)"
echo "configure files must be regenerated with a specific version of autoconf."
echo "$changes"
echo ""
git diff --staged || true
exit 1
fi
- name: Check exported libpython symbols
run: make smelly
- name: Check limited ABI symbols
run: make check-limited-abi
- name: Check for unsupported C global variables
if: github.event_name == 'pull_request' # $GITHUB_EVENT_NAME
run: make check-c-globals
build_windows:
name: >-
Windows
${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
needs: check_source
if: fromJSON(needs.check_source.outputs.run_tests)
strategy:
matrix:
arch:
- Win32
- x64
- arm64
free-threading:
- false
# - true
uses: ./.github/workflows/reusable-windows.yml
with:
arch: ${{ matrix.arch }}
free-threading: ${{ matrix.free-threading }}
build_windows_msi:
name: >- # ${{ '' }} is a hack to nest jobs under the same sidebar category
Windows MSI${{ '' }}
needs: check_source
if: fromJSON(needs.check_source.outputs.run-win-msi)
strategy:
matrix:
arch:
- x86
- x64
- arm64
uses: ./.github/workflows/reusable-windows-msi.yml
with:
arch: ${{ matrix.arch }}
build_macos:
name: >-
macOS
${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
strategy:
fail-fast: false
matrix:
# Cirrus and macos-14 are M1; macos-13 is the default GHA Intel runner.
# macOS 13 only runs tests against the GIL-enabled CPython.
# Cirrus is used for upstream, macos-14 for forks.
os:
- ghcr.io/cirruslabs/macos-runner:sonoma
- macos-14
- macos-13
is-fork: # only used for the exclusion trick
- ${{ github.repository_owner != 'python' }}
free-threading:
- false
# - true
exclude:
- os: ghcr.io/cirruslabs/macos-runner:sonoma
is-fork: true
- os: macos-14
is-fork: false
- os: macos-13
free-threading: true
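# The `is-fork` axis above implements the exclusion trick: it is a single
# computed value carried by every matrix job, and the `exclude` rules use it
# to drop the Cirrus runner on forks and macos-14 upstream, so the runner is
# effectively selected by repository owner. A minimal sketch of the same
# pattern (hypothetical runner names):
#
#   strategy:
#     matrix:
#       os: [upstream-runner, fork-runner]
#       is-fork:
#         - ${{ github.repository_owner != 'python' }}
#       exclude:
#         - { os: upstream-runner, is-fork: true }
#         - { os: fork-runner, is-fork: false }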
uses: ./.github/workflows/reusable-macos.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
free-threading: ${{ matrix.free-threading }}
os: ${{ matrix.os }}
build_ubuntu:
name: >-
Ubuntu
${{ fromJSON(matrix.free-threading) && '(free-threading)' || '' }}
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
strategy:
matrix:
free-threading:
- false
# - true
uses: ./.github/workflows/reusable-ubuntu.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
free-threading: ${{ matrix.free-threading }}
build_ubuntu_ssltests:
name: 'Ubuntu SSL tests with OpenSSL'
runs-on: ${{ matrix.os }}
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04]
openssl_ver: [3.0.15, 3.1.7, 3.2.3, 3.3.2]
env:
OPENSSL_VER: ${{ matrix.openssl_ver }}
MULTISSL_DIR: ${{ github.workspace }}/multissl
OPENSSL_DIR: ${{ github.workspace }}/multissl/openssl/${{ matrix.openssl_ver }}
LD_LIBRARY_PATH: ${{ github.workspace }}/multissl/openssl/${{ matrix.openssl_ver }}/lib
steps:
- uses: actions/checkout@v4
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: false
- name: Configure CPython
run: ./configure --config-cache --with-pydebug --with-openssl=$OPENSSL_DIR
- name: Build CPython
run: make -j4
- name: Display build info
run: make pythoninfo
- name: SSL tests
run: ./python Lib/test/ssltests.py
test_hypothesis:
name: "Hypothesis tests on Ubuntu"
runs-on: ubuntu-22.04
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true' && needs.check_source.outputs.run_hypothesis == 'true'
env:
OPENSSL_VER: 3.0.15
PYTHONSTRICTEXTENSIONBUILD: 1
steps:
- uses: actions/checkout@v4
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ runner.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: false
- name: Setup directory envs for out-of-tree builds
run: |
echo "CPYTHON_RO_SRCDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-ro-srcdir)" >> $GITHUB_ENV
echo "CPYTHON_BUILDDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-builddir)" >> $GITHUB_ENV
- name: Create directories for read-only out-of-tree builds
run: mkdir -p $CPYTHON_RO_SRCDIR $CPYTHON_BUILDDIR
- name: Bind mount sources read-only
run: sudo mount --bind -o ro $GITHUB_WORKSPACE $CPYTHON_RO_SRCDIR
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
uses: actions/cache@v4
with:
path: ${{ env.CPYTHON_BUILDDIR }}/config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}
- name: Configure CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: |
../cpython-ro-srcdir/configure \
--config-cache \
--with-pydebug \
--with-openssl=$OPENSSL_DIR
- name: Build CPython out-of-tree
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make -j4
- name: Display build info
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: make pythoninfo
- name: Remount sources writable for tests
# Some tests write to srcdir, and the lack of pyc files slows down testing.
run: sudo mount $CPYTHON_RO_SRCDIR -oremount,rw
- name: Setup directory envs for out-of-tree builds
run: |
echo "CPYTHON_BUILDDIR=$(realpath -m ${GITHUB_WORKSPACE}/../cpython-builddir)" >> $GITHUB_ENV
- name: "Create hypothesis venv"
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: |
VENV_LOC=$(realpath -m .)/hypovenv
VENV_PYTHON=$VENV_LOC/bin/python
echo "HYPOVENV=${VENV_LOC}" >> $GITHUB_ENV
echo "VENV_PYTHON=${VENV_PYTHON}" >> $GITHUB_ENV
./python -m venv $VENV_LOC && $VENV_PYTHON -m pip install -r ${GITHUB_WORKSPACE}/Tools/requirements-hypothesis.txt
- name: 'Restore Hypothesis database'
id: cache-hypothesis-database
uses: actions/cache@v4
with:
path: ./hypothesis
key: hypothesis-database-${{ github.head_ref || github.run_id }}
restore-keys: |
hypothesis-database-
- name: "Run tests"
working-directory: ${{ env.CPYTHON_BUILDDIR }}
run: |
# Most of the excluded tests are slow test suites with no property tests
#
# (GH-104097) test_sysconfig is skipped because it has tests that fail
# when executed from inside a virtual environment.
${{ env.VENV_PYTHON }} -m test \
-W \
-o \
-j4 \
-x test_asyncio \
-x test_multiprocessing_fork \
-x test_multiprocessing_forkserver \
-x test_multiprocessing_spawn \
-x test_concurrent_futures \
-x test_socket \
-x test_subprocess \
-x test_signal \
-x test_sysconfig
- uses: actions/upload-artifact@v4
if: always()
with:
name: hypothesis-example-db
path: .hypothesis/examples/
build_asan:
name: 'Address sanitizer'
runs-on: ubuntu-22.04
timeout-minutes: 60
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
env:
OPENSSL_VER: 3.0.15
PYTHONSTRICTEXTENSIONBUILD: 1
ASAN_OPTIONS: detect_leaks=0:allocator_may_return_null=1:handle_segv=0
steps:
- uses: actions/checkout@v4
- name: Runner image version
run: echo "IMAGE_VERSION=${ImageVersion}" >> $GITHUB_ENV
- name: Restore config.cache
uses: actions/cache@v4
with:
path: config.cache
key: ${{ github.job }}-${{ runner.os }}-${{ env.IMAGE_VERSION }}-${{ needs.check_source.outputs.config_hash }}
- name: Register gcc problem matcher
run: echo "::add-matcher::.github/problem-matchers/gcc.json"
- name: Install Dependencies
run: sudo ./.github/workflows/posix-deps-apt.sh
- name: Set up GCC-10 for ASAN
uses: egor-tensin/setup-gcc@v1
with:
version: 10
- name: Configure OpenSSL env vars
run: |
echo "MULTISSL_DIR=${GITHUB_WORKSPACE}/multissl" >> $GITHUB_ENV
echo "OPENSSL_DIR=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=${GITHUB_WORKSPACE}/multissl/openssl/${OPENSSL_VER}/lib" >> $GITHUB_ENV
- name: 'Restore OpenSSL build'
id: cache-openssl
uses: actions/cache@v4
with:
path: ./multissl/openssl/${{ env.OPENSSL_VER }}
key: ${{ matrix.os }}-multissl-openssl-${{ env.OPENSSL_VER }}
- name: Install OpenSSL
if: steps.cache-openssl.outputs.cache-hit != 'true'
run: python3 Tools/ssl/multissltests.py --steps=library --base-directory $MULTISSL_DIR --openssl $OPENSSL_VER --system Linux
- name: Add ccache to PATH
run: |
echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
- name: Configure ccache action
uses: hendrikmuhs/ccache-action@v1.2
with:
save: ${{ github.event_name == 'push' }}
max-size: "200M"
- name: Configure CPython
run: ./configure --config-cache --with-address-sanitizer --without-pymalloc
- name: Build CPython
run: make -j4
- name: Display build info
run: make pythoninfo
- name: Tests
run: xvfb-run make buildbottest TESTOPTS="-j4 -uall,-cpu"
build_tsan:
name: 'Thread sanitizer'
needs: check_source
if: needs.check_source.outputs.run_tests == 'true'
uses: ./.github/workflows/reusable-tsan.yml
with:
config_hash: ${{ needs.check_source.outputs.config_hash }}
options: ./configure --config-cache --with-thread-sanitizer --with-pydebug
all-required-green: # This job does nothing and is only used for branch protection
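# Branch protection can only require checks with fixed names, so this single
# always-run job is marked required, and re-actors/alls-green fails it unless
# every job in `needs` succeeded, was an allowed failure, or was an allowed
# skip. A minimal sketch of the pattern (hypothetical job names):
#
#   required-green:
#     if: always()
#     needs: [job-a, job-b]
#     runs-on: ubuntu-latest
#     steps:
#       - uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe
#         with:
#           jobs: ${{ toJSON(needs) }}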
name: All required checks pass
if: always()
needs:
- check_source # Transitive dependency, needed to access `run_tests` value
- check-docs
- check_generated_files
- build_macos
- build_ubuntu
- build_ubuntu_ssltests
- build_windows
- build_windows_msi
- test_hypothesis
- build_asan
- build_tsan
runs-on: ubuntu-latest
steps:
- name: Check whether the needed jobs succeeded or failed
uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe
with:
allowed-failures: >-
build_macos,
build_ubuntu_ssltests,
build_windows_msi,
test_hypothesis,
allowed-skips: >-
${{
!fromJSON(needs.check_source.outputs.run-docs)
&& '
check-docs,
'
|| ''
}}
${{
needs.check_source.outputs.run_tests != 'true'
&& '
check_generated_files,
build_macos,
build_ubuntu,
build_ubuntu_ssltests,
build_windows,
build_asan,
build_tsan,
'
|| ''
}}
${{
!fromJSON(needs.check_source.outputs.run_hypothesis)
&& '
test_hypothesis,
'
|| ''
}}
jobs: ${{ toJSON(needs) }}